lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
fall/gen/src/generate/codegen.rs
zayenz/fall
71cd05c6eaa026f692db87d4a8ffe1326706efd4
use serde_json; use tera::Context; use fall_tree::{Text, AstNode}; use lang_fall::syntax::{FallFile, SynRule, LexRule, Expr, BlockExpr, MethodDef, Parameter}; use lang_fall::{RefKind, CallKind, MethodKind, Analysis, PratVariant, PrattOp, Arity, ChildKind}; use fall_parse as dst; use crate::util::{scream, camel}; pub type Result<T> = std::result::Result<T, ::failure::Error>; pub(super) struct Codegen<'a, 'f: 'a> { analysis: &'a Analysis<'f>, node_types: Vec<(Text<'f>, bool)>, expressions: Vec<dst::Expr>, } impl<'a, 'f> Codegen<'a, 'f> { pub fn new(analysis: &'a Analysis<'f>) -> Codegen<'a, 'f> { let node_types = { let mut result = Vec::new(); if let Some(tokenizer) = analysis.ast().tokenizer_def() { result.extend( tokenizer.lex_rules() .map(|r| (r.node_type(), r.is_skip())) ) } result.extend( analysis.ast() .syn_rules() .filter(|r| r.is_pub() && r.type_attr().is_none()) .filter_map(|r| r.name()) .map(|n| (n, false)) ); result }; Codegen { analysis, node_types, expressions: Vec::new(), } } pub fn generate(&mut self) -> Result<Context> { let mut context = Context::new(); context.insert("node_types", &self.node_types); for _ in self.file().syn_rules() { self.expressions.push(dst::Expr::Any) } for (i, r) in self.file().syn_rules().enumerate() { let expr = self.gen_rule(r)?; self.expressions[i] = expr; } let parser = serde_json::to_string(&self.expressions).unwrap(); context.insert("parser_json", &parser); let lex_rules = self.file().tokenizer_def() .ok_or(format_err!("no tokens defined"))? 
.lex_rules() .filter(|r| !r.is_contextual()) .map(|r| { let re = r.token_re().ok_or(format_err!("Bad token"))?; Ok(CtxLexRule { ty: r.node_type(), re: format!("{:?}", re), f: r.extern_fn() }) }).collect::<Result<Vec<_>>>()?; context.insert("lex_rules", &lex_rules); let verbatim = self.file().verbatim_def().map(|v| v.contents()); context.insert("verbatim", &verbatim); context.insert("has_whitespace_binder", &verbatim.map(|t| t.contains("whitespace_binder")).unwrap_or(false)); if let Some(ast) = self.file().ast_def() { context.insert("ast_nodes", &ast.ast_nodes().map(|node| { Ok(CtxAstNode { struct_name: camel(node.name()), node_type_name: scream(node.name()), methods: node.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, }) }).collect::<Result<Vec<_>>>()?); context.insert("ast_classes", &ast.ast_classes().map(|class| { CtxAstClass { enum_name: camel(class.name()), variants: class.variants().map(|variant| (scream(variant), camel(variant))).collect(), } }).collect::<Vec<_>>()); context.insert("ast_traits", &ast.ast_traits().map(|trait_| { Ok(CtxAstTrait { trait_name: camel(trait_.name()), methods: trait_.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, impl_for: ast.ast_nodes() .filter(|&node| { self.analysis.ast_node_traits(node).contains(&trait_) }) .map(|node| camel(node.name())) .collect(), }) }).collect::<Result<Vec<_>>>()?); } Ok(context) } fn file(&self) -> FallFile<'f> { self.analysis.ast() } fn syn_rule_ty(&self, rule: SynRule<'f>) -> Option<dst::NodeTypeRef> { let name = rule.ty_name()?; self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .map(|i| dst::NodeTypeRef((i + 1) as u32)) } fn syn_rule_ref(&self, rule: SynRule<'f>) -> dst::ExprRef { let idx = self.file().syn_rules().position(|r| r.node() == rule.node()).unwrap(); dst::ExprRef(idx as u32) } fn lex_rule_ty(&self, rule: LexRule<'f>) -> dst::NodeTypeRef { let name = rule.node_type(); let i = 
self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .unwrap(); dst::NodeTypeRef((i + 1) as u32) } fn param_ref(&self, param: Parameter<'f>) -> dst::Arg { let idx = self.file().syn_rules() .filter_map(|rule| rule.parameters()) .flat_map(|p| p.parameters()) .position(|p| p.node() == param.node()) .unwrap(); dst::Arg(idx as u32) } fn gen_rule(&mut self, rule: SynRule<'f>) -> Result<dst::Expr> { let body = match (rule.is_pratt(), rule.body()) { (true, Expr::BlockExpr(block)) => { let pratt = dst::Expr::Pratt(Box::new(self.gen_pratt(block)?)); self.push_expr(pratt) } (true, _) => unreachable!(), (false, body) => self.gen_expr(body)? }; let body = match (self.syn_rule_ty(rule), rule.is_replaces(), rule.is_cached()) { (Some(ty), true, _) => dst::Expr::PubReplace { ty, body, }, (Some(ty), false, false) => dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), }, (Some(ty), false, true) => { let body = self.push_expr(dst::Expr::Cached(body)); dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), } } (None, false, true) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); dst::Expr::Cached(body) } (None, false, false) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); self.expressions.pop().unwrap() } _ => unreachable!(), }; Ok(body) } fn push_expr(&mut self, expr: dst::Expr) -> dst::ExprRef { let idx = self.expressions.len(); self.expressions.push(expr); dst::ExprRef(idx as u32) } fn gen_expr(&mut self, expr: Expr<'f>) -> Result<dst::ExprRef> { let result = match expr { Expr::BlockExpr(block) => dst::Expr::Or(block.alts().map(|e| self.gen_expr(e)).collect::<Result<Vec<_>>>()?), Expr::SeqExpr(seq) => { fn is_commit(part: Expr) -> bool { part.node().text() == "<commit>" } let commit = seq.parts().position(is_commit); let parts = seq.parts() .filter(|&p| !is_commit(p)) .map(|e| self.gen_expr(e)) .collect::<Result<Vec<_>>>()?; dst::Expr::And(parts, commit) } Expr::RefExpr(ref_) => { let ref_ = 
self.analysis.resolve_reference(ref_) .ok_or(format_err!("Unresolved references: {}", ref_.node().text()))?; match ref_ { RefKind::Token(rule) => { let ty_ref = self.lex_rule_ty(rule); if rule.is_contextual() { dst::Expr::ContextualToken( ty_ref, rule.token_text() .ok_or(format_err!("Missing contextual token text"))? .to_string(), ) } else { dst::Expr::Token(ty_ref) } } RefKind::RuleReference(rule) => return Ok(self.syn_rule_ref(rule)), RefKind::Param(p) => dst::Expr::Var(self.param_ref(p)), } } Expr::CallExpr(call) => { let call = self.analysis.resolve_call(call) .ok_or(format_err!("Failed to compile {}", call.node().text()))?; match call { CallKind::Eof => dst::Expr::Eof, CallKind::Any => dst::Expr::Any, CallKind::Enter(idx, expr) => dst::Expr::Enter( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::Exit(idx, expr) => dst::Expr::Exit( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::IsIn(idx) => dst::Expr::IsIn( dst::Context(idx as u32) ), CallKind::Not(expr) => dst::Expr::Not(self.gen_expr(expr)?), CallKind::Layer(e1, e2) => dst::Expr::Layer( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::WithSkip(e1, e2) => dst::Expr::WithSkip( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::Inject(e1, e2) => dst::Expr::Inject( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::RuleCall(rule, args) => dst::Expr::Call( self.syn_rule_ref(rule), args.iter() .map(|&(p, e)| Ok((self.param_ref(p), self.gen_expr(e)?))) .collect::<Result<Vec<_>>>()?, ), CallKind::PrevIs(tokens) => dst::Expr::PrevIs( tokens.iter().map(|&r| self.syn_rule_ty(r).unwrap()).collect() ), CallKind::Commit => panic!("Should be handled specially"), } } Expr::OptExpr(opt_expr) => dst::Expr::Opt(self.gen_expr(opt_expr.expr())?), Expr::RepExpr(rep_expr) => dst::Expr::Rep(self.gen_expr(rep_expr.expr())?), }; Ok(self.push_expr(result)) } fn gen_pratt(&mut self, ast: BlockExpr<'f>) -> Result<dst::PrattTable> { fn alt_to_rule<'f>(analysis: &Analysis<'f>, alt: Expr<'f>) -> 
Result<SynRule<'f>> { match alt { Expr::SeqExpr(expr) => match expr.parts().next() { Some(Expr::RefExpr(ref_)) => match analysis.resolve_reference(ref_) { Some(RefKind::RuleReference(rule)) => Ok(rule), _ => return Err(format_err!("Bad pratt spec")), }, _ => return Err(format_err!("Bad pratt spec")) }, _ => return Err(format_err!("Bad pratt spec")) } } let mut result = dst::PrattTable { atoms: Vec::new(), prefixes: Vec::new(), infixes: Vec::new(), }; for alt in ast.alts() { let rule = alt_to_rule(&self.analysis, alt)?; let ty = self.syn_rule_ty(rule) .ok_or(format_err!("non public pratt rule"))?; let prat_kind = self.analysis.resolve_pratt_variant(rule) .ok_or(format_err!("pratt rule without attributes"))?; match prat_kind { PratVariant::Atom(_) => result.atoms.push(self.syn_rule_ref(rule)), PratVariant::Postfix(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: false, }); } PratVariant::Prefix(PrattOp { op, priority }) => { result.prefixes.push(dst::Prefix { ty, op: self.gen_expr(op)?, priority, }) } PratVariant::Bin(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: true, }); } }; } Ok(result) } fn gen_method(&self, method: MethodDef<'f>) -> Result<CtxMethod<'f>> { let description = self.analysis.resolve_method(method) .ok_or(format_err!("Bad method `{}`", method.node().text()))?; let (ret_type, body) = match description { MethodKind::TextAccessor(lex_rule, arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Text<'f>".to_owned(), format!("rt::child_of_type_exn(self.node(), {}).text()", node_type)), Arity::Optional => ("Option<rt::Text<'f>>".to_owned(), format!("rt::child_of_type(self.node(), {}).map(|n| n.text())", node_type)), Arity::Many => unimplemented!(), } } MethodKind::NodeAccessor(kind, arity) => { match (kind, arity) { (ChildKind::AstNode(n), Arity::Single) => (format!("{}<'f>", 
camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstNode(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstNode(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::AstClass(n), Arity::Single) => (format!("{}<'f>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstClass(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstClass(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::Token(lex_rule), arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Node<'f>".to_owned(), format!("self.node().children().find(|n| n.ty() == {}).unwrap()", node_type)), Arity::Optional => ("Option<rt::Node<'f>>".to_owned(), format!("self.node().children().find(|n| n.ty() == {})", node_type)), Arity::Many => unimplemented!(), } } } } }; Ok(CtxMethod { name: method.name(), ret_type, body }) } } #[derive(Serialize)] struct CtxLexRule<'f> { ty: Text<'f>, re: String, f: Option<Text<'f>>, } #[derive(Serialize)] struct CtxAstNode<'f> { struct_name: String, node_type_name: String, methods: Vec<CtxMethod<'f>>, } #[derive(Serialize)] struct CtxAstClass { enum_name: String, variants: Vec<(String, String)>, } #[derive(Serialize)] struct CtxAstTrait<'f> { trait_name: String, methods: Vec<CtxMethod<'f>>, impl_for: Vec<String>, } #[derive(Serialize)] struct CtxMethod<'f> { name: Text<'f>, ret_type: String, body: String, }
use serde_json; use tera::Context; use fall_tree::{Text, AstNode}; use lang_fall::syntax::{FallFile, SynRule, LexRule, Expr, BlockExpr, MethodDef, Parameter}; use lang_fall::{RefKind, CallKind, MethodKind, Analysis, PratVariant, PrattOp, Arity, ChildKind}; use fall_parse as dst; use crate::util::{scream, camel}; pub type Result<T> = std::result::Result<T, ::failure::Error>; pub(super) struct Codegen<'a, 'f: 'a> { analysis: &'a Analysis<'f>, node_types: Vec<(Text<'f>, bool)>, expressions: Vec<dst::Expr>, } impl<'a, 'f> Codegen<'a, 'f> { pub fn new(analysis: &'a Analysis<'f>) -> Codegen<'a, 'f> { let node_types = { let mut result = Vec::new(); if let Some(tokenizer) = analysis.ast().tokenizer_def() { result.extend( tokenizer.lex_rules() .map(|r| (r.node_type(), r.is_skip())) ) } result.extend( analysis.ast() .syn_rules() .filter(|r| r.is_pub() && r.type_attr().is_none()) .filter_map(|r| r.name()) .map(|n| (n, false)) ); result }; Codegen { analysis, node_types, expressions: Vec::new(), } } pub fn generate(&mut self) -> Result<Context> { let mut context = Context::new(); context.insert("node_types", &self.node_types); for _ in self.file().syn_rules() { self.expressions.push(dst::Expr::Any) } for (i, r) in self.file().syn_rules().enumerate() { let expr = self.gen_rule(r)?; self.expressions[i] = expr; } let parser = serde_json::to_string(&self.expressions).unwrap(); context.insert("parser_json", &parser); let lex_rules = self.file().tokenizer_def() .ok_or(format_err!("no tokens defined"))? 
.lex_rules() .filter(|r| !r.is_contextual()) .map(|r| { let re = r.token_re().ok_or(format_err!("Bad token"))?; Ok(CtxLexRule { ty: r.node_type(), re: format!("{:?}", re), f: r.extern_fn() }) }).collect::<Result<Vec<_>>>()?; context.insert("lex_rules", &lex_rules); let verbatim = self.file().verbatim_def().map(|v| v.contents()); context.insert("verbatim", &verbatim); context.insert("has_whitespace_binder", &verbatim.map(|t| t.contains("whitespace_binder")).unwrap_or(false)); if let Some(ast) = self.file().ast_def() { context.insert("ast_nodes", &ast.ast_nodes().map(|node| { Ok(CtxAstNode { struct_name: camel(node.name()), node_type_name: scream(node.name()), methods: node.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, }) }).collect::<Result<Vec<_>>>()?); context.insert("ast_classes", &ast.ast_classes().map(|class| { CtxAstClass { enum_name: camel(class.name()), variants: class.variants().map(|variant| (scream(variant), camel(variant))).collect(), } }).collect::<Vec<_>>()); context.insert("ast_traits", &ast.ast_traits().map(|trait_| { Ok(CtxAstTrait { trait_name: camel(trait_.name()), methods: trait_.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, impl_for: ast.ast_nodes() .filter(|&node| { self.analysis.ast_node_traits(node).contains(&trait_) }) .map(|node| camel(node.name())) .collect(), }) }).collect::<Result<Vec<_>>>()?); } Ok(context) } fn file(&self) -> FallFile<'f> { self.analysis.ast() } fn syn_rule_ty(&self, rule: SynRule<'f>) -> Option<dst::NodeTypeRef> { let name = rule.ty_name()?; self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .map(|i| dst::NodeTypeRef((i + 1) as u32)) } fn syn_rule_ref(&self, rule: SynRule<'f>) -> dst::ExprRef { let idx = self.file().syn_rules().position(|r| r.node() == rule.node()).unwrap(); dst::ExprRef(idx as u32) } fn lex_rule_ty(&self, rule: LexRule<'f>) -> dst::NodeTypeRef { let name = rule.node_type(); let i = 
self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .unwrap(); dst::NodeTypeRef((i + 1) as u32) } fn param_ref(&self, param: Parameter<'f>) -> dst::Arg { let idx = self.file().syn_rules() .filter_map(|rule| rule.parameters()) .flat_map(|p| p.parameters()) .position(|p| p.node() == param.node()) .unwrap(); dst::Arg(idx as u32) } fn gen_rule(&mut self, rule: SynRule<'f>) -> Result<dst::Expr> { let body = match (rule.is_pratt(), rule.body()) { (true, Expr::BlockExpr(block)) => { let pratt = dst::Expr::Pratt(Box::new(self.gen_pratt(block)?)); self.push_expr(pratt) } (true, _) => unreachable!(), (false, body) => self.gen_expr(body)? }; let body = match (self.syn_rule_ty(rule), rule.is_replaces(), rule.is_cached()) { (Some(ty), true, _) => dst::Expr::PubReplace { ty, body, }, (Some(ty), false, false) => dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), }, (Some(ty), false, true) => { let body = self.push_expr(dst::Expr::Cached(body)); dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), } } (None, false, true) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); dst::Expr::Cached(body) } (None, false, false) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); self.expressions.pop().unwrap() } _ => unreachable!(), }; Ok(body) } fn push_expr(&mut self, expr: dst::Expr) -> dst::ExprRef { let idx = self.expressions.len(); self.expressions.push(expr); dst::ExprRef(idx as u32) } fn gen_expr(&mut self, expr: Expr<'f>) -> Result<dst::ExprRef> { let result = match expr { Expr::BlockExpr(block) => dst::Expr::Or(block.alts().map(|e| self.gen_expr(e)).collect::<Result<Vec<_>>>()?), Expr::SeqExpr(seq) => { fn is_commit(part: Expr) -> bool { part.node().text() == "<commit>" } let commit = seq.parts().position(is_commit); let parts = seq.parts() .filter(|&p| !is_commit(p)) .map(|e| self.gen_expr(e)) .collect::<Result<Vec<_>>>()?; dst::Expr::And(parts, commit) } Expr::RefExpr(ref_) => { let ref_ = 
self.analysis.resolve_reference(ref_) .ok_or(format_err!("Unresolved references: {}", ref_.node().text()))?; match ref_ { RefKind::Token(rule) => { let ty_ref = self.lex_rule_ty(rule);
} RefKind::RuleReference(rule) => return Ok(self.syn_rule_ref(rule)), RefKind::Param(p) => dst::Expr::Var(self.param_ref(p)), } } Expr::CallExpr(call) => { let call = self.analysis.resolve_call(call) .ok_or(format_err!("Failed to compile {}", call.node().text()))?; match call { CallKind::Eof => dst::Expr::Eof, CallKind::Any => dst::Expr::Any, CallKind::Enter(idx, expr) => dst::Expr::Enter( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::Exit(idx, expr) => dst::Expr::Exit( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::IsIn(idx) => dst::Expr::IsIn( dst::Context(idx as u32) ), CallKind::Not(expr) => dst::Expr::Not(self.gen_expr(expr)?), CallKind::Layer(e1, e2) => dst::Expr::Layer( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::WithSkip(e1, e2) => dst::Expr::WithSkip( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::Inject(e1, e2) => dst::Expr::Inject( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::RuleCall(rule, args) => dst::Expr::Call( self.syn_rule_ref(rule), args.iter() .map(|&(p, e)| Ok((self.param_ref(p), self.gen_expr(e)?))) .collect::<Result<Vec<_>>>()?, ), CallKind::PrevIs(tokens) => dst::Expr::PrevIs( tokens.iter().map(|&r| self.syn_rule_ty(r).unwrap()).collect() ), CallKind::Commit => panic!("Should be handled specially"), } } Expr::OptExpr(opt_expr) => dst::Expr::Opt(self.gen_expr(opt_expr.expr())?), Expr::RepExpr(rep_expr) => dst::Expr::Rep(self.gen_expr(rep_expr.expr())?), }; Ok(self.push_expr(result)) } fn gen_pratt(&mut self, ast: BlockExpr<'f>) -> Result<dst::PrattTable> { fn alt_to_rule<'f>(analysis: &Analysis<'f>, alt: Expr<'f>) -> Result<SynRule<'f>> { match alt { Expr::SeqExpr(expr) => match expr.parts().next() { Some(Expr::RefExpr(ref_)) => match analysis.resolve_reference(ref_) { Some(RefKind::RuleReference(rule)) => Ok(rule), _ => return Err(format_err!("Bad pratt spec")), }, _ => return Err(format_err!("Bad pratt spec")) }, _ => return Err(format_err!("Bad pratt spec")) } } let mut result = 
dst::PrattTable { atoms: Vec::new(), prefixes: Vec::new(), infixes: Vec::new(), }; for alt in ast.alts() { let rule = alt_to_rule(&self.analysis, alt)?; let ty = self.syn_rule_ty(rule) .ok_or(format_err!("non public pratt rule"))?; let prat_kind = self.analysis.resolve_pratt_variant(rule) .ok_or(format_err!("pratt rule without attributes"))?; match prat_kind { PratVariant::Atom(_) => result.atoms.push(self.syn_rule_ref(rule)), PratVariant::Postfix(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: false, }); } PratVariant::Prefix(PrattOp { op, priority }) => { result.prefixes.push(dst::Prefix { ty, op: self.gen_expr(op)?, priority, }) } PratVariant::Bin(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: true, }); } }; } Ok(result) } fn gen_method(&self, method: MethodDef<'f>) -> Result<CtxMethod<'f>> { let description = self.analysis.resolve_method(method) .ok_or(format_err!("Bad method `{}`", method.node().text()))?; let (ret_type, body) = match description { MethodKind::TextAccessor(lex_rule, arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Text<'f>".to_owned(), format!("rt::child_of_type_exn(self.node(), {}).text()", node_type)), Arity::Optional => ("Option<rt::Text<'f>>".to_owned(), format!("rt::child_of_type(self.node(), {}).map(|n| n.text())", node_type)), Arity::Many => unimplemented!(), } } MethodKind::NodeAccessor(kind, arity) => { match (kind, arity) { (ChildKind::AstNode(n), Arity::Single) => (format!("{}<'f>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstNode(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstNode(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), 
"rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::AstClass(n), Arity::Single) => (format!("{}<'f>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstClass(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstClass(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::Token(lex_rule), arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Node<'f>".to_owned(), format!("self.node().children().find(|n| n.ty() == {}).unwrap()", node_type)), Arity::Optional => ("Option<rt::Node<'f>>".to_owned(), format!("self.node().children().find(|n| n.ty() == {})", node_type)), Arity::Many => unimplemented!(), } } } } }; Ok(CtxMethod { name: method.name(), ret_type, body }) } } #[derive(Serialize)] struct CtxLexRule<'f> { ty: Text<'f>, re: String, f: Option<Text<'f>>, } #[derive(Serialize)] struct CtxAstNode<'f> { struct_name: String, node_type_name: String, methods: Vec<CtxMethod<'f>>, } #[derive(Serialize)] struct CtxAstClass { enum_name: String, variants: Vec<(String, String)>, } #[derive(Serialize)] struct CtxAstTrait<'f> { trait_name: String, methods: Vec<CtxMethod<'f>>, impl_for: Vec<String>, } #[derive(Serialize)] struct CtxMethod<'f> { name: Text<'f>, ret_type: String, body: String, }
if rule.is_contextual() { dst::Expr::ContextualToken( ty_ref, rule.token_text() .ok_or(format_err!("Missing contextual token text"))? .to_string(), ) } else { dst::Expr::Token(ty_ref) }
if_condition
[ { "content": "fn parse_expr_pred(p: &mut Parser, expr: ExprRef, tokens: Pos) -> Option<Pos> {\n\n let old_mode = p.predicate_mode;\n\n p.predicate_mode = true;\n\n let result = parse_expr(p, expr, tokens);\n\n p.predicate_mode = old_mode;\n\n result\n\n}\n\n\n", "file_path": "fall/parse/src/syn_engine/expr.rs", "rank": 0, "score": 283037.7742247367 }, { "content": "fn parse_expr_inner(p: &mut Parser, expr: ExprRef, tokens: Pos) -> Option<Pos> {\n\n let grammar = &*p.grammar;\n\n match grammar[expr] {\n\n Expr::Pub { ty, body, replaceable } =>\n\n parse_pub(p, tokens, ty, body, replaceable),\n\n\n\n Expr::PubReplace { ty, body } =>\n\n parse_pub_replace(p, tokens, ty, body),\n\n\n\n Expr::Or(ref parts) =>\n\n parse_or(p, parts, tokens),\n\n\n\n Expr::And(ref parts, commit) =>\n\n parse_and(p, tokens, &*parts, commit),\n\n\n\n Expr::Token(ty) =>\n\n parse_token(p, tokens, ty),\n\n\n\n Expr::ContextualToken(ty, ref text) =>\n\n parse_contextual_token(p, tokens, ty, text),\n", "file_path": "fall/parse/src/syn_engine/expr.rs", "rank": 1, "score": 283037.7742247367 }, { "content": "pub fn walk_tree<F: FnMut(Node)>(node: Node, mut f: F) {\n\n go(node, &mut f);\n\n\n\n fn go<F: FnMut(Node)>(node: Node, f: &mut F) {\n\n f(node);\n\n for child in node.children() {\n\n go(child, f)\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "fall/tree/src/util.rs", "rank": 2, "score": 282407.66403033654 }, { "content": "fn resolve_context(db: &DB, d: &mut DiagnosticSink, call: CallExpr) -> Option<u32> {\n\n if let Some(name) = call.context_name() {\n\n db.get(query::AllContexts)\n\n .iter()\n\n .position(|&c| c == name)\n\n .map(|usize_| usize_ as u32)\n\n } else {\n\n d.error(call.args().next().unwrap().node(), \"Context should be a single quoted string\");\n\n None\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use fall_tree::search::find_leaf_at_offset;\n\n use fall_tree::search::ast;\n\n use crate::analysis::*;\n\n use crate::test_util::parse_with_caret;\n\n\n", "file_path": 
"lang/fall/src/analysis/query/resolve_call.rs", "rank": 3, "score": 270044.27517404617 }, { "content": "fn whitespace_binder(ty: rt::NodeType, adjacent_tokens: Vec<(rt::NodeType, &str)>, is_leading: bool) -> usize {\n\n if !is_leading {\n\n return 0;\n\n }\n\n match ty {\n\n STRUCT_DEF => {\n\n let mut has_comment = false;\n\n adjacent_tokens.iter().rev()\n\n .take_while(|&&(ty, text)| {\n\n if ty == LINE_COMMENT {\n\n has_comment = true;\n\n true\n\n } else {\n\n ty == WHITESPACE && text.chars().filter(|&c| c == '\\n').next().is_none()\n\n }\n\n })\n\n .count()\n\n }\n\n _ => 0,\n\n }\n\n}\n", "file_path": "lang/rust/syntax/src/rust.rs", "rank": 4, "score": 269989.4668285309 }, { "content": "pub fn generate(analysis: &Analysis) -> Result<String> {\n\n let mut cg = codegen::Codegen::new(analysis);\n\n let context = cg.generate()?;\n\n Tera::one_off(TEMPLATE.trim(), &context, false)\n\n .map_err(|e| format_err!(\"Failed to format template:\\n{:?}\", e))\n\n}\n\n\n\nconst TEMPLATE: &'static str = r#####\"\n\nuse fall_parse::runtime as rt;\n\npub use self::rt::ERROR;\n\n\n\n{% for node_type in node_types %}\n\npub const {{ node_type.0 | upper }}: rt::NodeType = rt::NodeType({{ 100 + loop.index0 }});\n\n{% endfor %}\n\n\n\n\n", "file_path": "fall/gen/src/generate/mod.rs", "rank": 5, "score": 245877.50973455072 }, { "content": "struct F(u32,);\n", "file_path": "lang/rust/syntax/tests/data/struct.rs", "rank": 6, "score": 245139.62733526825 }, { "content": "pub fn children_of_type<'f>(node: Node<'f>, ty: NodeType) -> Box<dyn Iterator<Item=Node<'f>> + 'f> {\n\n Box::new(node.children().filter(move |n| n.ty() == ty))\n\n}\n\n\n", "file_path": "fall/tree/src/search.rs", "rank": 7, "score": 243627.7035266973 }, { "content": "pub fn extract_rule(file: &File, range: TextRange, apply: bool) -> Option<ActionResult> {\n\n if range.is_empty() {\n\n return None;\n\n }\n\n let expr = ancestors(find_covering_node(file.root(), range))\n\n .find(|&n| is_expression(n))?;\n\n\n\n if 
RefExpr::wrap(expr).is_some() {\n\n return None;\n\n }\n\n\n\n if !apply {\n\n return Some(ActionResult::Available)\n\n }\n\n\n\n let rule = ast::ancestor_exn::<SynRule>(expr).node();\n\n let range = range_to_extract(expr, range);\n\n\n\n let new_rule = format!(\"\\n\\nrule new_rule {{\\n {}\\n}}\", file.text().slice(range));\n\n\n\n let mut edit = FileEdit::new(file);\n\n edit.replace_substring(range, \"new_rule\".to_owned());\n\n edit.insert_text_after(rule, new_rule);\n\n Some(ActionResult::Applied(edit.into_text_edit()))\n\n}\n\n\n\n\n", "file_path": "lang/fall/src/editor/actions/extract_rule.rs", "rank": 8, "score": 243478.20390488708 }, { "content": "fn parse_cached<'g>(p: &mut Parser<'g>, expr: ExprRef, pos: Pos) -> Option<Pos> {\n\n\n\n let mark = p.start_cached(expr);\n\n let result = p.get_from_cache(expr, pos).or_else(|| parse_expr(p, expr, pos))?;\n\n p.finish_cached(mark);\n\n\n\n Some(result)\n\n}\n", "file_path": "fall/parse/src/syn_engine/expr.rs", "rank": 9, "score": 239672.50425561744 }, { "content": "pub fn generate(analysis: &lang_fall::Analysis) -> Result<String> {\n\n generate::generate(analysis)\n\n}\n\n\n\npub struct TestRenderer;\n\n\n\nimpl TestRenderer {\n\n pub fn render_one(&mut self, file: &File, test: usize) -> String {\n\n let file = lang_fall::syntax::FallFile::wrap(file.root()).unwrap();\n\n let text = match file.tests().nth(test).and_then(|t| t.contents()) {\n\n None => return String::new(),\n\n Some(text) => text.to_string()\n\n };\n\n\n\n match self.render_all(file.node().text().to_string(), Some(text)) {\n\n Ok(result) => result,\n\n Err(e) => format!(\"{}\", e),\n\n }\n\n }\n\n\n", "file_path": "fall/gen/src/lib.rs", "rank": 10, "score": 238925.02627605185 }, { "content": "pub fn child_of_type_exn(node: Node, ty: NodeType) -> Node {\n\n child_of_type(node, ty).unwrap_or_else(|| {\n\n panic!(\"No child of type {:?} for {:?}\\\n\n ----\\\n\n {}\\\n\n ----\", ty, node.ty(), node.text())\n\n })\n\n}\n\n\n\n\n", "file_path": 
"fall/tree/src/search.rs", "rank": 11, "score": 234284.50390476495 }, { "content": "pub fn child_of_type(node: Node, ty: NodeType) -> Option<Node> {\n\n node.children().find(|n| n.ty() == ty)\n\n}\n\n\n", "file_path": "fall/tree/src/search.rs", "rank": 12, "score": 230503.65218714759 }, { "content": "pub fn descendants_of_type(node: Node, ty: NodeType) -> Vec<Node> {\n\n process_subtree_bottom_up(\n\n node,\n\n visitor(Vec::new())\n\n .visit_nodes(&[ty], |node, nodes| nodes.push(node))\n\n )\n\n}\n\n\n", "file_path": "fall/tree/src/search.rs", "rank": 13, "score": 230503.65218714756 }, { "content": "pub fn arg<A: DeserializeOwned + Send, T: This>(scope: &mut RootScope, args: &Arguments<T>, idx: i32) -> VmResult<A> {\n\n let arg = args.require(scope, idx)?;\n\n Ok(from_value(scope, arg)?)\n\n}\n\n\n", "file_path": "code/generic_backend/src/support.rs", "rank": 14, "score": 229104.90568119683 }, { "content": "pub fn process_symbols<'f>(file: &'f File, f: &mut dyn FnMut(Text<'f>, Node<'f>)) {\n\n fn p<'f, T: NameOwner<'f>>(n: T, f: &mut dyn FnMut(Text<'f>, Node<'f>)) {\n\n if let Some(name) = n.name() {\n\n f(name, n.node())\n\n }\n\n }\n\n process_subtree_bottom_up(\n\n file.root(),\n\n visitor(f)\n\n .visit::<FnDef, _>(|def, f| p(def, f))\n\n .visit::<StructDef, _>(|def, f| p(def, f))\n\n .visit::<EnumDef, _>(|def, f| p(def, f))\n\n .visit::<TypeDef, _>(|def, f| p(def, f))\n\n .visit::<TraitDef, _>(|def, f| p(def, f))\n\n .visit::<ModDef, _>(|def, f| p(def, f)),\n\n );\n\n}", "file_path": "lang/rust/src/editor/file_symbols.rs", "rank": 15, "score": 224593.2894121698 }, { "content": "pub fn resolve_reference<'p, 'f>(\n\n analysis: &Analysis<'f>,\n\n offset: TextUnit,\n\n provider: ReferenceProvider<'p, 'f>\n\n) -> Option<TextRange> {\n\n let reference = match try_find_at_offset(analysis.ast().node().file(), offset, |node| provider(node)) {\n\n Some(ref_) => ref_,\n\n None => return None,\n\n };\n\n\n\n reference.resolve(analysis).map(|d| 
d.navigation_range())\n\n}\n\n\n", "file_path": "lang/fall/src/editor/references/refdec.rs", "rank": 16, "score": 220938.13688968826 }, { "content": "fn add_impl_for<'f, T: NameOwner<'f> + TypeParametersOwner<'f>>(\n\n file: &'f File,\n\n offset: TextUnit,\n\n apply: bool,\n\n) -> Option<ActionResult> {\n\n let decl: T = ast::node_at_offset(file.root(), offset)?;\n\n let name = decl.name()?;\n\n if !apply {\n\n return Some(ActionResult::Available);\n\n }\n\n let mut result = String::new();\n\n result += \"\\n\\n\";\n\n result += \"impl\";\n\n if let Some(params) = decl.type_parameters() {\n\n result += params.node().text().to_string().as_str();\n\n }\n\n result += \" \";\n\n result += name.to_string().as_str();\n\n if let Some(params) = decl.type_parameters() {\n\n result += \"<\";\n", "file_path": "lang/rust/src/editor/actions.rs", "rank": 17, "score": 215716.60432286662 }, { "content": "fn is_expression(node: Node) -> bool {\n\n Expr::wrap(node).is_some()\n\n}\n\n\n", "file_path": "lang/fall/src/editor/actions/extract_rule.rs", "rank": 18, "score": 215303.86500672274 }, { "content": "pub fn find_usages<'p, 'f>(\n\n analysis: &Analysis<'f>,\n\n offset: TextUnit,\n\n reference_provider: ReferenceProvider<'p, 'f>,\n\n declaration_provider: DeclarationProvider<'f>\n\n) -> Vec<TextRange> {\n\n let file = analysis.ast();\n\n let declaration = try_find_at_offset(file.node().file(), offset, |node| {\n\n declaration_provider(node)\n\n .and_then(|d| {\n\n if d.navigation_range().contains_offset_nonstrict(offset) { Some(d) } else { None }\n\n })\n\n .or_else(|| reference_provider(node).and_then(|ref_| ref_.resolve(analysis)))\n\n });\n\n let declaration = match declaration {\n\n Some(decl) => decl,\n\n None => return Vec::new(),\n\n };\n\n\n\n subtree(file.node())\n\n .filter_map(|node| reference_provider(node))\n\n .filter(|ref_| ref_.resolve(analysis).as_ref() == Some(&declaration))\n\n .map(|ref_| ref_.node.range())\n\n .collect()\n\n}\n\n\n", "file_path": 
"lang/fall/src/editor/references/refdec.rs", "rank": 19, "score": 210747.11834294 }, { "content": "pub fn resolve_reference(analysis: &Analysis, offset: TextUnit) -> Option<TextRange> {\n\n return refdec::resolve_reference(\n\n analysis,\n\n offset,\n\n &|node| ref_provider(analysis, node)\n\n );\n\n}\n\n\n", "file_path": "lang/fall/src/editor/references/mod.rs", "rank": 20, "score": 207919.39439922612 }, { "content": "pub fn camel(word: Text) -> String {\n\n word.to_cow()\n\n .split(\"_\")\n\n .map(|w| w[..1].to_ascii_uppercase() + &w[1..])\n\n .collect()\n\n}\n", "file_path": "fall/gen/src/util.rs", "rank": 21, "score": 206063.915037393 }, { "content": "pub fn is_leaf(node: Node) -> bool {\n\n node.children().next().is_none() && !node.range().is_empty()\n\n}\n\n\n", "file_path": "fall/tree/src/search.rs", "rank": 22, "score": 205858.16689344065 }, { "content": "pub fn process(command: Task) -> Result<()> {\n\n let mut renderer = TestRenderer;\n\n match command {\n\n Task::Generate(grammar) => {\n\n let input = read_file(&grammar)?;\n\n let result = lang_fall::analyse(input).analyse(generate)?;\n\n fs::write(grammar.with_extension(\"rs\"), result)?;\n\n }\n\n Task::Examples(grammar) => {\n\n let input = read_file(&grammar)?;\n\n let result = renderer.render_all(input, None)?;\n\n fs::write(grammar.with_extension(\"txt\"), result)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n\n", "file_path": "fall/gen/src/lib.rs", "rank": 23, "score": 205769.7768118301 }, { "content": "pub fn tu(value: u32) -> TextUnit {\n\n TextUnit(value)\n\n}\n\n\n\nimpl From<TextUnit> for u32 {\n\n fn from(tu: TextUnit) -> u32 {\n\n tu.0\n\n }\n\n}\n\n\n\nimpl ops::Add<u32> for TextUnit {\n\n type Output = TextUnit;\n\n fn add(self, rhs: u32) -> TextUnit {\n\n TextUnit(self.0 + rhs)\n\n }\n\n}\n\n\n\nimpl ops::Add<TextUnit> for TextUnit {\n\n type Output = TextUnit;\n\n fn add(self, rhs: TextUnit) -> TextUnit {\n", "file_path": "fall/text/src/text_unit.rs", "rank": 24, "score": 201383.94787037355 }, { 
"content": "pub fn resolve_reference(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<JsFallEditorFile>()?;\n\n let offset: TextUnit = arg(scope, &call.arguments, 1)?;\n\n let result = editor_file.grab(|file| file.resolve_reference(offset));\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/fall/native/src/lib.rs", "rank": 26, "score": 198560.4690703108 }, { "content": "pub fn swap_alternatives(file: &File, offset: TextUnit, apply: bool) -> Option<ActionResult> {\n\n let (left, right) = find_swappable_nodes(file, offset)?;\n\n if !apply {\n\n return Some(ActionResult::Available);\n\n }\n\n\n\n let mut edit = FileEdit::new(file);\n\n edit.replace(left, right);\n\n edit.replace(right, left);\n\n Some(ActionResult::Applied(edit.into_text_edit()))\n\n}\n\n\n", "file_path": "lang/fall/src/editor/actions/swap_alternatives.rs", "rank": 27, "score": 198550.23407397314 }, { "content": "pub fn find_usages(analysis: &Analysis, offset: TextUnit) -> Vec<TextRange> {\n\n return refdec::find_usages(\n\n analysis,\n\n offset,\n\n &|node| ref_provider(analysis, node),\n\n def_provider\n\n );\n\n}\n\n\n", "file_path": "lang/fall/src/editor/references/mod.rs", "rank": 29, "score": 196733.93249056 }, { "content": "fn ref_provider<'f>(analysis: &Analysis<'f>, node: Node<'f>) -> Option<Reference<'f>> {\n\n process_node(\n\n node,\n\n visitor(None)\n\n .visit::<RefExpr, _>(|ref_expr, result| {\n\n *result = Some(Reference::new(ref_expr.node(), |analysis, node| {\n\n let ref_ = RefExpr::wrap(node).unwrap();\n\n let target = match analysis.resolve_reference(ref_) {\n\n None => return None,\n\n Some(t) => t\n\n };\n\n\n\n Some(match target {\n\n RefKind::RuleReference(rule) => rule.into(),\n\n RefKind::Param(param) => param.into(),\n\n RefKind::Token(token) => token.into(),\n\n })\n\n }))\n\n })\n\n .visit::<MethodDef, _>(|method, result| {\n", "file_path": "lang/fall/src/editor/references/mod.rs", 
"rank": 30, "score": 192652.25039948346 }, { "content": "fn parse_pub_replace<'g>(\n\n p: &mut Parser<'g>, tokens: Pos,\n\n ty_idx: NodeTypeRef, body: ExprRef\n\n) -> Option<Pos> {\n\n let ts = parse_expr(p, body, tokens)?;\n\n p.replacement = Some(ty_idx);\n\n Some(ts)\n\n}\n\n\n\npub(crate) fn parse_or<'t, 'g>(\n\n p: &mut Parser<'g>,\n\n options: &'g [ExprRef],\n\n tokens: Pos\n\n) -> Option<Pos> {\n\n options.iter().filter_map(|&opt| parse_expr(p, opt, tokens)).next()\n\n}\n\n\n", "file_path": "fall/parse/src/syn_engine/expr.rs", "rank": 31, "score": 192405.00207614622 }, { "content": "pub fn process_node<'f, V, C>(node: Node<'f>, mut visitor: VisitorBuilder<'f, C, V>) -> C\n\n where V: Visit<'f, Context=C>\n\n{\n\n visitor.do_visit(node);\n\n visitor.ctx\n\n}\n\n\n\npub struct VisitorBuilder<'f, C, V> {\n\n ctx: C,\n\n visitor: V,\n\n n: PhantomData<Node<'f>>,\n\n}\n\n\n\nimpl<'f, C, V> VisitorBuilder<'f, C, V> {\n\n pub fn new(ctx: C, visitor: V) -> VisitorBuilder<'f, C, V> {\n\n VisitorBuilder { ctx, visitor, n: PhantomData}\n\n }\n\n}\n\n\n\nimpl<'f, C, V> VisitorBuilder<'f, C, V> {\n", "file_path": "fall/tree/src/visitor.rs", "rank": 32, "score": 191244.77134791212 }, { "content": "pub fn ret<'j, T: Serialize>(scope: &mut RootScope<'j>, value: T) -> JsResult<'j, JsValue> {\n\n Ok(to_value(scope, &value)?)\n\n}\n", "file_path": "code/generic_backend/src/support.rs", "rank": 33, "score": 190590.90036419043 }, { "content": "pub fn process_subtree_bottom_up<'f, V, C>(node: Node<'f>, mut visitor: VisitorBuilder<'f, C, V>) -> C\n\n where V: Visit<'f, Context=C>\n\n{\n\n traversal::bottom_up(node, |node| visitor.do_visit(node));\n\n return visitor.ctx;\n\n}\n\n\n", "file_path": "fall/tree/src/visitor.rs", "rank": 34, "score": 189552.98783144826 }, { "content": "pub fn default_context_actions(\n\n file: &File,\n\n range: TextRange,\n\n actions: &mut Vec<&'static str>,\n\n) {\n\n for &(action_id, action) in DEFAULT_ACTIONS.iter() {\n\n if action(file, 
range.start(), false).is_some() {\n\n actions.push(action_id)\n\n }\n\n }\n\n}\n\n\n", "file_path": "fall/editor/src/actions.rs", "rank": 35, "score": 189308.1280235314 }, { "content": "fn try_find_at_offset<'f, T, F: Fn(Node<'f>) -> Option<T>>(\n\n file: &'f File,\n\n offset: TextUnit,\n\n f: F\n\n) -> Option<T> {\n\n let node = match try_find_non_ws_node_at_offset(file, offset) {\n\n None => return None,\n\n Some(node) => node\n\n };\n\n\n\n ancestors(node).filter_map(f).next()\n\n}\n", "file_path": "lang/fall/src/editor/references/refdec.rs", "rank": 36, "score": 188755.7193552885 }, { "content": "type QMap<'f, Q> = Mutex<HashMap<Q, <Q as Query<'f>>::Result>>;\n\n\n\npub(crate) struct DB<'f> {\n\n file: FallFile<'f>,\n\n pub(super) diagnostics: Mutex<Vec<Diagnostic>>,\n\n\n\n //query_stack: Mutex<Vec<String>>,\n\n\n\n all_lex_rules: QMap<'f, query::AllLexRules>,\n\n all_syn_rules: QMap<'f, query::AllSynRules>,\n\n all_contexts: QMap<'f, query::AllContexts>,\n\n resolve_ref_expr: QMap<'f, query::ResolveRefExpr<'f>>,\n\n resolve_call: QMap<'f, query::ResolveCall<'f>>,\n\n unused_rules: QMap<'f, query::UnusedRules>,\n\n resolve_pratt_variant: QMap<'f, query::ResolvePrattVariant<'f>>,\n\n resolve_method: QMap<'f, query::ResolveMethod<'f>>,\n\n ast_node_traits: QMap<'f, query::AstNodeTraits<'f>>,\n\n}\n\n\n\nimpl<'f> DB<'f> {\n", "file_path": "lang/fall/src/analysis/db.rs", "rank": 37, "score": 187088.13930808994 }, { "content": "pub fn apply_default_context_action(\n\n file: &File,\n\n range: TextRange,\n\n id: &str\n\n) -> Option<Option<TextEdit>> {\n\n let action = DEFAULT_ACTIONS.iter().find(|&&(aid, _)| aid == id)?.1;\n\n Some(action(file, range.start(), true).map(ActionResult::into_edit))\n\n}\n\n\n\npub const DEFAULT_ACTIONS: &[(&str, fn(&File, TextUnit, bool) -> Option<ActionResult>)] = &[\n\n (\"Swap\", swap)\n\n];\n\n\n\npub enum ActionResult {\n\n Available,\n\n Applied(TextEdit),\n\n}\n\n\n\nimpl ActionResult {\n\n pub fn into_edit(self) -> TextEdit {\n\n 
match self {\n\n ActionResult::Available =>\n\n panic!(\"Context action should provide edit when apply is set to true\"),\n\n ActionResult::Applied(edit) => edit,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "fall/editor/src/actions.rs", "rank": 38, "score": 186869.31510871233 }, { "content": "fn child_kind<'f>(file: FallFile<'f>, selector: AstSelector<'f>) -> Option<ChildKind<'f>> {\n\n let ast_def = file.ast_def()?;\n\n if let Some(ast) = ast_def.ast_nodes().find(|a| a.name() == selector.child()) {\n\n return Some(ChildKind::AstNode(ast));\n\n }\n\n if let Some(class) = ast_def.ast_classes().find(|c| c.name() == selector.child()) {\n\n return Some(ChildKind::AstClass(class));\n\n }\n\n if let Some(lex_rule) = file.tokenizer_def().and_then(|td| td.lex_rules().find(|r| r.node_type() == selector.child())) {\n\n return Some(ChildKind::Token(lex_rule));\n\n }\n\n None\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {}\n", "file_path": "lang/fall/src/analysis/query/resolve_method.rs", "rank": 39, "score": 186371.82433625724 }, { "content": "fn add_use_braces(file: &File, offset: TextUnit, apply: bool) -> Option<ActionResult> {\n\n let use_decl: UseDecl = ast::node_at_offset(file.root(), offset)?;\n\n let path = use_decl.path()?;\n\n let last_segment = path.segment()?.node();\n\n if use_decl.spec().is_some() {\n\n return None;\n\n }\n\n if !apply {\n\n return Some(ActionResult::Available);\n\n }\n\n let mut edit = FileEdit::new(&file);\n\n edit.replace_with_text(last_segment, format!(\"{{{}}}\", last_segment.text()));\n\n Some(ActionResult::Applied(edit.into_text_edit()))\n\n}\n\n\n", "file_path": "lang/rust/src/editor/actions.rs", "rank": 40, "score": 182880.4361194125 }, { "content": "pub trait TypeParametersOwner<'f>: rt::AstNode<'f> {\n\n fn type_parameters(&self) -> Option<TypeParameters<'f>> {\n\n rt::AstChildren::new(self.node().children()).next()\n\n }\n\n}\n\nimpl<'f> TypeParametersOwner<'f> for StructDef<'f> {}\n\nimpl<'f> TypeParametersOwner<'f> for EnumDef<'f> {}", 
"file_path": "lang/rust/syntax/src/rust.rs", "rank": 41, "score": 181623.8410544527 }, { "content": "pub fn hl(node: Node, tag: HlTag, highlights: &mut Highlights) {\n\n highlights.push((node.range(), tag))\n\n}\n\n\n", "file_path": "fall/editor/src/hl.rs", "rank": 42, "score": 178853.89233319554 }, { "content": "struct D { f: A, pub j: B }\n", "file_path": "lang/rust/syntax/tests/data/struct.rs", "rank": 43, "score": 176043.40203076255 }, { "content": "fn reformat_file(file: &File, rules: &[Rule], ws_type: NodeType) -> TextEdit {\n\n let spacer = Spacer { rules, ws_type };\n\n let mut edit = FileEdit::new(file);\n\n reformat_node(file.root(), &mut edit, &spacer);\n\n edit.into_text_edit()\n\n}\n\n\n\n\n", "file_path": "lang/fall/src/editor/formatter.rs", "rank": 44, "score": 174604.0770345438 }, { "content": "pub fn lex<L: Lexer>(lexer: &L, text: Text) -> Vec<Token> {\n\n let mut result = Vec::new();\n\n let mut text = text;\n\n while !text.is_empty() {\n\n let t = lexer.step(&mut text);\n\n result.push(t);\n\n }\n\n result\n\n}\n\n\n", "file_path": "fall/parse/src/lex_engine.rs", "rank": 45, "score": 173988.2963803899 }, { "content": "pub fn check_no_context_action<E: EditorFileImpl>(\n\n action_id: &str,\n\n text: &str,\n\n) {\n\n let (before, range) = test_util::extract_range(text, \"^\");\n\n let file = E::parse(&before);\n\n let actions = file.context_actions(range);\n\n if actions.contains(&action_id) {\n\n panic!(\"Action `{}` is avialable\", action_id);\n\n }\n\n}\n\n\n\n\n", "file_path": "fall/editor/src/actions.rs", "rank": 46, "score": 171925.4666094751 }, { "content": "pub fn check_context_action<E: EditorFileImpl>(\n\n action_id: &str,\n\n before: &str,\n\n after: &str\n\n) {\n\n let (before, range) = test_util::extract_range(before, \"^\");\n\n let file = E::parse(&before);\n\n let actions = file.context_actions(range);\n\n if !actions.contains(&action_id) {\n\n panic!(\"Action `{}` is not avialable\", action_id);\n\n }\n\n match 
file.apply_context_action(range, action_id) {\n\n None => panic!(\"Failed to apply `{}` action\", action_id),\n\n Some(edit) => {\n\n let actual = edit.apply(file.file().text());\n\n test_util::report_diff(after.trim(), actual.as_text().to_cow().trim())\n\n }\n\n }\n\n}\n\n\n", "file_path": "fall/editor/src/actions.rs", "rank": 47, "score": 171925.4666094751 }, { "content": "pub fn match_ast(actual: &str, expected: &str) {\n\n let actual = actual.trim();\n\n let expected = expected.trim();\n\n if actual != expected {\n\n panic!(\"Actual:\\n{}\\nExpected:\\n{}\\n\", actual, expected)\n\n }\n\n}\n", "file_path": "fall/test/src/lib.rs", "rank": 48, "score": 167870.03209177792 }, { "content": "fn collect_tests(mut grammar: &str) -> Vec<String> {\n\n let mut result = Vec::new();\n\n while let Some(pos) = grammar.find(\"test r\") {\n\n grammar = &grammar[pos + \"test r\".len()..];\n\n let n_hashes = grammar.chars().take_while(|&c| c == '#').count();\n\n grammar = &grammar[n_hashes + 1..];\n\n if let Some(end) = grammar.find(&\"\\\"################\"[..1 + n_hashes]) {\n\n let example = &grammar[..end].trim();\n\n result.push(example.to_string())\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "fall/tree/src/test_util.rs", "rank": 49, "score": 167277.56342847546 }, { "content": "struct E(u32);\n", "file_path": "lang/rust/syntax/tests/data/struct.rs", "rank": 50, "score": 166831.9507292146 }, { "content": "pub fn context_actions<I: EditorFileImpl, C: Class<Internals=EditorFile<I>>>(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<C>()?;\n\n let range: TextRange = arg(scope, &call.arguments, 1)?;\n\n let result = editor_file.grab(|file| file.context_actions(range));\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 51, "score": 165592.02971552825 }, { "content": "pub fn dump_file(f: &File) -> String {\n\n dump(f.language(), f.root(), 
&f.text().to_cow(), false)\n\n}\n\n\n", "file_path": "fall/tree/src/util.rs", "rank": 52, "score": 164646.79755093413 }, { "content": "struct Foo<> {}\n", "file_path": "lang/rust/syntax/tests/data/type_parameters.rs", "rank": 53, "score": 164330.70175909728 }, { "content": "struct A<A,>();\n", "file_path": "lang/rust/syntax/tests/data/type_parameters.rs", "rank": 54, "score": 164183.1867355529 }, { "content": "pub fn apply_context_action<I: EditorFileImpl, C: Class<Internals=EditorFile<I>>>(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<C>()?;\n\n let range: TextRange = arg(scope, &call.arguments, 1)?;\n\n let id: String = arg(scope, &call.arguments, 2)?;\n\n let result = editor_file.grab(move |file| {\n\n file.apply_context_action(range, &id).map(to_vs_edits)\n\n });\n\n ret(scope, result)\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct VsEdit {\n\n delete: TextRange,\n\n insert: String,\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 55, "score": 163602.5022864114 }, { "content": "pub fn dump_file_ws(f: &File) -> String {\n\n dump(f.language(), f.root(), &f.text().to_cow(), true)\n\n}\n\n\n", "file_path": "fall/tree/src/util.rs", "rank": 56, "score": 163099.85578943908 }, { "content": "struct B<'a,>();\n", "file_path": "lang/rust/syntax/tests/data/type_parameters.rs", "rank": 58, "score": 161590.975643323 }, { "content": "pub fn render_test(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<JsFallEditorFile>()?;\n\n let file = editor_file.grab(|file| file.clone());\n\n let test_n: usize = arg(scope, &call.arguments, 1)?;\n\n let callback = call.arguments.require(scope, 2)?.check::<JsFunction>()?;\n\n RenderTask(file, test_n).schedule(callback);\n\n Ok(JsNull::new().upcast())\n\n}\n\n\n", "file_path": "code/fall/native/src/lib.rs", "rank": 59, "score": 161387.14672406748 
}, { "content": "pub fn find_usages(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<JsFallEditorFile>()?;\n\n let offset: TextUnit = arg(scope, &call.arguments, 1)?;\n\n let result = editor_file.grab(|file| file.find_usages(offset));\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/fall/native/src/lib.rs", "rank": 60, "score": 161387.14672406748 }, { "content": "pub fn test_at_offset(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<JsFallEditorFile>()?;\n\n let offset: TextUnit = arg(scope, &call.arguments, 1)?;\n\n let result = editor_file.grab(|file| file.test_at_offset(offset));\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/fall/native/src/lib.rs", "rank": 61, "score": 161387.14672406748 }, { "content": "struct G(u32, String);\n", "file_path": "lang/rust/syntax/tests/data/struct.rs", "rank": 62, "score": 160567.59068129543 }, { "content": "pub fn analyse<S: Into<String>>(text: S) -> FileWithAnalysis {\n\n FileWithAnalysis::new(parse(text))\n\n}\n\n\n", "file_path": "lang/fall/src/lib.rs", "rank": 63, "score": 160092.36383063247 }, { "content": "struct C<'a, A,>();\n\n\n", "file_path": "lang/rust/syntax/tests/data/type_parameters.rs", "rank": 64, "score": 159208.7414510874 }, { "content": "fn parse_token<'g>(\n\n p: &mut Parser<'g>, tokens: Pos,\n\n ty_idx: NodeTypeRef,\n\n) -> Option<Pos> {\n\n let (ty, ts) = p.bump(tokens)?;\n\n if p[ty_idx] != ty {\n\n return None;\n\n }\n\n Some(ts)\n\n}\n\n\n", "file_path": "fall/parse/src/syn_engine/expr.rs", "rank": 67, "score": 154503.0466748929 }, { "content": "fn struct_cond_ambiguity() {\n\n if foo {}\n\n if (Foo {}) {}\n\n if foo(Foo {}) {}\n\n if {Foo {}} {}\n\n}\n\n\n", "file_path": "lang/rust/syntax/tests/data/expressions.rs", "rank": 68, "score": 154333.25593198638 }, { "content": "pub fn parse<C: Class>(call: Call) -> JsResult<JsValue> 
{\n\n let scope = call.scope;\n\n let text = call.arguments.require(scope, 0)?.check::<JsString>()?;\n\n\n\n let class: Handle<JsClass<C>> = C::class(scope)?;\n\n let ctor: Handle<JsFunction<C>> = class.constructor(scope)?;\n\n let ctor_args = iter::once(text.upcast());\n\n let file = ctor.construct::<_, JsValue, _>(scope, ctor_args)?;\n\n Ok(file.upcast())\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 69, "score": 154311.34947574616 }, { "content": "#[test]\n\nfn test_syn_rule_diagnostics() {\n\n check_diagnostics(r\"\n\n pub rule foo { <eof x> }\n\n rule bar { foo <abracadabra>}\n\n rule baz { <prev_is foo> <prev_is bar> <prev_is {foo}>}\n\n rule dupe { dupe }\n\n rule dupe { dupe }\n\n \", \"\\\n\nE <eof x>: Wrong number of arguments, expected 0, got 1\n\nE x: Unresolved reference\n\nE abracadabra: Unresolved reference\n\nE <prev_is bar>: <prev_is> arguments must be public rules\n\nE <prev_is {foo}>: <prev_is> arguments must be public rules\n\nE dupe: Duplicate rule\n\nW baz: Unused rule\n\nW dupe: Unused rule\");\n\n}\n\n\n", "file_path": "lang/fall/src/analysis/mod.rs", "rank": 70, "score": 154002.88510129153 }, { "content": "#[test]\n\nfn test_lex_rule_diagnostics() {\n\n check_diagnostics(r\"\n\n tokenizer {\n\n class 'class'\n\n class 'trait'\n\n }\n\n \", \"\\\n\nE class 'trait': Duplicate token\n\n\");\n\n}\n\n\n", "file_path": "lang/fall/src/analysis/mod.rs", "rank": 71, "score": 154002.88510129153 }, { "content": "fn parse_pub<'g>(\n\n p: &mut Parser<'g>, tokens: Pos,\n\n ty_idx: NodeTypeRef, body: ExprRef, replaceable: bool,\n\n) -> Option<Pos> {\n\n if replaceable {\n\n p.replacement = None;\n\n }\n\n let mark = p.start(ty_idx);\n\n let ts = parse_expr(p, body, tokens)?;\n\n if let (true, Some(ty)) = (replaceable, p.replacement) {\n\n p.replacement = None;\n\n p.replace(mark, ty)\n\n };\n\n p.finish();\n\n p.prev = Some(p[ty_idx]);\n\n Some(ts)\n\n}\n\n\n", "file_path": "fall/parse/src/syn_engine/expr.rs", "rank": 72, "score": 
153844.79300283815 }, { "content": "fn swap(file: &File, offset: TextUnit, apply: bool) -> Option<ActionResult> {\n\n let comma = find_comma(file.root(), offset)?;\n\n let left = nonws_sibling(comma, Direction::Left)?;\n\n let right = nonws_sibling(comma, Direction::Right)?;\n\n if left.ty() != right.ty() {\n\n return None;\n\n }\n\n\n\n if !apply {\n\n return Some(ActionResult::Available);\n\n }\n\n let mut edit = FileEdit::new(file);\n\n edit.replace(left, right);\n\n edit.replace(right, left);\n\n Some(ActionResult::Applied(edit.into_text_edit()))\n\n}\n\n\n", "file_path": "fall/editor/src/actions.rs", "rank": 73, "score": 153573.55708956148 }, { "content": "fn c() { 1; }\n\n\n", "file_path": "lang/rust/syntax/tests/data/expressions.rs", "rank": 74, "score": 153244.26503962622 }, { "content": "fn b() { 1 }\n\n\n", "file_path": "lang/rust/syntax/tests/data/expressions.rs", "rank": 75, "score": 153244.26503962622 }, { "content": "fn baz<X,>() {}\n\n\n", "file_path": "lang/rust/syntax/tests/data/type_parameters.rs", "rank": 76, "score": 152078.38755032426 }, { "content": "fn parse_contextual_token<'g>(\n\n p: &mut Parser<'g>, tokens: Pos,\n\n ty_idx: NodeTypeRef, text: &str,\n\n) -> Option<Pos> {\n\n p.bump_by_text(tokens, text, ty_idx)\n\n}\n\n\n", "file_path": "fall/parse/src/syn_engine/expr.rs", "rank": 77, "score": 152051.4078902386 }, { "content": "pub fn subtree<'f>(node: Node<'f>) -> Box<dyn Iterator<Item=Node<'f>> + 'f> {\n\n Box::new(node.children().flat_map(subtree).chain(::std::iter::once(node)))\n\n}\n\n\n", "file_path": "fall/tree/src/search.rs", "rank": 78, "score": 151824.3106551723 }, { "content": "pub fn visitor<'f>(mapping: HlMap) -> VisitorBuilder<'f, Highlights, HlVisitor> {\n\n VisitorBuilder::new(Vec::new(), HlVisitor(mapping))\n\n}\n\n\n\npub struct HlVisitor(HlMap);\n\n\n\nimpl<'f> Visit<'f> for HlVisitor {\n\n type Context = Highlights;\n\n\n\n fn visit(&mut self, node: Node<'f>, ctx: &mut Highlights) {\n\n if node.ty() == ERROR_TY {\n\n 
let range = if node.range().is_empty() {\n\n TextRange::from_len(node.range().start(), tu(1))\n\n } else {\n\n node.range()\n\n };\n\n ctx.push((range, ERROR));\n\n return;\n\n }\n\n for &(tag, tys) in self.0.iter() {\n\n for &ty in tys.iter() {\n\n if node.ty() == ty {\n\n hl(node, tag, ctx);\n\n return;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "fall/editor/src/hl.rs", "rank": 79, "score": 151753.13012729504 }, { "content": "fn reformat_node<'f>(node: Node<'f>, edit: &mut FileEdit<'f>, spacer: &Spacer) {\n\n spacer.apply(node, edit);\n\n for child in node.children() {\n\n reformat_node(child, edit, spacer);\n\n }\n\n}\n\n\n\n\n", "file_path": "lang/fall/src/editor/formatter.rs", "rank": 80, "score": 151634.47424528716 }, { "content": "pub fn parse(\n\n lang: &Language,\n\n lexer_def: &RegexLexer,\n\n parser_def: &ParserDefinition,\n\n text: Text,\n\n metrics: &Metrics,\n\n builder: &mut TreeBuilder,\n\n) -> Option<Box<dyn Any + Sync + Send>> {\n\n let tokens: Vec<Token> = metrics.measure_time(\"lexing\", || {\n\n lex_engine::lex(lexer_def, text)\n\n });\n\n metrics.record(\"relexed region\", text.len().utf8_len() as u64, \"\");\n\n\n\n let events = parser_def.parse(None, text, &tokens, lang, metrics, builder);\n\n let incremental_data = IncrementalData { tokens, events };\n\n Some(Box::new(incremental_data))\n\n}\n\n\n", "file_path": "fall/parse/src/lib.rs", "rank": 81, "score": 151071.15653229452 }, { "content": "pub fn reparse(\n\n lang: &Language,\n\n lexer_def: &RegexLexer,\n\n parser_def: &ParserDefinition,\n\n incremental_data: &dyn Any,\n\n edit: &TextEdit,\n\n new_text: Text,\n\n metrics: &Metrics,\n\n builder: &mut TreeBuilder,\n\n) -> Option<Box<dyn Any + Sync + Send>> {\n\n let incremental_data: &IncrementalData = incremental_data.downcast_ref().unwrap();\n\n let (tokens, relexed_region) = metrics.measure_time(\"lexing\", || {\n\n lex_engine::relex(lexer_def, &incremental_data.tokens, edit, new_text)\n\n });\n\n metrics.record(\"relexed 
region\", relexed_region as u64, \"\");\n\n\n\n let salvaged = syn_engine::salvage_segments(\n\n &incremental_data.events,\n\n &incremental_data.tokens,\n\n &|t| lang.node_type_info(t.ty).whitespace_like,\n", "file_path": "fall/parse/src/lib.rs", "rank": 82, "score": 151071.15653229452 }, { "content": "fn add_impl(file: &File, offset: TextUnit, apply: bool) -> Option<ActionResult> {\n\n None\n\n .or_else(|| add_impl_for::<StructDef>(file, offset, apply))\n\n .or_else(|| add_impl_for::<EnumDef>(file, offset, apply))\n\n}\n\n\n", "file_path": "lang/rust/src/editor/actions.rs", "rank": 83, "score": 149872.10350584803 }, { "content": "pub fn quux() {}\n", "file_path": "lang/rust/syntax/tests/data/visibility.rs", "rank": 84, "score": 147647.22738246387 }, { "content": "struct RenderTask(EditorFile<FileWithAnalysis>, usize);\n\n\n\nimpl Task for RenderTask {\n\n type Output = String;\n\n type Error = ();\n\n type JsEvent = JsString;\n\n\n\n fn perform(&self) -> Result<String, ()> {\n\n lazy_static! 
{\n\n static ref TEST_RENDERER: Mutex<TestRenderer> = Mutex::new(TestRenderer);\n\n }\n\n\n\n let mut renderer = TEST_RENDERER.lock().unwrap();\n\n let tree = renderer.render_one(&self.0.file(), self.1);\n\n Ok(tree)\n\n }\n\n\n\n fn complete<'a, T: Scope<'a>>(self, scope: &'a mut T, result: Result<String, ()>) -> JsResult<JsString> {\n\n Ok(JsString::new(scope, &result.unwrap()).unwrap())\n\n }\n", "file_path": "code/fall/native/src/lib.rs", "rank": 85, "score": 147404.01721308203 }, { "content": "fn colorize_child(node: Node, child: NodeType, tag: HlTag, spans: &mut Highlights) {\n\n if let Some(child) = child_of_type(node, child) {\n\n colorize_node(child, tag, spans);\n\n }\n\n}\n\n\n", "file_path": "lang/fall/src/editor/highlighting.rs", "rank": 86, "score": 146063.03119821785 }, { "content": "struct Bar<U, V>(U, V);\n", "file_path": "lang/rust/syntax/tests/data/type_parameters.rs", "rank": 87, "score": 145873.90486050484 }, { "content": "pub trait NameOwner<'f>: rt::AstNode<'f> {\n\n fn name_ident(&self) -> Option<rt::Node<'f>> {\n\n self.node().children().find(|n| n.ty() == IDENT)\n\n }\n\n fn name(&self) -> Option<rt::Text<'f>> {\n\n rt::child_of_type(self.node(), IDENT).map(|n| n.text())\n\n }\n\n}\n\nimpl<'f> NameOwner<'f> for FnDef<'f> {}\n\nimpl<'f> NameOwner<'f> for StructDef<'f> {}\n\nimpl<'f> NameOwner<'f> for EnumDef<'f> {}\n\nimpl<'f> NameOwner<'f> for TraitDef<'f> {}\n\nimpl<'f> NameOwner<'f> for TypeDef<'f> {}\n\nimpl<'f> NameOwner<'f> for ModDef<'f> {}\n\nimpl<'f> NameOwner<'f> for TypeParameter<'f> {}\n", "file_path": "lang/rust/syntax/src/rust.rs", "rank": 88, "score": 144579.9519802549 }, { "content": "pub fn visitor<'f, C>(ctx: C) -> VisitorBuilder<'f, C, EmptyVisitor<C>> {\n\n VisitorBuilder::new(ctx, EmptyVisitor(PhantomData))\n\n}\n\n\n", "file_path": "fall/tree/src/visitor.rs", "rank": 89, "score": 144454.18509804597 }, { "content": "fn base_directory() -> Result<PathBuf> {\n\n let result = 
::std::env::temp_dir().join(\"fall-tests\");\n\n fs::create_dir_all(&result)?;\n\n fs::create_dir_all(&result.join(\"src\"))?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "fall/gen/src/lib.rs", "rank": 90, "score": 143495.90538958478 }, { "content": "fn is_blank(text: &str) -> bool {\n\n text.trim().is_empty()\n\n}\n", "file_path": "lang/fall/syntax/tests/prop.rs", "rank": 91, "score": 139676.3079713017 }, { "content": "fn def_provider<'f>(node: Node<'f>) -> Option<Declaration<'f>> {\n\n process_node(\n\n node,\n\n visitor(None)\n\n .visit::<SynRule, _>(|node, result| *result = Some(node.into()))\n\n .visit::<LexRule, _>(|node, result| *result = Some(node.into()))\n\n .visit::<Parameter, _>(|node, result| *result = Some(node.into()))\n\n .visit::<AstNodeDef, _>(|node, result| *result = Some(node.into()))\n\n .visit::<AstClassDef, _>(|node, result| *result = Some(node.into()))\n\n )\n\n}\n\n\n\nimpl<'f> From<SynRule<'f>> for Declaration<'f> {\n\n fn from(rule: SynRule<'f>) -> Self {\n\n Declaration::with_name_ident(rule.node(), rule.name_ident())\n\n }\n\n}\n\n\n\nimpl<'f> From<LexRule<'f>> for Declaration<'f> {\n\n fn from(rule: LexRule<'f>) -> Self {\n", "file_path": "lang/fall/src/editor/references/mod.rs", "rank": 92, "score": 138937.7269367198 }, { "content": "pub fn relex<L: Lexer>(\n\n lexer: &L,\n\n old_tokens: &[Token],\n\n edit: &TextEdit,\n\n new_text: Text\n\n) -> (Vec<Token>, usize)\n\n{\n\n if old_tokens.iter().any(|&token| token.ty == ERROR) {\n\n return (lex(lexer, new_text), 0);\n\n }\n\n\n\n let mut old_tokens = old_tokens.iter().cloned();\n\n let mut old_len = tu(0);\n\n\n\n let mut new_tokens: Vec<Token> = Vec::new();\n\n let mut new_len = tu(0);\n\n\n\n let mut edit_point = tu(0);\n\n let mut reused = tu(0);\n\n\n", "file_path": "fall/parse/src/lex_engine.rs", "rank": 93, "score": 137860.47825851967 }, { "content": "fn after_space_typed(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut file = call.arguments.require(scope, 
0)?.check::<JsRustEditorFile>()?;\n\n let offset: TextUnit = arg(scope, &call.arguments, 1)?;\n\n let result = file.grab(move |file| file.after_space_typed(offset));\n\n ret(scope, result)\n\n}\n\n\n\n\n", "file_path": "code/rust/native/src/lib.rs", "rank": 94, "score": 137592.55185581287 }, { "content": "pub fn structure<I: EditorFileImpl, C: Class<Internals=EditorFile<I>>>(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<C>()?;\n\n let result = editor_file.grab(|file| file.structure());\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 95, "score": 134684.67543736353 }, { "content": "pub fn highlight<I: EditorFileImpl, C: Class<Internals=EditorFile<I>>>(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<C>()?;\n\n let result = editor_file.grab(|file| file.highlight());\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 96, "score": 134684.67543736353 }, { "content": "pub fn metrics<I: EditorFileImpl, C: Class<Internals=EditorFile<I>>>(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<C>()?;\n\n let result = editor_file.grab(|file| file.metrics());\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 97, "score": 134684.67543736353 }, { "content": "pub fn edit<I: EditorFileImpl, C: Class<Internals=EditorFile<I>>>(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n\n\n let editor_file = call.arguments.require(scope, 0)?.check::<C>()?;\n\n let edits = call.arguments.require(scope, 1)?;\n\n\n\n let class: Handle<JsClass<C>> = C::class(scope)?;\n\n let constructor: Handle<JsFunction<C>> = class.constructor(scope)?;\n\n let args = iter::once(editor_file.upcast()).chain(iter::once(edits));\n\n let 
file = constructor.construct(scope, args)?;\n\n Ok(file.upcast())\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 98, "score": 134684.67543736353 }, { "content": "pub fn reformat<I: EditorFileImpl, C: Class<Internals=EditorFile<I>>>(call: Call) -> JsResult<JsValue> {\n\n let scope = call.scope;\n\n let mut editor_file = call.arguments.require(scope, 0)?.check::<C>()?;\n\n let result = editor_file.grab(|file| {\n\n let edits = file.reformat();\n\n to_vs_edits(edits)\n\n });\n\n ret(scope, result)\n\n}\n\n\n", "file_path": "code/generic_backend/src/lib.rs", "rank": 99, "score": 134684.67543736353 } ]
Rust
truck-rendimpl/src/shaperend.rs
mattiasgronlund/truck
df78ff348b448d41743c2f7db1f93ebb3c0f41ca
use crate::*; use truck_meshalgo::tessellation::*; use truck_topology::*; impl Default for ShapeInstanceDescriptor { #[inline(always)] fn default() -> Self { ShapeInstanceDescriptor { instance_state: Default::default(), mesh_precision: 0.005, } } } impl<Shape: MeshableShape> TryIntoInstance<PolygonInstance> for Shape { type Descriptor = ShapeInstanceDescriptor; fn try_into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> Option<PolygonInstance> { let polygon = self.triangulation(desc.mesh_precision)?.into_polygon(); Some(polygon.into_instance( handler, shaders, &PolygonInstanceDescriptor { instance_state: desc.instance_state.clone(), }, )) } } impl<P, C, S> IntoInstance<PolygonInstance> for Shell<P, C, S> where Shell<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<P, C, S> IntoInstance<PolygonInstance> for Solid<P, C, S> where Solid<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<C, S> IntoInstance<WireFrameInstance> for Shell<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor; fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .face_iter() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), 
desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } } } impl<C, S> IntoInstance<WireFrameInstance> for Solid<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor; fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .boundaries() .iter() .flatten() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } } }
use crate::*; use truck_meshalgo::tessellation::*; use truck_topology::*; impl Default for ShapeInstanceDescriptor { #[inline(always)] fn default() -> Self { ShapeInstanceDescriptor { instance_state: Default::default(), mesh_precision: 0.005, } } } impl<Shape: MeshableShape> TryIntoInstance<PolygonInstance> for Shape { type Descriptor = ShapeInstanceDescriptor; fn try_into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> Option<PolygonInstance> { let polygon = self.triangulation(desc.mesh_precision)?.into_polygon(); Some(polygon.into_instance( handler, shaders, &PolygonInstanceDescriptor { instance_state: desc.instance_state.clone(), }, )) } } impl<P, C, S> IntoInstance<PolygonInstance> for Shell<P, C, S> where Shell<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<P, C, S> IntoInstance<PolygonInstance> for Solid<P, C, S> where Solid<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<C, S> IntoInstance<WireFrameInstance> for Shell<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor;
} impl<C, S> IntoInstance<WireFrameInstance> for Solid<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor; fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .boundaries() .iter() .flatten() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } } }
fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .face_iter() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } }
function_block-full_function
[ { "content": "fn nontex_inst_desc() -> PolygonInstanceDescriptor {\n\n PolygonInstanceDescriptor {\n\n instance_state: InstanceState {\n\n matrix: Matrix4::from_cols(\n\n [1.0, 2.0, 3.0, 4.0].into(),\n\n [5.0, 6.0, 7.0, 8.0].into(),\n\n [9.0, 10.0, 11.0, 12.0].into(),\n\n [13.0, 14.0, 15.0, 16.0].into(),\n\n ),\n\n material: Material {\n\n albedo: Vector4::new(0.2, 0.4, 0.6, 1.0),\n\n roughness: 0.31415,\n\n reflectance: 0.29613,\n\n ambient_ratio: 0.92,\n\n alpha_blend: false,\n\n },\n\n texture: None,\n\n backface_culling: true,\n\n },\n\n }\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 0, "score": 253546.48288508411 }, { "content": "fn bgcheck_shaders(handler: &DeviceHandler) -> PolygonShaders {\n\n let source = include_str!(\"shaders/mesh-bindgroup.wgsl\");\n\n let module = Arc::new(\n\n handler\n\n .device()\n\n .create_shader_module(&ShaderModuleDescriptor {\n\n source: ShaderSource::Wgsl(source.into()),\n\n label: None,\n\n }),\n\n );\n\n PolygonShaders::new(\n\n Arc::clone(&module),\n\n \"vs_main\",\n\n Arc::clone(&module),\n\n \"nontex_main\",\n\n Arc::clone(&module),\n\n \"tex_main\",\n\n )\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 1, "score": 230550.43115886612 }, { "content": "fn bgcheck_anti_shaders(handler: &DeviceHandler) -> PolygonShaders {\n\n let source = include_str!(\"shaders/mesh-bindgroup.wgsl\");\n\n let module = Arc::new(\n\n handler\n\n .device()\n\n .create_shader_module(&ShaderModuleDescriptor {\n\n source: ShaderSource::Wgsl(source.into()),\n\n label: None,\n\n }),\n\n );\n\n PolygonShaders::new(\n\n Arc::clone(&module),\n\n \"vs_main\",\n\n Arc::clone(&module),\n\n \"nontex_main_anti\",\n\n Arc::clone(&module),\n\n \"tex_main_anti\",\n\n )\n\n}\n\n\n\nconst ATTRS_OBJ: &str = \"\n\nv -1.0 2.0 -1.0\\nv 1.0 2.0 -1.0\\nv -1.0 2.0 1.0\\nv 1.0 2.0 1.0\n\nvt -1.0 -1.0\\nvt 1.0 -1.0\\nvt 1.0 1.0\\nvt -1.0 1.0\n\nvn -1.0 0.2 -1.0\\nvn 1.0 0.2 -1.0\\nvn -1.0 0.2 1.0\\nvn 
1.0 0.2 1.0\n\n\";\n\nconst TRIS_OBJ: &str = \"f 1/1/1 2/2/3 3/4/2\\nf 3/4/2 2/2/3 4/3/4\\n\";\n\nconst QUADS_OBJ: &str = \"f 1/1/1 2/2/3 4/3/4 3/4/2\\n\";\n\n\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 2, "score": 227229.3823287432 }, { "content": "fn sub_connect_wires<P: Clone, C: Clone, S: Clone, CP: Fn(&P, &P) -> C, CC: Fn(&C, &C) -> S>(\n\n edge0: &Edge<P, C>,\n\n edge1: &Edge<P, C>,\n\n connect_points: &CP,\n\n connect_curves: &CC,\n\n vemap: &mut HashMap<VertexID<P>, Edge<P, C>>,\n\n) -> Face<P, C, S>\n\n{\n\n let edge2 = match vemap.get(&edge0.front().id()) {\n\n Some(edge) => edge.clone(),\n\n None => {\n\n let edge = connect_vertices(edge0.front(), edge1.front(), connect_points);\n\n vemap.insert(edge0.front().id(), edge.clone());\n\n edge\n\n }\n\n };\n\n let edge3 = match vemap.get(&edge0.back().id()) {\n\n Some(edge) => edge.clone(),\n\n None => {\n\n let edge = connect_vertices(edge0.back(), edge1.back(), connect_points);\n", "file_path": "truck-modeling/src/topo_impls.rs", "rank": 3, "score": 185068.64733555255 }, { "content": "fn exec_cut_random_test<C>(curve: &C)\n\nwhere\n\n C: Cut,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance, {\n\n let mut part0 = curve.clone();\n\n let (t0, t1) = curve.parameter_range();\n\n let p = rand::random::<f64>();\n\n let t = t0 * (1.0 - p) + t1 * p;\n\n let part1 = part0.cut(t);\n\n assert_near!(part0.parameter_range().0, t0);\n\n assert_near!(part0.parameter_range().1, t);\n\n assert_near!(part1.parameter_range().0, t);\n\n assert_near!(part1.parameter_range().1, t1);\n\n\n\n let p = rand::random::<f64>();\n\n let s = t0 * (1.0 - p) + t * p;\n\n assert_near!(part0.subs(s), curve.subs(s));\n\n assert_near!(part0.der(s), curve.der(s));\n\n assert_near!(part0.der2(s), curve.der2(s));\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 4, "score": 147787.33613616356 }, { "content": "fn exec_parameter_transform_random_test<C>(curve: &C)\n\nwhere\n\n C: 
ParameterTransform,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance + std::ops::Mul<f64, Output = C::Vector>, {\n\n let a = rand::random::<f64>() + 0.5;\n\n let b = rand::random::<f64>() * 2.0;\n\n let transformed = curve.parameter_transformed(a, b);\n\n\n\n let (t0, t1) = curve.parameter_range();\n\n assert_near!(transformed.parameter_range().0, t0 * a + b);\n\n assert_near!(transformed.parameter_range().1, t1 * a + b);\n\n let p = rand::random::<f64>();\n\n let t = (1.0 - p) * t0 + p * t1;\n\n assert_near!(transformed.subs(t * a + b), curve.subs(t));\n\n assert_near!(transformed.der(t * a + b) * a, curve.der(t));\n\n assert_near!(transformed.der2(t * a + b) * a * a, curve.der2(t));\n\n assert_near!(transformed.front(), curve.front());\n\n assert_near!(transformed.back(), curve.back());\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 5, "score": 146032.67379107594 }, { "content": "/// positive test implementation for `Cut` by random transformation\n\npub fn cut_random_test<C>(curve: &C, trials: usize)\n\nwhere\n\n C: Cut,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance, {\n\n (0..trials).for_each(move |_| exec_cut_random_test(curve))\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 6, "score": 135166.48556130409 }, { "content": "/// positive test implementation for `ParameterTransform` by random transformation\n\npub fn parameter_transform_random_test<C>(curve: &C, trials: usize)\n\nwhere\n\n C: ParameterTransform,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance + std::ops::Mul<f64, Output = C::Vector>, {\n\n (0..trials).for_each(move |_| exec_parameter_transform_random_test(curve))\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 7, "score": 133557.77594861228 }, { "content": "/// Searches the nearest parameter by Newton's method.\n\npub fn search_nearest_parameter<C>(\n\n curve: &C,\n\n point: C::Point,\n\n hint: f64,\n\n trials: 
usize,\n\n) -> Option<f64>\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: EuclideanSpace<Scalar = f64, Diff = C::Vector>,\n\n C::Vector: InnerSpace<Scalar = f64> + Tolerance,\n\n{\n\n let pt = curve.subs(hint);\n\n let der = curve.der(hint);\n\n let der2 = curve.der2(hint);\n\n let f = der.dot(pt - point);\n\n let fprime = der2.dot(pt - point) + der.magnitude2();\n\n if f.so_small2() || fprime.so_small() {\n\n return Some(hint);\n\n } else if trials == 0 {\n\n None\n\n } else {\n\n search_nearest_parameter(curve, point, hint - f / fprime, trials - 1)\n\n }\n\n}\n\n\n", "file_path": "truck-geotrait/src/algo/curve.rs", "rank": 8, "score": 133377.71264151583 }, { "content": "/// Divides the domain into equal parts, examines all the values, and returns `t` such that `curve.subs(t)` is closest to `point`.\n\n/// This method is useful to get an efficient hint of `search_nearest_parameter`.\n\npub fn presearch<C>(curve: &C, point: C::Point, range: (f64, f64), division: usize) -> f64\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: MetricSpace<Metric = f64> + Copy, {\n\n let (t0, t1) = range;\n\n let mut res = t0;\n\n let mut min = std::f64::INFINITY;\n\n for i in 0..=division {\n\n let p = i as f64 / division as f64;\n\n let t = t0 * (1.0 - p) + t1 * p;\n\n let dist = curve.subs(t).distance2(point);\n\n if dist < min {\n\n min = dist;\n\n res = t;\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "truck-geotrait/src/algo/curve.rs", "rank": 9, "score": 127120.85326810353 }, { "content": "/// Searches the parameter by Newton's method.\n\npub fn search_parameter<C>(curve: &C, point: C::Point, hint: f64, trials: usize) -> Option<f64>\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: EuclideanSpace<Scalar = f64, Diff = C::Vector>,\n\n C::Vector: InnerSpace<Scalar = f64> + Tolerance, {\n\n search_nearest_parameter(curve, point, hint, trials).and_then(|t| {\n\n match point.to_vec().near(&curve.subs(t).to_vec()) {\n\n true => Some(t),\n\n false => None,\n\n }\n\n })\n\n}\n\n\n", 
"file_path": "truck-geotrait/src/algo/curve.rs", "rank": 10, "score": 125863.21130952184 }, { "content": "#[allow(dead_code)]\n\nfn emap_subroutin<P, Q, C, D>(\n\n edge0: &Edge<P, C>,\n\n edge1: &Edge<Q, D>,\n\n vmap: &mut HashMap<VertexID<P>, VertexID<Q>>,\n\n emap: &mut HashMap<EdgeID<C>, EdgeID<D>>,\n\n) -> bool {\n\n match emap.get(&edge0.id()) {\n\n Some(got) => *got == edge1.id(),\n\n None => {\n\n emap.insert(edge0.id(), edge1.id());\n\n vmap_subroutin(edge0.front(), edge1.front(), vmap)\n\n && vmap_subroutin(edge0.back(), edge1.back(), vmap)\n\n }\n\n }\n\n}\n\n\n", "file_path": "truck-topology/src/compress.rs", "rank": 11, "score": 125690.50086193292 }, { "content": "#[inline(always)]\n\npub fn read<R: Read>(reader: R, stl_type: STLType) -> Result<PolygonMesh> {\n\n STLReader::new(reader, stl_type)?.collect()\n\n}\n", "file_path": "truck-polymesh/src/stl.rs", "rank": 12, "score": 123492.01189566552 }, { "content": "pub fn read_texture(handler: &DeviceHandler, texture: &Texture) -> Vec<u8> {\n\n let (device, queue, config) = (handler.device(), handler.queue(), handler.config());\n\n let size = (config.width * config.height * 4) as u64;\n\n let buffer = device.create_buffer(&BufferDescriptor {\n\n label: None,\n\n mapped_at_creation: false,\n\n usage: BufferUsages::COPY_DST | BufferUsages::MAP_READ,\n\n size,\n\n });\n\n let mut encoder = device.create_command_encoder(&CommandEncoderDescriptor { label: None });\n\n encoder.copy_texture_to_buffer(\n\n texture_copy_view(&texture),\n\n buffer_copy_view(&buffer, (config.width, config.height)),\n\n Extent3d {\n\n width: config.width,\n\n height: config.height,\n\n depth_or_array_layers: 1,\n\n },\n\n );\n\n queue.submit(Some(encoder.finish()));\n\n read_buffer(device, &buffer)\n\n}\n\n\n", "file_path": "truck-platform/tests/common.rs", "rank": 13, "score": 122548.83272352826 }, { "content": "pub fn read_texture(handler: &DeviceHandler, texture: &Texture) -> Vec<u8> {\n\n let (device, queue, config) = 
(handler.device(), handler.queue(), handler.config());\n\n let size = (config.width * config.height * 4) as u64;\n\n let buffer = device.create_buffer(&BufferDescriptor {\n\n label: None,\n\n mapped_at_creation: false,\n\n usage: BufferUsages::COPY_DST | BufferUsages::MAP_READ,\n\n size,\n\n });\n\n let mut encoder = device.create_command_encoder(&CommandEncoderDescriptor { label: None });\n\n encoder.copy_texture_to_buffer(\n\n texture_copy_view(&texture),\n\n buffer_copy_view(&buffer, (config.width, config.height)),\n\n Extent3d {\n\n width: config.width,\n\n height: config.height,\n\n depth_or_array_layers: 1,\n\n },\n\n );\n\n queue.submit(Some(encoder.finish()));\n\n read_buffer(device, &buffer)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 14, "score": 122548.83272352826 }, { "content": "fn nontex_shape(scene: &mut Scene, creator: &InstanceCreator) -> Vec<u8> {\n\n let (device, config) = (scene.device(), scene.config());\n\n let texture = device.create_texture(&common::texture_descriptor(&config));\n\n let cube: PolygonInstance = creator.create_instance(\n\n &shape_cube(),\n\n &ShapeInstanceDescriptor {\n\n instance_state: InstanceState {\n\n material: Material {\n\n albedo: Vector4::new(1.0, 1.0, 1.0, 1.0),\n\n roughness: 0.5,\n\n reflectance: 0.25,\n\n ambient_ratio: 0.02,\n\n alpha_blend: false,\n\n },\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n },\n\n );\n\n common::render_one(scene, &texture, &cube);\n\n common::read_texture(scene.device_handler(), &texture)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 15, "score": 119844.18196718975 }, { "content": "fn nontex_polygon(scene: &mut Scene, creator: &InstanceCreator) -> Vec<u8> {\n\n let (device, config) = (scene.device(), scene.config());\n\n let texture = device.create_texture(&common::texture_descriptor(&config));\n\n let cube: PolygonInstance = creator.create_instance(\n\n &obj::read(include_bytes!(\"cube.obj\").as_ref()).unwrap(),\n\n 
&PolygonInstanceDescriptor {\n\n instance_state: InstanceState {\n\n material: Material {\n\n albedo: Vector4::new(1.0, 1.0, 1.0, 1.0),\n\n roughness: 0.5,\n\n reflectance: 0.25,\n\n ambient_ratio: 0.02,\n\n alpha_blend: false,\n\n },\n\n ..Default::default()\n\n },\n\n },\n\n );\n\n common::render_one(scene, &texture, &cube);\n\n common::read_texture(scene.device_handler(), &texture)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 16, "score": 119723.99027876994 }, { "content": "fn exec_polygon_bgtest(\n\n scene: &mut Scene,\n\n instance: &PolygonInstance,\n\n answer: &Vec<u8>,\n\n id: usize,\n\n out_dir: String,\n\n) -> bool {\n\n let config = scene.config();\n\n let tex_desc = common::texture_descriptor(&config);\n\n let texture = scene.device().create_texture(&tex_desc);\n\n common::render_one(scene, &texture, instance);\n\n let buffer = common::read_texture(scene.device_handler(), &texture);\n\n let path = format!(\"{}polygon-bgtest-{}.png\", out_dir, id);\n\n common::save_buffer(path, &buffer, PICTURE_SIZE);\n\n common::same_buffer(&answer, &buffer)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 17, "score": 119632.7796406099 }, { "content": "fn test_polygons() -> [PolygonMesh; 2] {\n\n [\n\n obj::read((ATTRS_OBJ.to_string() + TRIS_OBJ).as_bytes()).unwrap(),\n\n obj::read((ATTRS_OBJ.to_string() + QUADS_OBJ).as_bytes()).unwrap(),\n\n ]\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 18, "score": 118987.23341426473 }, { "content": "/// Creates the curve division\n\npub fn parameter_division<C>(curve: &C, range: (f64, f64), tol: f64) -> Vec<f64>\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: EuclideanSpace<Scalar = f64> + MetricSpace<Metric = f64>, {\n\n let p = 0.5 + (0.2 * rand::random::<f64>() - 0.1);\n\n let t = range.0 * (1.0 - p) + range.1 * p;\n\n let pt0 = curve.subs(range.0);\n\n let pt1 = curve.subs(range.1);\n\n let mid = pt0 + (pt1 - pt0) * p;\n\n if 
curve.subs(t).distance(mid) < tol {\n\n vec![range.0, range.1]\n\n } else {\n\n let mid = (range.0 + range.1) / 2.0;\n\n let mut res = parameter_division(curve, (range.0, mid), tol);\n\n let _ = res.pop();\n\n res.extend(parameter_division(curve, (mid, range.1), tol));\n\n res\n\n }\n\n}\n", "file_path": "truck-geotrait/src/algo/curve.rs", "rank": 19, "score": 118568.00013903566 }, { "content": "fn main() {\n\n let args: Vec<_> = std::env::args().collect();\n\n if args.len() < 2 {\n\n panic!(\"usage: tessellate_shape <input json file> <output json file>\\nThe default <output file> is output.obj.\")\n\n }\n\n let file = std::fs::File::open(&args[1]).unwrap();\n\n let solid = Solid::extract(serde_json::from_reader(file).unwrap()).unwrap();\n\n let mut poly = solid.triangulation(0.01).unwrap().into_polygon();\n\n poly.put_together_same_attrs().remove_unused_attrs();\n\n let mut string = Vec::<u8>::new();\n\n truck_polymesh::obj::write(&poly, &mut string).unwrap();\n\n if args.len() > 2 {\n\n std::fs::write(&args[2], &string).unwrap();\n\n } else {\n\n std::fs::write(\"output.obj\", &string).unwrap();\n\n }\n\n}\n", "file_path": "truck-meshalgo/examples/tessellate_shape.rs", "rank": 20, "score": 115008.17413922466 }, { "content": "fn tex_polygon(\n\n scene: &mut Scene,\n\n creator: &InstanceCreator,\n\n gradtex: &Arc<DynamicImage>,\n\n) -> Vec<u8> {\n\n let (device, config) = (scene.device(), scene.config());\n\n let texture = device.create_texture(&common::texture_descriptor(&config));\n\n let attach = creator.create_texture(gradtex);\n\n let cube: PolygonInstance = creator.create_instance(\n\n &obj::read(include_bytes!(\"cube.obj\").as_ref()).unwrap(),\n\n &PolygonInstanceDescriptor {\n\n instance_state: InstanceState {\n\n material: Material {\n\n albedo: Vector4::new(1.0, 1.0, 1.0, 1.0),\n\n roughness: 0.5,\n\n reflectance: 0.25,\n\n ambient_ratio: 0.02,\n\n alpha_blend: false,\n\n },\n\n texture: Some(attach),\n\n ..Default::default()\n\n },\n\n },\n\n );\n\n 
common::render_one(scene, &texture, &cube);\n\n common::read_texture(scene.device_handler(), &texture)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 21, "score": 114857.39865682018 }, { "content": "fn main() {\n\n MyApp::run();\n\n}\n", "file_path": "truck-rendimpl/examples/simple-shape-viewer.rs", "rank": 22, "score": 112427.10621485658 }, { "content": "pub fn sphere(center: Point3, radius: f64, udiv: usize, vdiv: usize) -> PolygonMesh {\n\n let positions = (0..udiv)\n\n .flat_map(move |i| {\n\n (0..vdiv).map(move |j| {\n\n let u = 2.0 * PI * i as f64 / udiv as f64;\n\n let v = PI * j as f64 / (vdiv - 1) as f64;\n\n center + radius * Vector3::new(u.cos() * v.sin(), u.sin() * v.sin(), v.cos())\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n let faces = Faces::from_iter((0..udiv).flat_map(move |i| {\n\n (0..vdiv - 1).map(move |j| {\n\n [\n\n i * vdiv + j,\n\n i * vdiv + (j + 1) % vdiv,\n\n (i + 1) % udiv * vdiv + (j + 1) % vdiv,\n\n (i + 1) % udiv * vdiv + j,\n\n ]\n\n })\n\n })); \n\n PolygonMesh::new(positions, Vec::new(), Vec::new(), faces)\n\n}\n", "file_path": "truck-meshalgo/tests/common/shapes.rs", "rank": 23, "score": 109950.93530048442 }, { "content": "fn shape_cube() -> Solid {\n\n let s = builder::vertex(Point3::new(0.0, 0.0, 0.0));\n\n let s = builder::tsweep(&s, Vector3::unit_x());\n\n let s = builder::tsweep(&s, Vector3::unit_y());\n\n builder::tsweep(&s, Vector3::unit_z())\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 24, "score": 109898.15981916465 }, { "content": "fn create_mesh<S>(surface: &S, div0: Vec<f64>, div1: Vec<f64>) -> StructuredMesh\n\nwhere S: ParametricSurface3D {\n\n let mut positions = vec![Vec::with_capacity(div1.len()); div0.len()];\n\n let mut normals = vec![Vec::with_capacity(div1.len()); div0.len()];\n\n div0.iter()\n\n .zip(positions.iter_mut().zip(normals.iter_mut()))\n\n .for_each(|(u, (prow, nrow))| {\n\n div1.iter().for_each(move|v| {\n\n prow.push(surface.subs(*u, *v));\n\n 
nrow.push(surface.normal(*u, *v));\n\n })\n\n });\n\n StructuredMesh {\n\n positions: positions,\n\n uv_division: Some((div0, div1)),\n\n normals: Some(normals),\n\n }\n\n}\n", "file_path": "truck-polymesh/src/meshing_shape.rs", "rank": 25, "score": 107970.76328121149 }, { "content": "fn tex_shape(scene: &mut Scene, creator: &InstanceCreator, gradtex: &Arc<DynamicImage>) -> Vec<u8> {\n\n let (device, config) = (scene.device(), scene.config());\n\n let texture = device.create_texture(&common::texture_descriptor(&config));\n\n let attach = creator.create_texture(gradtex);\n\n let cube: PolygonInstance = creator.create_instance(\n\n &shape_cube(),\n\n &ShapeInstanceDescriptor {\n\n instance_state: InstanceState {\n\n material: Material {\n\n albedo: Vector4::new(1.0, 1.0, 1.0, 1.0),\n\n roughness: 0.5,\n\n reflectance: 0.25,\n\n ambient_ratio: 0.02,\n\n alpha_blend: false,\n\n },\n\n texture: Some(attach),\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n },\n\n );\n\n common::render_one(scene, &texture, &cube);\n\n common::read_texture(scene.device_handler(), &texture)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 26, "score": 107873.3772749806 }, { "content": "fn all_nor_mut(faces: &mut Faces) -> impl Iterator<Item = &mut usize> {\n\n faces\n\n .face_iter_mut()\n\n .flatten()\n\n .filter_map(move |v| v.nor.as_mut())\n\n}\n\n\n\nimpl OptimizingFilter for PolygonMesh {\n\n fn remove_unused_attrs(&mut self) -> &mut Self {\n\n let mesh = self.debug_editor();\n\n let pos_iter = all_pos_mut(mesh.faces);\n\n let idcs = sub_remove_unused_attrs(pos_iter, mesh.positions.len());\n\n *mesh.positions = idcs.iter().map(|i| mesh.positions[*i]).collect();\n\n let uv_iter = all_uv_mut(mesh.faces);\n\n let idcs = sub_remove_unused_attrs(uv_iter, mesh.uv_coords.len());\n\n *mesh.uv_coords = idcs.iter().map(|i| mesh.uv_coords[*i]).collect();\n\n let nor_iter = all_nor_mut(mesh.faces);\n\n let idcs = sub_remove_unused_attrs(nor_iter, 
mesh.normals.len());\n\n *mesh.normals = idcs.iter().map(|i| mesh.normals[*i]).collect();\n\n drop(mesh);\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 27, "score": 107673.3813462841 }, { "content": "#[allow(dead_code)]\n\nfn same_topology<P, C, S, Q, D, T>(one: &Shell<P, C, S>, other: &Shell<Q, D, T>) -> bool {\n\n let mut vmap = HashMap::<VertexID<P>, VertexID<Q>>::new();\n\n let mut emap = HashMap::<EdgeID<C>, EdgeID<D>>::new();\n\n if one.len() != other.len() {\n\n return false;\n\n }\n\n for (face0, face1) in one.iter().zip(other.iter()) {\n\n let biters0 = face0.boundary_iters();\n\n let biters1 = face1.boundary_iters();\n\n if biters0.len() != biters1.len() {\n\n return false;\n\n }\n\n for (biter0, biter1) in biters0.into_iter().zip(biters1) {\n\n if biter0.len() != biter1.len() {\n\n return false;\n\n }\n\n for (edge0, edge1) in biter0.zip(biter1) {\n\n if !emap_subroutin(&edge0, &edge1, &mut vmap, &mut emap) {\n\n return false;\n\n }\n\n }\n\n }\n\n }\n\n true\n\n}\n", "file_path": "truck-topology/src/compress.rs", "rank": 28, "score": 106297.35259065396 }, { "content": "fn all_pos_mut(faces: &mut Faces) -> impl Iterator<Item = &mut usize> {\n\n faces.face_iter_mut().flatten().map(move |v| &mut v.pos)\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 29, "score": 106022.54960899129 }, { "content": "fn all_uv_mut(faces: &mut Faces) -> impl Iterator<Item = &mut usize> {\n\n faces\n\n .face_iter_mut()\n\n .flatten()\n\n .filter_map(move |v| v.uv.as_mut())\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 30, "score": 106022.54960899129 }, { "content": "fn create_readme(path: &str) {\n\n let mut readme = std::fs::File::create(\"README.md\").unwrap();\n\n let output = Command::new(\"cargo\").args(&[\"readme\"]).output().unwrap();\n\n let output = String::from_utf8(output.stdout).unwrap();\n\n let lines: Vec<_> = output.split(\"\\n\").collect();\n\n readme\n\n 
.write_fmt(format_args!(\n\n \"{}\\n\\n{}\\n\\n{}\\n\",\n\n lines[0],\n\n badge_url(path),\n\n lines[2]\n\n ))\n\n .unwrap();\n\n let dir = match std::fs::read_dir(\"examples\") {\n\n Ok(got) => got,\n\n Err(_) => return,\n\n };\n\n\n\n readme\n\n .write_fmt(format_args!(\"\\n## Sample Codes\\n\"))\n", "file_path": "readme-generator/src/main.rs", "rank": 31, "score": 105397.1540293894 }, { "content": "#[test]\n\nfn polymesh_nontex_bind_group_test() {\n\n common::os_alt_exec_test(exec_polymesh_nontex_bind_group_test)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 32, "score": 103389.38674279934 }, { "content": "#[test]\n\nfn polymesh_tex_bind_group_test() {\n\n common::os_alt_exec_test(exec_polymesh_tex_bind_group_test)\n\n}\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 33, "score": 103389.38674279934 }, { "content": "pub fn texture_descriptor(config: &SurfaceConfiguration) -> TextureDescriptor<'static> {\n\n TextureDescriptor {\n\n label: None,\n\n size: Extent3d {\n\n width: config.width,\n\n height: config.height,\n\n depth_or_array_layers: 1,\n\n },\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: TextureDimension::D2,\n\n format: config.format,\n\n usage: TextureUsages::RENDER_ATTACHMENT | TextureUsages::COPY_SRC,\n\n }\n\n}\n\n\n", "file_path": "truck-platform/tests/common.rs", "rank": 34, "score": 102792.25170867638 }, { "content": "pub fn texture_descriptor(config: &SurfaceConfiguration) -> TextureDescriptor<'static> {\n\n TextureDescriptor {\n\n label: None,\n\n size: Extent3d {\n\n width: config.width,\n\n height: config.height,\n\n depth_or_array_layers: 1,\n\n },\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: TextureDimension::D2,\n\n format: config.format,\n\n usage: TextureUsages::RENDER_ATTACHMENT | TextureUsages::COPY_SRC,\n\n }\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 35, "score": 102792.25170867638 }, { "content": "fn 
colliding_segment_pairs(sort_endpoints: Vec<EndPoint>) -> impl Iterator<Item = (usize, usize)> {\n\n let mut current = [Vec::<usize>::new(), Vec::<usize>::new()];\n\n sort_endpoints\n\n .into_iter()\n\n .filter_map(\n\n move |EndPoint {\n\n r#type,\n\n segnum,\n\n index,\n\n ..\n\n }| match r#type {\n\n EndPointType::Front => {\n\n current[segnum].push(index);\n\n Some(current[1 - segnum].clone().into_iter().map(move |i| {\n\n if segnum == 0 {\n\n (index, i)\n\n } else {\n\n (i, index)\n\n }\n\n }))\n", "file_path": "truck-meshalgo/src/analyzers/collision.rs", "rank": 36, "score": 101526.09246377081 }, { "content": "pub fn init_device(instance: &Instance) -> (Arc<Device>, Arc<Queue>) {\n\n futures::executor::block_on(async {\n\n let adapter = instance\n\n .request_adapter(&RequestAdapterOptions {\n\n power_preference: PowerPreference::HighPerformance,\n\n compatible_surface: None,\n\n })\n\n .await\n\n .unwrap();\n\n writeln!(&mut std::io::stderr(), \"{:?}\", adapter.get_info()).unwrap();\n\n let (device, queue) = adapter\n\n .request_device(\n\n &DeviceDescriptor {\n\n features: Default::default(),\n\n limits: Default::default(),\n\n label: None,\n\n },\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n (Arc::new(device), Arc::new(queue))\n\n })\n\n}\n\n\n", "file_path": "truck-platform/tests/common.rs", "rank": 37, "score": 100308.36456501884 }, { "content": "pub fn init_device(instance: &Instance) -> (Arc<Device>, Arc<Queue>) {\n\n futures::executor::block_on(async {\n\n let adapter = instance\n\n .request_adapter(&RequestAdapterOptions {\n\n power_preference: PowerPreference::HighPerformance,\n\n compatible_surface: None,\n\n })\n\n .await\n\n .unwrap();\n\n writeln!(&mut std::io::stderr(), \"{:?}\", adapter.get_info()).unwrap();\n\n let (device, queue) = adapter\n\n .request_device(\n\n &DeviceDescriptor {\n\n features: Default::default(),\n\n limits: Limits::default(),\n\n label: None,\n\n },\n\n None,\n\n )\n\n .await\n\n .unwrap();\n\n (Arc::new(device), 
Arc::new(queue))\n\n })\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 38, "score": 100308.36456501884 }, { "content": "#[inline(always)]\n\npub fn image2texture(device_handler: &DeviceHandler, image: &DynamicImage) -> Texture {\n\n let buffer = image.to_rgba8();\n\n imagebuffer2texture(device_handler, &buffer, TextureFormat::Rgba8Unorm)\n\n}\n\n\n", "file_path": "truck-rendimpl/src/image2texture.rs", "rank": 39, "score": 99881.04682598091 }, { "content": "type MeshedShell = Shell<Point3, PolylineCurve, PolygonMesh>;\n\n\n\n/// Tessellates faces\n\npub(super) fn tessellation<'a, C, S>(shell: &Shell<Point3, C, S>, tol: f64) -> Option<MeshedShell>\n\nwhere\n\n C: PolylineableCurve + 'a,\n\n S: MeshableSurface + 'a, {\n\n let mut shell0 = Shell::new();\n\n let mut vmap: HashMap<VertexID<Point3>, Vertex<Point3>> = HashMap::new();\n\n for vertex in shell.vertex_iter() {\n\n if vmap.get(&vertex.id()).is_none() {\n\n let new_vertex = vertex.mapped(Point3::clone);\n\n vmap.insert(vertex.id(), new_vertex);\n\n }\n\n }\n\n let mut edge_map: HashMap<EdgeID<C>, Edge<Point3, PolylineCurve>> = HashMap::new();\n\n for face in shell.face_iter() {\n\n let mut wires = Vec::new();\n\n for biter in face.absolute_boundaries() {\n\n let mut wire = Wire::new();\n", "file_path": "truck-meshalgo/src/tessellation/triangulation.rs", "rank": 40, "score": 97608.94748750178 }, { "content": "pub fn pointcloud_in_polygon_neighborhood(\n\n polygon: &PolygonMesh,\n\n points: &Vec<Point3>,\n\n tol: f64,\n\n) -> bool {\n\n let triangulate = Triangulate::new(polygon);\n\n let mut current = Vec::new();\n\n sorted_endpoints_by_polymesh_points(&triangulate, points, tol)\n\n .into_iter()\n\n .all(move |EndPoint { r#type, index, .. 
}| match r#type {\n\n EndPointType::Front => {\n\n current.push(index);\n\n true\n\n }\n\n EndPointType::Back => {\n\n let i = current\n\n .iter()\n\n .enumerate()\n\n .find(|(_, idx)| **idx == index)\n\n .unwrap()\n", "file_path": "truck-meshalgo/src/analyzers/point_cloud/sort_end_points.rs", "rank": 41, "score": 97108.58954290685 }, { "content": "fn collision(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Vec<(Point3, Point3)> {\n\n let tris0 = Triangulate::new(poly0);\n\n let tris1 = Triangulate::new(poly1);\n\n let iter0 = tris0.into_iter().map(|face| {\n\n [\n\n poly0.positions()[face[0].pos],\n\n poly0.positions()[face[1].pos],\n\n poly0.positions()[face[2].pos],\n\n ]\n\n });\n\n let iter1 = tris1.into_iter().map(|face| {\n\n [\n\n poly1.positions()[face[0].pos],\n\n poly1.positions()[face[1].pos],\n\n poly1.positions()[face[2].pos],\n\n ]\n\n });\n\n colliding_segment_pairs(sorted_endpoints(iter0, iter1))\n\n .filter_map(|(idx0, idx1)| {\n\n let face0 = tris0.get(idx0);\n", "file_path": "truck-meshalgo/src/analyzers/collision.rs", "rank": 42, "score": 96918.95877339292 }, { "content": "fn are_colliding(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Option<(Point3, Point3)> {\n\n let tris0 = Triangulate::new(poly0);\n\n let tris1 = Triangulate::new(poly1);\n\n let iter0 = tris0.into_iter().map(|face| {\n\n [\n\n poly0.positions()[face[0].pos],\n\n poly0.positions()[face[1].pos],\n\n poly0.positions()[face[2].pos],\n\n ]\n\n });\n\n let iter1 = tris1.into_iter().map(|face| {\n\n [\n\n poly1.positions()[face[0].pos],\n\n poly1.positions()[face[1].pos],\n\n poly1.positions()[face[2].pos],\n\n ]\n\n });\n\n colliding_segment_pairs(sorted_endpoints(iter0, iter1)).find_map(|(idx0, idx1)| {\n\n let face0 = tris0.get(idx0);\n\n let tri0 = [\n", "file_path": "truck-meshalgo/src/analyzers/collision.rs", "rank": 43, "score": 96918.95877339292 }, { "content": "fn degenerate_quadrangle(quad: [Vertex; 4]) -> QuadrangleType {\n\n if quad[0].pos == quad[2].pos || quad[1].pos == 
quad[3].pos {\n\n QuadrangleType::TotallyDegenerate\n\n } else if quad[0].pos == quad[1].pos && quad[2].pos == quad[3].pos {\n\n QuadrangleType::TotallyDegenerate\n\n } else if quad[1].pos == quad[2].pos && quad[3].pos == quad[0].pos {\n\n QuadrangleType::TotallyDegenerate\n\n } else if quad[0].pos == quad[1].pos || quad[1].pos == quad[2].pos {\n\n QuadrangleType::Triangle([quad[0], quad[2], quad[3]])\n\n } else if quad[2].pos == quad[3].pos || quad[3].pos == quad[0].pos {\n\n QuadrangleType::Triangle([quad[0], quad[1], quad[2]])\n\n } else {\n\n QuadrangleType::NonDegenerate\n\n }\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 44, "score": 95420.9466625175 }, { "content": "fn closed_polyline_orientation(pts: &Vec<Point3>) -> bool {\n\n pts.windows(2).fold(0.0, |sum, pt| {\n\n sum + (pt[1][0] + pt[0][0]) * (pt[1][1] - pt[0][1])\n\n }) >= 0.0\n\n}\n\n\n\npub(super) fn attach_plane(mut pts: Vec<Point3>) -> Option<Plane> {\n\n let center = pts.iter().fold(Point3::origin(), |sum, pt| sum + pt.to_vec()) / pts.len() as f64;\n\n let normal = pts.windows(2).fold(Vector3::zero(), |sum, pt| {\n\n sum + (pt[0] - center).cross(pt[1] - center)\n\n });\n\n let n = match normal.so_small() {\n\n true => return None,\n\n false => normal.normalize(),\n\n };\n\n let a = match (n[2].abs() - 1.0).so_small() {\n\n true => Vector3::new(0.0, n[2], -n[1]).normalize(),\n\n false => Vector3::new(n[1], -n[0], 0.0).normalize(),\n\n };\n\n let mat: Matrix4 = Matrix3::from_cols(a, n.cross(a), n).into();\n", "file_path": "truck-modeling/src/geom_impls.rs", "rank": 45, "score": 93463.647822336 }, { "content": "pub fn swap_chain_descriptor(size: (u32, u32)) -> SurfaceConfiguration {\n\n SurfaceConfiguration {\n\n usage: TextureUsages::RENDER_ATTACHMENT,\n\n format: TextureFormat::Rgba8Unorm,\n\n width: size.0,\n\n height: size.1,\n\n present_mode: PresentMode::Mailbox,\n\n }\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 46, "score": 
90249.39020676001 }, { "content": "/// Reads mesh data from wavefront obj file.\n\npub fn read<R: Read>(reader: R) -> Result<PolygonMesh> {\n\n let mut positions = Vec::new();\n\n let mut uv_coords = Vec::new();\n\n let mut normals = Vec::new();\n\n let mut faces = Faces::default();\n\n let reader = BufReader::new(reader);\n\n for line in reader.lines().map(|s| s.unwrap()) {\n\n let mut args = line.split_whitespace();\n\n if let Some(first_str) = args.next() {\n\n if first_str == \"v\" {\n\n let x = args.next().unwrap().parse::<f64>()?;\n\n let y = args.next().unwrap().parse::<f64>()?;\n\n let z = args.next().unwrap().parse::<f64>()?;\n\n positions.push(Point3::new(x, y, z));\n\n } else if first_str == \"vt\" {\n\n let u = args.next().unwrap().parse::<f64>()?;\n\n let v = args.next().unwrap().parse::<f64>()?;\n\n uv_coords.push(Vector2::new(u, v));\n\n } else if first_str == \"vn\" {\n\n let x = args.next().unwrap().parse::<f64>()?;\n", "file_path": "truck-polymesh/src/obj.rs", "rank": 47, "score": 87717.30259151848 }, { "content": "use crate::*;\n\n\n\nimpl Default for Material {\n\n #[inline(always)]\n\n fn default() -> Material {\n\n Material {\n\n albedo: Vector4::new(1.0, 1.0, 1.0, 1.0),\n\n roughness: 0.5,\n\n reflectance: 0.25,\n\n ambient_ratio: 0.02,\n\n alpha_blend: false,\n\n }\n\n }\n\n}\n\n\n\nimpl Material {\n\n /// Creates a `UNIFORM` buffer of material.\n\n ///\n\n /// The bind group provided by the instances holds this uniform buffer.\n\n /// # Shader Examples\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 48, "score": 87567.94812361298 }, { "content": " InstanceState {\n\n matrix: Matrix4::identity(),\n\n material: Default::default(),\n\n texture: None,\n\n backface_culling: true,\n\n }\n\n }\n\n}\n\n\n\nimpl InstanceState {\n\n /// Creates a `UNIFORM` buffer of instance matrix.\n\n ///\n\n /// The bind group provided by the instances holds this uniform buffer.\n\n /// # Shader Examples\n\n /// ```glsl\n\n /// layout(set = 
1, binding = 0) uniform ModelMatrix {\n\n /// mat4 uniform_matrix;\n\n /// };\n\n /// ```\n\n #[inline(always)]\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 49, "score": 87560.6885829115 }, { "content": " }\n\n\n\n #[doc(hidden)]\n\n #[inline(always)]\n\n pub fn bgl_entry() -> PreBindGroupLayoutEntry {\n\n PreBindGroupLayoutEntry {\n\n visibility: ShaderStages::FRAGMENT,\n\n ty: BindingType::Buffer {\n\n ty: BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: None,\n\n },\n\n count: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for InstanceState {\n\n #[inline(always)]\n\n fn default() -> InstanceState {\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 50, "score": 87560.20845772878 }, { "content": " }\n\n\n\n /// Creates texture view and sampler of the instance's texture image.\n\n ///\n\n /// The bind group provided by the instances holds this uniform buffer.\n\n /// # Shader Examples\n\n /// ```glsl\n\n /// layout(set = 1, binding = 2) uniform texture2D texture_view;\n\n /// layout(set = 1, binding = 3) uniform sampler texture_sampler;\n\n /// ```\n\n pub fn textureview_and_sampler(&self, device: &Device) -> (TextureView, Sampler) {\n\n let texture = self.texture.as_ref().unwrap();\n\n let view = texture.create_view(&Default::default());\n\n let sampler = device.create_sampler(&SamplerDescriptor {\n\n mag_filter: FilterMode::Linear,\n\n min_filter: FilterMode::Nearest,\n\n mipmap_filter: FilterMode::Nearest,\n\n lod_min_clamp: -100.0,\n\n lod_max_clamp: 100.0,\n\n ..Default::default()\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 51, "score": 87558.57072349495 }, { "content": " pub fn matrix_buffer(&self, device: &Device) -> BufferHandler {\n\n let matrix_data: [[f32; 4]; 4] = self.matrix.cast::<f32>().unwrap().into();\n\n BufferHandler::from_slice(&matrix_data, device, BufferUsages::UNIFORM)\n\n }\n\n\n\n #[doc(hidden)]\n\n #[inline(always)]\n\n pub fn 
matrix_bgl_entry() -> PreBindGroupLayoutEntry {\n\n PreBindGroupLayoutEntry {\n\n visibility: ShaderStages::VERTEX | ShaderStages::FRAGMENT,\n\n ty: BindingType::Buffer {\n\n ty: BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: None,\n\n },\n\n count: None,\n\n }\n\n }\n\n\n\n /// Creates a `UNIFORM` buffer of material.\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 52, "score": 87556.10893165975 }, { "content": " ///\n\n /// The bind group provided by the instances holds this uniform buffer.\n\n /// # Shader Examples\n\n /// ```glsl\n\n /// layout(set = 1, binding = 1) uniform Material {\n\n /// vec4 albedo;\n\n /// float roughness;\n\n /// float reflectance;\n\n /// float ambient_ratio;\n\n /// };\n\n /// ```\n\n #[inline(always)]\n\n pub fn material_buffer(&self, device: &Device) -> BufferHandler {\n\n self.material.buffer(device)\n\n }\n\n\n\n #[doc(hidden)]\n\n #[inline(always)]\n\n pub fn material_bgl_entry() -> PreBindGroupLayoutEntry {\n\n Material::bgl_entry()\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 53, "score": 87550.9360721911 }, { "content": " });\n\n (view, sampler)\n\n }\n\n\n\n #[doc(hidden)]\n\n #[inline(always)]\n\n pub fn textureview_bgl_entry() -> PreBindGroupLayoutEntry {\n\n PreBindGroupLayoutEntry {\n\n visibility: ShaderStages::FRAGMENT,\n\n ty: BindingType::Texture {\n\n view_dimension: TextureViewDimension::D2,\n\n sample_type: TextureSampleType::Float { filterable: true },\n\n multisampled: false,\n\n },\n\n count: None,\n\n }\n\n }\n\n\n\n #[doc(hidden)]\n\n #[inline(always)]\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 54, "score": 87549.84863576519 }, { "content": " pub fn sampler_bgl_entry() -> PreBindGroupLayoutEntry {\n\n PreBindGroupLayoutEntry {\n\n visibility: ShaderStages::FRAGMENT,\n\n ty: BindingType::Sampler {\n\n filtering: true,\n\n comparison: false,\n\n },\n\n count: None,\n\n }\n\n }\n\n}\n", "file_path": 
"truck-rendimpl/src/instance_descriptor.rs", "rank": 55, "score": 87549.32529068901 }, { "content": " /// ```glsl\n\n /// layout(set = 1, binding = 1) uniform Material {\n\n /// vec4 albedo;\n\n /// float roughness;\n\n /// float reflectance;\n\n /// float ambient_ratio;\n\n /// };\n\n /// ```\n\n #[inline(always)]\n\n pub fn buffer(&self, device: &Device) -> BufferHandler {\n\n let material_data: [f32; 7] = [\n\n self.albedo[0] as f32,\n\n self.albedo[1] as f32,\n\n self.albedo[2] as f32,\n\n self.albedo[3] as f32,\n\n self.roughness as f32,\n\n self.reflectance as f32,\n\n self.ambient_ratio as f32,\n\n ];\n\n BufferHandler::from_slice(&material_data, device, BufferUsages::UNIFORM)\n", "file_path": "truck-rendimpl/src/instance_descriptor.rs", "rank": 56, "score": 87545.88733037162 }, { "content": "use crate::*;\n\n\n\nimpl PolygonInstance {\n\n /// Clone the instance as another drawn element.\n\n #[inline(always)]\n\n pub fn clone_instance(&self) -> PolygonInstance {\n\n PolygonInstance {\n\n polygon: self.polygon.clone(),\n\n state: self.state.clone(),\n\n shaders: self.shaders.clone(),\n\n id: RenderID::gen(),\n\n }\n\n }\n\n /// Returns a reference to the instance descriptor.\n\n #[inline(always)]\n\n pub fn instance_state(&self) -> &InstanceState {\n\n &self.state\n\n }\n\n /// Returns the mutable reference to instance descriptor.\n\n #[inline(always)]\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 57, "score": 87361.14302869227 }, { "content": "}\n\n\n\nimpl Rendered for PolygonInstance {\n\n impl_render_id!(id);\n\n\n\n #[inline(always)]\n\n fn vertex_buffer(&self, _: &DeviceHandler) -> (Arc<BufferHandler>, Option<Arc<BufferHandler>>) {\n\n let polygon = self.polygon.clone();\n\n (polygon.0, Some(polygon.1))\n\n }\n\n #[inline(always)]\n\n fn bind_group_layout(&self, device_handler: &DeviceHandler) -> Arc<BindGroupLayout> {\n\n Arc::new(match self.state.texture.is_some() {\n\n true => self.textured_bdl(device_handler.device()),\n\n false 
=> self.non_textured_bdl(device_handler.device()),\n\n })\n\n }\n\n #[inline(always)]\n\n fn bind_group(\n\n &self,\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 58, "score": 87349.50941912766 }, { "content": " pub fn instance_state_mut(&mut self) -> &mut InstanceState {\n\n &mut self.state\n\n }\n\n\n\n /// swap vertex buffers\n\n #[inline(always)]\n\n pub fn swap_vertex(&mut self, other: &mut PolygonInstance) {\n\n let polygon = self.polygon.clone();\n\n self.polygon = other.polygon.clone();\n\n other.polygon = polygon;\n\n }\n\n\n\n #[inline(always)]\n\n fn non_textured_bdl(&self, device: &Device) -> BindGroupLayout {\n\n bind_group_util::create_bind_group_layout(device, {\n\n &[\n\n InstanceState::matrix_bgl_entry(),\n\n InstanceState::material_bgl_entry(),\n\n ]\n\n })\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 59, "score": 87345.60896466061 }, { "content": " self.shaders.tex_fragment_entry,\n\n ),\n\n false => (&self.shaders.fragment_module, self.shaders.fragment_entry),\n\n };\n\n let cull_mode = match self.state.backface_culling {\n\n true => Some(wgpu::Face::Back),\n\n false => None,\n\n };\n\n let blend = match self.state.material.alpha_blend {\n\n true => Some(BlendState::ALPHA_BLENDING),\n\n false => Some(BlendState::REPLACE),\n\n };\n\n let pipeline = device.create_render_pipeline(&RenderPipelineDescriptor {\n\n layout: Some(layout),\n\n vertex: VertexState {\n\n module: &self.shaders.vertex_module,\n\n entry_point: self.shaders.vertex_entry,\n\n buffers: &[VertexBufferLayout {\n\n array_stride: std::mem::size_of::<AttrVertex>() as BufferAddress,\n\n step_mode: VertexStepMode::Vertex,\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 60, "score": 87341.46677873145 }, { "content": " }\n\n\n\n #[inline(always)]\n\n fn textured_bdl(&self, device: &Device) -> BindGroupLayout {\n\n bind_group_util::create_bind_group_layout(\n\n device,\n\n &[\n\n InstanceState::matrix_bgl_entry(),\n\n 
InstanceState::material_bgl_entry(),\n\n InstanceState::textureview_bgl_entry(),\n\n InstanceState::sampler_bgl_entry(),\n\n ],\n\n )\n\n }\n\n\n\n #[inline(always)]\n\n fn non_textured_bg(&self, device: &Device, layout: &BindGroupLayout) -> BindGroup {\n\n bind_group_util::create_bind_group(\n\n device,\n\n layout,\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 61, "score": 87340.56133002746 }, { "content": " device_handler: &DeviceHandler,\n\n layout: &BindGroupLayout,\n\n ) -> Arc<BindGroup> {\n\n Arc::new(match self.state.texture.is_some() {\n\n true => self.textured_bg(device_handler.device(), layout),\n\n false => self.non_textured_bg(&device_handler.device(), layout),\n\n })\n\n }\n\n #[inline(always)]\n\n fn pipeline(\n\n &self,\n\n device_handler: &DeviceHandler,\n\n layout: &PipelineLayout,\n\n sample_count: u32,\n\n ) -> Arc<RenderPipeline> {\n\n let device = device_handler.device();\n\n let config = device_handler.config();\n\n let (fragment_module, fragment_entry) = match self.state.texture.is_some() {\n\n true => (\n\n &self.shaders.tex_fragment_module,\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 62, "score": 87339.84523708509 }, { "content": " module: fragment_module,\n\n entry_point: fragment_entry,\n\n targets: &[ColorTargetState {\n\n format: config.format,\n\n blend,\n\n write_mask: ColorWrites::ALL,\n\n }],\n\n }),\n\n primitive: PrimitiveState {\n\n topology: PrimitiveTopology::TriangleList,\n\n front_face: FrontFace::Ccw,\n\n cull_mode,\n\n polygon_mode: PolygonMode::Fill,\n\n clamp_depth: false,\n\n ..Default::default()\n\n },\n\n depth_stencil: Some(DepthStencilState {\n\n format: TextureFormat::Depth32Float,\n\n depth_write_enabled: true,\n\n depth_compare: wgpu::CompareFunction::Less,\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 63, "score": 87338.00394516064 }, { "content": " attributes: &[\n\n VertexAttribute {\n\n format: VertexFormat::Float32x3,\n\n offset: 0,\n\n 
shader_location: 0,\n\n },\n\n VertexAttribute {\n\n format: VertexFormat::Float32x2,\n\n offset: 3 * 4,\n\n shader_location: 1,\n\n },\n\n VertexAttribute {\n\n format: VertexFormat::Float32x3,\n\n offset: 2 * 4 + 3 * 4,\n\n shader_location: 2,\n\n },\n\n ],\n\n }],\n\n },\n\n fragment: Some(FragmentState {\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 64, "score": 87336.61035413474 }, { "content": " stencil: Default::default(),\n\n bias: Default::default(),\n\n }),\n\n multisample: MultisampleState {\n\n count: sample_count,\n\n mask: !0,\n\n alpha_to_coverage_enabled: true,\n\n },\n\n label: None,\n\n });\n\n Arc::new(pipeline)\n\n }\n\n}\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 65, "score": 87336.36394266375 }, { "content": " vec![\n\n self.state.matrix_buffer(device).binding_resource(),\n\n self.state.material.buffer(device).binding_resource(),\n\n ],\n\n )\n\n }\n\n #[inline(always)]\n\n fn textured_bg(&self, device: &Device, layout: &BindGroupLayout) -> BindGroup {\n\n let (view, sampler) = self.state.textureview_and_sampler(device);\n\n bind_group_util::create_bind_group(\n\n device,\n\n layout,\n\n vec![\n\n self.state.matrix_buffer(device).binding_resource(),\n\n self.state.material.buffer(device).binding_resource(),\n\n BindingResource::TextureView(&view),\n\n BindingResource::Sampler(&sampler),\n\n ],\n\n )\n\n }\n", "file_path": "truck-rendimpl/src/polygon_instance.rs", "rank": 66, "score": 87333.93203893249 }, { "content": "fn exec_polymesh_nontex_bind_group_test(backend: Backends, out_dir: &str) {\n\n let out_dir = out_dir.to_string();\n\n std::fs::create_dir_all(&out_dir).unwrap();\n\n let instance = wgpu::Instance::new(backend);\n\n let (device, queue) = common::init_device(&instance);\n\n let config = Arc::new(Mutex::new(common::swap_chain_descriptor(PICTURE_SIZE)));\n\n let handler = DeviceHandler::new(device, queue, config);\n\n let mut scene = Scene::new(handler, &Default::default());\n\n let 
answer = common::nontex_answer_texture(&mut scene);\n\n let answer = common::read_texture(scene.device_handler(), &answer);\n\n let inst_desc = nontex_inst_desc();\n\n test_polygons()\n\n .iter()\n\n .enumerate()\n\n .for_each(move |(i, polygon)| {\n\n let instance: PolygonInstance = polygon.into_instance(\n\n scene.device_handler(),\n\n &bgcheck_shaders(scene.device_handler()),\n\n &inst_desc,\n\n );\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 67, "score": 86547.61703576036 }, { "content": "fn exec_polymesh_tex_bind_group_test(backend: Backends, out_dir: &str) {\n\n let out_dir = out_dir.to_string();\n\n std::fs::create_dir_all(&out_dir).unwrap();\n\n let instance = wgpu::Instance::new(backend);\n\n let (device, queue) = common::init_device(&instance);\n\n let config = Arc::new(Mutex::new(common::swap_chain_descriptor(PICTURE_SIZE)));\n\n let handler = DeviceHandler::new(device, queue, config);\n\n let mut scene = Scene::new(handler, &Default::default());\n\n let answer = common::random_texture(&mut scene);\n\n let buffer = common::read_texture(scene.device_handler(), &answer);\n\n let pngpath = out_dir.clone() + \"random-texture.png\";\n\n common::save_buffer(pngpath, &buffer, PICTURE_SIZE);\n\n let mut desc = nontex_inst_desc();\n\n let image_buffer =\n\n ImageBuffer::<Rgba<_>, _>::from_raw(PICTURE_SIZE.0, PICTURE_SIZE.1, buffer.clone())\n\n .unwrap();\n\n let attach = image2texture::image2texture(\n\n scene.device_handler(),\n\n &DynamicImage::ImageRgba8(image_buffer),\n\n );\n", "file_path": "truck-rendimpl/tests/polygon_bind_group.rs", "rank": 68, "score": 86547.61703576036 }, { "content": "fn circum_center(pt0: Point3, pt1: Point3, pt2: Point3) -> Point3 {\n\n let vec0 = pt1 - pt0;\n\n let vec1 = pt2 - pt0;\n\n let a2 = vec0.dot(vec0);\n\n let ab = vec0.dot(vec1);\n\n let b2 = vec1.dot(vec1);\n\n let det = a2 * b2 - ab * ab;\n\n let u = (b2 * a2 - ab * b2) / (2.0 * det);\n\n let v = (-ab * a2 + b2 * a2) / (2.0 * det);\n\n pt0 + u * 
vec0 + v * vec1\n\n}\n\n\n\npub(super) fn circle_arc(\n\n point: Vector4,\n\n origin: Point3,\n\n axis: Vector3,\n\n angle: Rad<f64>,\n\n) -> BSplineCurve<Vector4> {\n\n let tmp = Point3::from_homogeneous(point);\n\n let origin = origin + (axis.dot(tmp - origin)) * axis;\n", "file_path": "truck-modeling/src/geom_impls.rs", "rank": 69, "score": 86118.37077283204 }, { "content": "/// Writes obj data to output stream\n\n/// # Examples\n\n/// ```\n\n/// use truck_polymesh::*;\n\n/// let positions = vec![\n\n/// Point3::new(0.0, 0.0, 0.0),\n\n/// Point3::new(1.0, 0.0, 0.0),\n\n/// Point3::new(0.0, 1.0, 0.0),\n\n/// Point3::new(0.0, 0.0, 1.0),\n\n/// Point3::new(1.0, 1.0, 0.0),\n\n/// Point3::new(1.0, 0.0, 1.0),\n\n/// Point3::new(0.0, 1.0, 1.0),\n\n/// Point3::new(1.0, 1.0, 1.0),\n\n/// ];\n\n/// let normals = vec![\n\n/// Vector3::new(1.0, 0.0, 0.0),\n\n/// Vector3::new(0.0, 1.0, 0.0),\n\n/// Vector3::new(0.0, 0.0, 1.0),\n\n/// Vector3::new(-1.0, 0.0, 0.0),\n\n/// Vector3::new(0.0, -1.0, 0.0),\n\n/// Vector3::new(0.0, 0.0, -1.0),\n\n/// ];\n\n/// let faces = Faces::from_iter(&[\n\n/// [(0, None, Some(5)), (1, None, Some(5)), (2, None, Some(5))],\n\n/// [(4, None,Some(5)), (2, None, Some(5)), (1, None, Some(5))],\n\n/// [(1, None, Some(4)), (0, None, Some(4)), (3, None, Some(4))],\n\n/// [(1, None, Some(4)), (3, None, Some(4)), (5, None, Some(4))],\n\n/// [(1, None, Some(0)), (5, None, Some(0)), (4, None, Some(0))],\n\n/// [(4, None, Some(0)), (5, None, Some(0)), (7, None, Some(0))],\n\n/// [(2, None, Some(1)), (4, None, Some(1)), (7, None, Some(1))],\n\n/// [(2, None, Some(1)), (7, None, Some(1)), (6, None, Some(1))],\n\n/// [(0, None, Some(3)), (2, None, Some(3)), (6, None, Some(3))],\n\n/// [(0, None, Some(3)), (6, None, Some(3)), (3, None, Some(3))],\n\n/// [(3, None, Some(2)), (6, None, Some(2)), (7, None, Some(2))],\n\n/// [(3, None, Some(2)), (7, None, Some(2)), (5, None, Some(2))],\n\n/// ]);\n\n/// let mesh = PolygonMesh::new(positions, Vec::new(), normals, 
faces);\n\n/// obj::write(&mesh, std::fs::File::create(\"meshdata.obj\").unwrap());\n\n/// ```\n\npub fn write<W: Write>(mesh: &PolygonMesh, writer: W) -> Result<()> {\n\n sub_write(mesh, &mut BufWriter::new(writer))\n\n}\n\n\n", "file_path": "truck-polymesh/src/obj.rs", "rank": 70, "score": 84995.61097042129 }, { "content": "/// Tessellates one surface trimmed by polyline.\n\nfn trimming_tessellation<S>(surface: &S, polyline: &Polyline, tol: f64) -> PolygonMesh\n\nwhere S: MeshableSurface {\n\n let mut triangulation = CDT::<[f64; 2], FloatKernel>::new();\n\n polyline.insert_to(&mut triangulation);\n\n insert_surface(&mut triangulation, surface, polyline, tol);\n\n let mut mesh = triangulation_into_polymesh(\n\n triangulation.vertices(),\n\n triangulation.triangles(),\n\n surface,\n\n polyline,\n\n );\n\n mesh.make_face_compatible_to_normal();\n\n mesh\n\n}\n\n\n", "file_path": "truck-meshalgo/src/tessellation/triangulation.rs", "rank": 71, "score": 81769.02725578031 }, { "content": "/// Writes obj data to output stream\n\npub fn write_vec<W: Write>(mesh: &Vec<PolygonMesh>, writer: W) -> Result<()> {\n\n let mut writer = BufWriter::new(writer);\n\n for (i, mesh) in mesh.iter().enumerate() {\n\n writer.write_fmt(format_args!(\"g {}\\n\", i))?;\n\n sub_write(mesh, &mut writer)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "truck-polymesh/src/obj.rs", "rank": 72, "score": 80867.63367257663 }, { "content": "fn sphere(center: Point3, radius: f64, udiv: usize, vdiv: usize) -> PolygonMesh {\n\n let positions = (0..udiv)\n\n .flat_map(move |i| {\n\n (0..vdiv).map(move |j| {\n\n let u = 2.0 * PI * i as f64 / udiv as f64;\n\n let v = PI * j as f64 / (vdiv - 1) as f64;\n\n center + radius * Vector3::new(u.cos() * v.sin(), u.sin() * v.sin(), v.cos())\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n let normals = (0..udiv)\n\n .flat_map(move |i| {\n\n (0..vdiv).map(move |j| {\n\n let u = 2.0 * PI * i as f64 / udiv as f64;\n\n let v = PI * j as f64 / (vdiv - 1) as f64;\n\n 
Vector3::new(u.cos() * v.sin(), u.sin() * v.sin(), v.cos())\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n let faces = Faces::from_iter((0..udiv).flat_map(move |i| {\n", "file_path": "truck-rendimpl/examples/collision-sphere.rs", "rank": 73, "score": 80867.63367257663 }, { "content": "fn create_one_component<T>(adjacency: &mut HashMap<T, Vec<T>>) -> Vec<T>\n\nwhere T: Eq + Hash + Clone {\n\n let mut iter = adjacency.keys();\n\n let first = match iter.next() {\n\n Some(key) => key.clone(),\n\n None => return Vec::new(),\n\n };\n\n let mut stack = vec![first];\n\n let mut res = Vec::new();\n\n while !stack.is_empty() {\n\n let i = stack.pop().unwrap();\n\n if let Some(vec) = adjacency.remove(&i) {\n\n res.push(i);\n\n for j in vec {\n\n stack.push(j);\n\n }\n\n }\n\n }\n\n res\n\n}\n", "file_path": "truck-topology/src/shell.rs", "rank": 74, "score": 79545.34593958942 }, { "content": "fn sub_write<W: Write>(mesh: &PolygonMesh, writer: &mut BufWriter<W>) -> Result<()> {\n\n write3vec(writer, mesh.positions(), \"v\")?;\n\n write2vec(writer, mesh.uv_coords(), \"vt\")?;\n\n write3vec(writer, mesh.normals(), \"vn\")?;\n\n mesh.faces.write(writer)\n\n}\n\n\n", "file_path": "truck-polymesh/src/obj.rs", "rank": 75, "score": 79373.53089529177 }, { "content": "fn create_components<T>(adjacency: &mut HashMap<T, Vec<T>>) -> Vec<Vec<T>>\n\nwhere T: Eq + Clone + Hash {\n\n let mut res = Vec::new();\n\n loop {\n\n let component = create_one_component(adjacency);\n\n match component.is_empty() {\n\n true => break,\n\n false => res.push(component),\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "truck-topology/src/shell.rs", "rank": 76, "score": 78728.31764011737 }, { "content": "#[inline(always)]\n\nfn pos_to_face(a: Point3, b: Point3, c: Point3) -> STLFace {\n\n let normal = (b - a).cross(c - a).normalize().cast().unwrap().into();\n\n let vertices = [\n\n a.cast().unwrap().into(),\n\n b.cast().unwrap().into(),\n\n c.cast().unwrap().into(),\n\n ];\n\n STLFace { normal, vertices 
}\n\n}\n\n\n\nimpl<'a> Iterator for PolygonMeshSTLFaceIterator<'a> {\n\n type Item = STLFace;\n\n fn next(&mut self) -> Option<STLFace> {\n\n if self.current_face.is_none() {\n\n self.current_face = if let Some(face) = self.tri_faces.next() {\n\n Some(face)\n\n } else if let Some(face) = self.quad_faces.next() {\n\n Some(face)\n\n } else if let Some(face) = self.other_faces.next() {\n\n Some(face)\n", "file_path": "truck-polymesh/src/stl.rs", "rank": 77, "score": 78667.99377511643 }, { "content": " /// closed sweep, builds a closed torus, and so on.\n\n pub trait ClosedSweep<P, C, S>: MultiSweep<P, C, S> {\n\n /// Transform topologies and connect vertices and edges in boundaries.\n\n fn closed_sweep<\n\n FP: Fn(&P) -> P,\n\n FC: Fn(&C) -> C,\n\n FS: Fn(&S) -> S,\n\n CP: Fn(&P, &P) -> C,\n\n CE: Fn(&C, &C) -> S,\n\n >(\n\n &self,\n\n point_mapping: &FP,\n\n curve_mapping: &FC,\n\n surface_mapping: &FS,\n\n connect_points: &CP,\n\n connect_curves: &CE,\n\n division: usize,\n\n ) -> Self::Swept;\n\n }\n\n}\n\npub use topo_traits::*;\n", "file_path": "truck-modeling/src/lib.rs", "rank": 78, "score": 78277.79995282978 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Boundaries<C> {\n\n checked: HashSet<EdgeID<C>>,\n\n boundaries: HashMap<EdgeID<C>, bool>,\n\n condition: ShellCondition,\n\n}\n\n\n\nimpl<C> Boundaries<C> {\n\n #[inline(always)]\n\n fn new() -> Self {\n\n Self {\n\n checked: Default::default(),\n\n boundaries: Default::default(),\n\n condition: ShellCondition::Oriented,\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn insert<P>(&mut self, edge: &Edge<P, C>) {\n\n self.condition = self.condition\n\n & match (\n", "file_path": "truck-topology/src/shell.rs", "rank": 79, "score": 77506.96416361944 }, { "content": "/// Concats two curves\n\npub trait Concat<Rhs: ParametricCurve<Point = Self::Point, Vector = Self::Vector>>:\n\n ParametricCurve\n\nwhere Self::Point: Debug {\n\n /// The result of concat two curves\n\n type Output: ParametricCurve<Point = 
Self::Point, Vector = Self::Vector>;\n\n /// Try concat two curves.\n\n /// # Failure\n\n /// Returns `None` if `self.parameter_range().1 != rhs.parameter_range().0`.\n\n fn try_concat(&self, rhs: &Rhs) -> Result<Self::Output, ConcatError<Self::Point>>;\n\n /// Try concat two curves.\n\n /// # Panic\n\n /// Panic occurs if `self.parameter_range().1 != rhs.parameter_range().0`.\n\n fn concat(&self, rhs: &Rhs) -> Self::Output {\n\n self.try_concat(rhs).unwrap_or_else(|err| panic!(\"{}\", err))\n\n }\n\n}\n\n\n\n/// Error for concat curves\n\n#[derive(Clone, Copy, PartialEq, Debug, Error)]\n\npub enum ConcatError<Point: Debug> {\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 80, "score": 76614.89567785819 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct CompressedEdge<C> {\n\n vertices: (usize, usize),\n\n curve: C,\n\n}\n\n\n\nimpl<C> CompressedEdge<C> {\n\n fn create_edge<P>(self, v: &Vec<Vertex<P>>) -> Result<Edge<P, C>> {\n\n let front = &v[self.vertices.0];\n\n let back = &v[self.vertices.1];\n\n Edge::try_new(front, back, self.curve)\n\n }\n\n}\n\n\n", "file_path": "truck-topology/src/compress.rs", "rank": 81, "score": 76199.58295101239 }, { "content": "fn main() {\n\n for path in &WORKSPACES {\n\n std::env::set_current_dir(path).unwrap();\n\n create_readme(path);\n\n std::env::set_current_dir(\"..\").unwrap();\n\n }\n\n}\n", "file_path": "readme-generator/src/main.rs", "rank": 82, "score": 73987.86361020614 }, { "content": "fn main() {\n\n let v0 = builder::vertex(Point3::new(0.0, 0.5, 0.0));\n\n let v1 = builder::vertex(Point3::new(0.0, -0.5, 0.5));\n\n let v2 = builder::vertex(Point3::new(0.0, -0.5, 0.0));\n\n let wire: Wire = vec![builder::line(&v0, &v1), builder::line(&v1, &v2)].into();\n\n let shell = builder::cone(&wire, Vector3::unit_y(), Rad(7.0));\n\n let cone = Solid::new(vec![shell]);\n\n assert!(cone.is_geometric_consistent());\n\n let json = serde_json::to_vec_pretty(&cone.compress()).unwrap();\n\n 
std::fs::write(\"cone.json\", &json).unwrap();\n\n}\n", "file_path": "truck-modeling/examples/cone.rs", "rank": 83, "score": 73987.86361020614 }, { "content": "fn main() {\n\n let bottle = bottle(1.4, 1.0, 0.6);\n\n let json = serde_json::to_vec_pretty(&bottle.compress()).unwrap();\n\n std::fs::write(\"bottle.json\", &json).unwrap();\n\n}\n", "file_path": "truck-modeling/examples/bottle.rs", "rank": 84, "score": 73987.86361020614 }, { "content": "#[allow(dead_code)]\n\nfn main() {\n\n struct MyApp;\n\n impl App for MyApp {\n\n fn init(_: &DeviceHandler, _: AdapterInfo) -> MyApp {\n\n MyApp\n\n }\n\n }\n\n MyApp::run()\n\n}\n", "file_path": "truck-rendimpl/examples/app.rs", "rank": 85, "score": 73987.86361020614 }, { "content": "fn main() {\n\n let v = builder::vertex(Point3::new(0.5, 0.0, 0.0));\n\n let w = builder::rsweep(&v, Point3::new(0.75, 0.0, 0.0), Vector3::unit_y(), Rad(7.0));\n\n let shell = builder::rsweep(&w, Point3::origin(), Vector3::unit_z(), Rad(7.0));\n\n let torus = Solid::new(vec![shell]);\n\n let json = serde_json::to_vec_pretty(&torus.compress()).unwrap();\n\n std::fs::write(\"torus.json\", &json).unwrap();\n\n}\n", "file_path": "truck-modeling/examples/torus.rs", "rank": 86, "score": 73987.86361020614 }, { "content": "fn main() {\n\n let v0 = builder::vertex(Point3::new(1.0, 1.0, 0.0));\n\n let v1 = builder::vertex(Point3::new(0.0, -1.0, 1.0));\n\n let line = builder::line(&v0, &v1);\n\n let mut shell = builder::rsweep(&line, Point3::origin(), Vector3::unit_y(), Rad(7.0));\n\n let wires = shell.extract_boundaries();\n\n shell.push(\n\n builder::try_attach_plane(&vec![wires[0].clone()])\n\n .unwrap()\n\n .inverse(),\n\n );\n\n shell.push(\n\n builder::try_attach_plane(&vec![wires[1].clone()])\n\n .unwrap()\n\n .inverse(),\n\n );\n\n let solid = Solid::new(vec![shell]);\n\n assert!(solid.is_geometric_consistent());\n\n let json = serde_json::to_vec_pretty(&solid.compress()).unwrap();\n\n std::fs::write(\"tsudumi.json\", &json).unwrap();\n\n}\n", 
"file_path": "truck-modeling/examples/tsudumi.rs", "rank": 87, "score": 73987.86361020614 }, { "content": "#[test]\n\nfn polysurface() {\n\n let coef0 = vec![\n\n Vector3::new(1.0, 0.0, 1.0),\n\n Vector3::new(3.0, 1.0, 0.0),\n\n Vector3::new(2.0, 0.0, 0.0),\n\n ];\n\n let coef1 = vec![\n\n Vector3::new(2.0, 1.0, 0.0),\n\n Vector3::new(-6.0, 0.0, 1.0),\n\n Vector3::new(4.0, 0.0, 0.0),\n\n ];\n\n let poly = PolySurface(PolyCurve(coef0), PolyCurve(coef1));\n\n for i in 0..5 {\n\n let u = i as f64;\n\n for j in 0..5 {\n\n let v = j as f64;\n\n assert_eq!(\n\n poly.subs(u, v)[0],\n\n (2.0 * u * u + 3.0 * u + 1.0) * (4.0 * v * v - 6.0 * v + 2.0)\n\n );\n", "file_path": "truck-geotrait/tests/surface.rs", "rank": 88, "score": 73987.86361020614 }, { "content": "fn main() {\n\n let v = builder::vertex(Point3::new(-0.5, -0.5, -0.5));\n\n let e = builder::tsweep(&v, Vector3::unit_x());\n\n let f = builder::tsweep(&e, Vector3::unit_y());\n\n let cube = builder::tsweep(&f, Vector3::unit_z());\n\n let json = serde_json::to_vec_pretty(&cube.compress()).unwrap();\n\n std::fs::write(\"cube.json\", &json).unwrap();\n\n}\n", "file_path": "truck-modeling/examples/cube.rs", "rank": 89, "score": 73987.86361020614 }, { "content": "fn main() {\n\n let file = std::fs::File::open(\"examples/data/teapot.obj\").unwrap();\n\n let mut mesh = obj::read(file).unwrap();\n\n\n\n mesh.put_together_same_attrs()\n\n .add_smooth_normals(std::f64::consts::PI / 3.0, true)\n\n .quadrangulate(0.1, 1.0);\n\n let file = std::fs::File::create(\"quaded_pot.obj\").unwrap();\n\n obj::write(&mesh, file).unwrap()\n\n}\n", "file_path": "truck-meshalgo/examples/teapot.rs", "rank": 90, "score": 73987.86361020614 }, { "content": "#[test]\n\n#[cfg_attr(rustfmt, rustfmt_skip)]\n\nfn print_messages() {\n\n use std::io::Write;\n\n let stderr = &mut std::io::stderr();\n\n writeln!(stderr, \"****** test of the expressions of error messages ******\\n\").unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::ZeroRange).unwrap();\n\n 
writeln!(stderr, \"{}\\n\", Error::DifferentBackFront(0.0, 1.0)).unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::NotClampedKnotVector).unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::NotSortedVector).unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::TooLargeDegree(1, 2)).unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::CannotRemoveKnot(7)).unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::EmptyControlPoints).unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::TooShortKnotVector(1, 2)).unwrap();\n\n writeln!(stderr, \"{}\\n\", Error::IrregularControlPoints).unwrap();\n\n writeln!(stderr, \"*******************************************************\").unwrap();\n\n}\n", "file_path": "truck-geometry/src/errors.rs", "rank": 91, "score": 72629.44457716696 }, { "content": "fn signup_vertex(\n\n polymesh: &PolygonMesh,\n\n vertex: Vertex,\n\n glpolymesh: &mut ExpandedPolygon<AttrVertex>,\n\n vertex_map: &mut HashMap<Vertex, u32>,\n\n) {\n\n let idx = match vertex_map.get(&vertex) {\n\n Some(idx) => *idx,\n\n None => {\n\n let idx = glpolymesh.vertices.len() as u32;\n\n let position = polymesh.positions()[vertex.pos].cast().unwrap().into();\n\n let uv_coord = match vertex.uv {\n\n Some(uv) => polymesh.uv_coords()[uv].cast().unwrap().into(),\n\n None => [0.0, 0.0],\n\n };\n\n let normal = match vertex.nor {\n\n Some(nor) => polymesh.normals()[nor].cast().unwrap().into(),\n\n None => [0.0, 0.0, 0.0],\n\n };\n\n let wgpuvertex = AttrVertex {\n", "file_path": "truck-rendimpl/src/polyrend.rs", "rank": 92, "score": 72629.44457716696 }, { "content": "#[test]\n\nfn partial_torus() {\n\n let v = vertex(Point3::new(0.5, 0.0, 0.0));\n\n let w = rsweep(&v, Point3::new(0.75, 0.0, 0.0), Vector3::unit_y(), Rad(7.0));\n\n let face = try_attach_plane(&vec![w]).unwrap();\n\n let torus = rsweep(&face, Point3::origin(), Vector3::unit_z(), Rad(2.0));\n\n assert!(torus.is_geometric_consistent());\n\n let torus = rsweep(&face, Point3::origin(), Vector3::unit_z(), Rad(5.0));\n\n 
assert!(torus.is_geometric_consistent());\n\n let torus = rsweep(&face, Point3::origin(), Vector3::unit_z(), Rad(-2.0));\n\n assert!(torus.is_geometric_consistent());\n\n let torus = rsweep(&face, Point3::origin(), Vector3::unit_z(), Rad(-5.0));\n\n assert!(torus.is_geometric_consistent());\n\n}\n\n\n", "file_path": "truck-modeling/src/builder.rs", "rank": 93, "score": 72629.44457716696 }, { "content": "fn main() {\n\n let event_loop = winit::event_loop::EventLoop::new();\n\n let mut wb = winit::window::WindowBuilder::new();\n\n wb = wb.with_title(\"wGSL Sandbox\");\n\n let window = wb.build(&event_loop).unwrap();\n\n let size = window.inner_size();\n\n let instance = Instance::new(Backends::PRIMARY);\n\n let surface = unsafe { instance.create_surface(&window) };\n\n\n\n let (device, queue) = futures::executor::block_on(async {\n\n let adapter = instance\n\n .request_adapter(&RequestAdapterOptions {\n\n power_preference: PowerPreference::HighPerformance,\n\n compatible_surface: Some(&surface),\n\n })\n\n .await\n\n .unwrap();\n\n\n\n adapter\n\n .request_device(\n", "file_path": "truck-platform/examples/wgsl-sandbox.rs", "rank": 94, "score": 72629.44457716696 }, { "content": "fn main() {\n\n let v = builder::vertex(Point3::new(-0.5, -0.5, -0.5));\n\n let edge = builder::tsweep(&v, Vector3::unit_x());\n\n let mut face = builder::tsweep(&edge, Vector3::unit_y());\n\n let v = builder::vertex(Point3::new(0.2, 0.0, -0.5));\n\n let edge0 = builder::tsweep(&v, Vector3::new(-0.2, 0.2, 0.0));\n\n let edge1 = builder::rsweep(\n\n edge0.back(),\n\n Point3::origin(),\n\n Vector3::unit_z(),\n\n Rad(std::f64::consts::PI / 2.0),\n\n )\n\n .pop_back()\n\n .unwrap();\n\n let edge2 = builder::tsweep(edge1.back(), Vector3::new(0.2, -0.2, 0.0));\n\n let edge3 = builder::rsweep(\n\n edge2.back(),\n\n Point3::origin(),\n\n Vector3::unit_z(),\n\n Rad(std::f64::consts::PI / 2.0),\n", "file_path": "truck-modeling/examples/punched-cube.rs", "rank": 95, "score": 72629.44457716696 }, { 
"content": "#[test]\n\nfn through_polymesh() {\n\n let iter = STLReader::<&[u8]>::new(include_bytes!(\"data/bunny_binary.stl\"), STLType::Automatic)\n\n .unwrap();\n\n let polymesh: PolygonMesh = iter.map(|face| face.unwrap()).collect();\n\n let mesh: Vec<STLFace> = polymesh.into_iter().collect();\n\n let iter = STLReader::<&[u8]>::new(include_bytes!(\"data/bunny_binary.stl\"), STLType::Automatic)\n\n .unwrap();\n\n for (face0, face1) in mesh.iter().zip(iter) {\n\n let face1 = face1.unwrap();\n\n assert_near!(face0.vertices[0][0] as f64, face1.vertices[0][0] as f64);\n\n assert_near!(face0.vertices[0][1] as f64, face1.vertices[0][1] as f64);\n\n assert_near!(face0.vertices[0][2] as f64, face1.vertices[0][2] as f64);\n\n assert_near!(face0.vertices[1][0] as f64, face1.vertices[1][0] as f64);\n\n assert_near!(face0.vertices[1][1] as f64, face1.vertices[1][1] as f64);\n\n assert_near!(face0.vertices[1][2] as f64, face1.vertices[1][2] as f64);\n\n assert_near!(face0.vertices[2][0] as f64, face1.vertices[2][0] as f64);\n\n assert_near!(face0.vertices[2][1] as f64, face1.vertices[2][1] as f64);\n\n assert_near!(face0.vertices[2][2] as f64, face1.vertices[2][2] as f64);\n\n // This is not assert_near, since VTK is single precision.\n\n assert!(f32::abs(face0.normal[0] - face1.normal[0]) < 1.0e-4);\n\n assert!(f32::abs(face0.normal[1] - face1.normal[1]) < 1.0e-4);\n\n assert!(f32::abs(face0.normal[2] - face1.normal[2]) < 1.0e-4);\n\n }\n\n}\n", "file_path": "truck-polymesh/tests/stl-io.rs", "rank": 96, "score": 72629.44457716696 }, { "content": "#[test]\n\nfn compress_extract() {\n\n let cube = solid::cube();\n\n let shell0 = &cube.boundaries()[0];\n\n let shell1 = Shell::extract(shell0.compress()).unwrap();\n\n assert!(same_topology(&shell0, &shell1));\n\n}\n\n\n", "file_path": "truck-topology/src/compress.rs", "rank": 97, "score": 72629.44457716696 }, { "content": "#[test]\n\nfn print_messages() {\n\n use std::io::Write;\n\n writeln!(\n\n &mut std::io::stderr(),\n\n 
\"****** test of the expressions of error messages ******\\n\"\n\n )\n\n .unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::SameVertex).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::EmptyWire).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::NotClosedWire).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::NotSimpleWire).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::NotDisjointWires).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::EmptyShell).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::NotConnected).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::NotClosedShell).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::NotManifold).unwrap();\n\n writeln!(\n\n &mut std::io::stderr(),\n\n \"*******************************************************\"\n\n )\n\n .unwrap();\n\n}\n", "file_path": "truck-topology/src/errors.rs", "rank": 98, "score": 72629.44457716696 }, { "content": "#[test]\n\nfn print_messages() {\n\n use std::io::Write;\n\n writeln!(&mut std::io::stderr(), \"****** test of the expressions of error messages ******\\n\").unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::FromTopology(truck_topology::errors::Error::SameVertex)).unwrap();\n\n writeln!(&mut std::io::stderr(), \"{}\\n\", Error::WireNotInOnePlane).unwrap();\n\n writeln!(&mut std::io::stderr(), \"*******************************************************\").unwrap();\n\n}\n", "file_path": "truck-modeling/src/errors.rs", "rank": 99, "score": 72629.44457716696 } ]
Rust
src/units/units.rs
zboldyga/rustysd
ecc35896385d5909686f275dc6356226c9e5113b
use crate::fd_store::FDStore; use crate::platform::EventFd; use crate::services::Service; use crate::sockets::{Socket, SocketKind, SpecializedSocketConfig}; use crate::units::*; use nix::unistd::Pid; use std::collections::HashMap; use std::sync::{Arc, Mutex, RwLock}; use std::{fmt, path::PathBuf}; #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum UnitIdKind { Target, Socket, Service, } #[derive(Clone, Copy, Eq, PartialEq, Hash)] pub struct UnitId(pub UnitIdKind, pub u64); impl fmt::Debug for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{}", self.1).as_str()) } } impl fmt::Display for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{:?}", self).as_str()) } } impl std::cmp::PartialOrd for UnitId { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.1.partial_cmp(&other.1) } } impl std::cmp::Ord for UnitId { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.1.cmp(&other.1) } } pub type UnitTable = HashMap<UnitId, Arc<Mutex<Unit>>>; pub type ArcMutUnitTable = Arc<RwLock<UnitTable>>; pub type StatusTable = HashMap<UnitId, Arc<Mutex<UnitStatus>>>; pub type ArcMutStatusTable = Arc<RwLock<StatusTable>>; pub type PidTable = HashMap<Pid, PidEntry>; pub type ArcMutPidTable = Arc<Mutex<PidTable>>; pub type ArcMutFDStore = Arc<RwLock<FDStore>>; pub struct RuntimeInfo { pub unit_table: ArcMutUnitTable, pub status_table: ArcMutStatusTable, pub pid_table: ArcMutPidTable, pub fd_store: ArcMutFDStore, pub config: crate::config::Config, pub last_id: Arc<Mutex<u64>>, } pub type ArcRuntimeInfo = Arc<RuntimeInfo>; pub fn lock_all( units: &mut Vec<(UnitId, Arc<Mutex<Unit>>)>, ) -> HashMap<UnitId, std::sync::MutexGuard<'_, Unit>> { let mut units_locked = HashMap::new(); units.sort_by(|(lid, _), (rid, _)| lid.cmp(rid)); for (id, unit) in units { trace!("Lock unit: {:?}", id); let other_unit_locked = unit.lock().unwrap(); trace!("Locked unit: {:?}", id); 
units_locked.insert(id.clone(), other_unit_locked); } units_locked } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum PidEntry { Service(UnitId, ServiceType), OneshotExited(crate::signal_handler::ChildTermination), Helper(UnitId, String), HelperExited(crate::signal_handler::ChildTermination), } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum UnitStatus { NeverStarted, Starting, Started, StartedWaitingForSocket, Stopping, Stopped, StoppedFinal(String), } #[derive(Debug)] pub enum UnitSpecialized { Socket(Socket), Service(Service), Target, } #[derive(Debug, Default)] pub struct Install { pub wants: Vec<UnitId>, pub requires: Vec<UnitId>, pub wanted_by: Vec<UnitId>, pub required_by: Vec<UnitId>, pub before: Vec<UnitId>, pub after: Vec<UnitId>, pub install_config: Option<InstallConfig>, } pub struct Unit { pub id: UnitId, pub conf: UnitConfig, pub specialized: UnitSpecialized, pub install: Install, } impl Unit { pub fn is_service(&self) -> bool { if let UnitSpecialized::Service(_) = self.specialized { true } else { false } } pub fn is_socket(&self) -> bool { if let UnitSpecialized::Socket(_) = self.specialized { true } else { false } } pub fn is_target(&self) -> bool { if let UnitSpecialized::Target = self.specialized { true } else { false } } pub fn dedup_dependencies(&mut self) { self.install.wants.sort(); self.install.wanted_by.sort(); self.install.required_by.sort(); self.install.before.sort(); self.install.after.sort(); self.install.requires.sort(); self.install.wants.dedup(); self.install.requires.dedup(); self.install.wanted_by.dedup(); self.install.required_by.dedup(); self.install.before.dedup(); self.install.after.dedup(); } pub fn activate( &mut self, run_info: ArcRuntimeInfo, notification_socket_path: std::path::PathBuf, eventfds: &[EventFd], allow_ignore: bool, ) -> Result<UnitStatus, UnitOperationError> { match &mut self.specialized { UnitSpecialized::Target => trace!("Reached target {}", self.conf.name()), UnitSpecialized::Socket(sock) => { 
sock.open_all( self.conf.name(), self.id, &mut *run_info.fd_store.write().unwrap(), ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketOpenError(format!("{}", e)), })?; } UnitSpecialized::Service(srvc) => { match srvc .start( self.id, &self.conf.name(), run_info, notification_socket_path, eventfds, allow_ignore, ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStartError(e), })? { crate::services::StartResult::Started => return Ok(UnitStatus::Started), crate::services::StartResult::WaitingForSocket => { return Ok(UnitStatus::StartedWaitingForSocket) } } } } Ok(UnitStatus::Started) } pub fn deactivate(&mut self, run_info: ArcRuntimeInfo) -> Result<(), UnitOperationError> { trace!("Deactivate unit: {}", self.conf.name()); match &mut self.specialized { UnitSpecialized::Target => { /* nothing to do */ } UnitSpecialized::Socket(sock) => { sock.close_all(self.conf.name(), &mut *run_info.fd_store.write().unwrap()) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketCloseError(e), })?; } UnitSpecialized::Service(srvc) => { srvc.kill(self.id, &self.conf.name(), run_info) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStopError(e), })?; } } Ok(()) } } #[derive(Debug)] pub struct UnitConfig { pub filepath: PathBuf, pub description: String, pub wants: Vec<String>, pub requires: Vec<String>, pub before: Vec<String>, pub after: Vec<String>, } impl UnitConfig { pub fn name(&self) -> String { let name = self .filepath .file_name() .unwrap() .to_str() .unwrap() .to_owned(); name } pub fn name_without_suffix(&self) -> String { let name = self.name(); let split: Vec<_> = name.split('.').collect(); split[0..split.len() - 1].join(".") } } #[derive(Clone)] pub struct SocketConfig { pub kind: 
SocketKind, pub specialized: SpecializedSocketConfig, } impl fmt::Debug for SocketConfig { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { write!( f, "SocketConfig {{ kind: {:?}, specialized: {:?} }}", self.kind, self.specialized )?; Ok(()) } } unsafe impl Send for SocketConfig {} #[derive(Debug)] pub struct InstallConfig { pub wanted_by: Vec<String>, pub required_by: Vec<String>, } #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum ServiceType { Simple, Notify, Dbus, OneShot, } #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum NotifyKind { Main, Exec, All, None, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum ServiceRestart { Always, No, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum Timeout { Duration(std::time::Duration), Infinity, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ExecConfig { pub user: Option<String>, pub group: Option<String>, pub supplementary_groups: Vec<String>, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum CommandlinePrefix { AtSign, Minus, Colon, Plus, Exclamation, DoubleExclamation, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct Commandline { pub cmd: String, pub args: Vec<String>, pub prefixes: Vec<CommandlinePrefix>, } impl ToString for Commandline { fn to_string(&self) -> String { format!("{:?}", self) } } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ServiceConfig { pub restart: ServiceRestart, pub accept: bool, pub notifyaccess: NotifyKind, pub exec: Commandline, pub stop: Vec<Commandline>, pub stoppost: Vec<Commandline>, pub startpre: Vec<Commandline>, pub startpost: Vec<Commandline>, pub srcv_type: ServiceType, pub starttimeout: Option<Timeout>, pub stoptimeout: Option<Timeout>, pub generaltimeout: Option<Timeout>, pub exec_config: ExecConfig, pub dbus_name: Option<String>, pub sockets: Vec<String>, }
use crate::fd_store::FDStore; use crate::platform::EventFd; use crate::services::Service; use crate::sockets::{Socket, SocketKind, SpecializedSocketConfig}; use crate::units::*; use nix::unistd::Pid; use std::collections::HashMap; use std::sync::{Arc, Mutex, RwLock}; use std::{fmt, path::PathBuf}; #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum UnitIdKind { Target, Socket, Service, } #[derive(Clone, Copy, Eq, PartialEq, Hash)] pub struct UnitId(pub UnitIdKind, pub u64); impl fmt::Debug for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{}", self.1).as_str()) } } impl fmt::Display for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{:?}", self).as_str()) } } impl std::cmp::PartialOrd for UnitId { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.1.partial_cmp(&other.1) } } impl std::cmp::Ord for UnitId { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.1.cmp(&other.1) } } pub type UnitTable = HashMap<UnitId, Arc<Mutex<Unit>>>; pub type ArcMutUnitTable = Arc<RwLock<UnitTable>>; pub type StatusTable = HashMap<UnitId, Arc<Mutex<UnitStatus>>>; pub type ArcMutStatusTable = Arc<RwLock<StatusTable>>; pub type PidTable = HashMap<Pid, PidEntry>; pub type ArcMutPidTable = Arc<Mutex<PidTable>>; pub type ArcMutFDStore = Arc<RwLock<FDStore>>; pub struct RuntimeInfo { pub unit_table: ArcMutUnitTable, pub status_table: ArcMutStatusTable, pub pid_table: ArcMutPidTable, pub fd_store: ArcMutFDStore, pub config: crate::config::Config, pub last_id: Arc<Mutex<u64>>, } pub type ArcRuntimeInfo = Arc<RuntimeInfo>; pub fn lock_all( units: &mut Vec<(UnitId, Arc<Mutex<Unit>>)>, ) -> HashMap<UnitId, std::sync::MutexGuard<'_, Unit>> { let mut units_locked = HashMap::new(); units.sort_by(|(lid, _), (rid, _)| lid.cmp(rid)); for (id, unit) in units { trace!("Lock unit: {:?}", id); let other_unit_locked = unit.lock().unwrap(); trace!("Locked unit: {:?}", id); 
units_locked.insert(id.clone(), other_unit_locked); } units_locked } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum PidEntry { Service(UnitId, ServiceType), OneshotExited(crate::signal_handler::ChildTermination), Helper(UnitId, String), HelperExited(crate::signal_handler::ChildTermination), } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum UnitStatus { NeverStarted, Starting, Started, StartedWaitingForSocket, Stopping, Stopped, StoppedFinal(String), } #[derive(Debug)] pub enum UnitSpecialized { Socket(Socket), Service(Service), Target, } #[derive(Debug, Default)] pub struct Install { pub wants: Vec<UnitId>, pub requires: Vec<UnitId>, pub wanted_by: Vec<UnitId>, pub required_by: Vec<UnitId>, pub before: Vec<UnitId>, pub after: Vec<UnitId>, pub install_config: Option<InstallConfig>, } pub struct Unit { pub id: UnitId, pub conf: UnitConfig, pub specialized: UnitSpecialized, pub install: Install, } impl Unit { pub fn is_service(&self) -> bool { if let UnitSpecialized::Service(_) = self.specialized { true } else { false } } pub fn is_socket(&self) -> bool { if let UnitSpecialized::Socket(_) = self.specialized { true } else { false } } pub fn is_target(&self) -> bool { if let UnitSpecialized::Target = self.specialized { true } else { false } } pub fn dedup_dependencies(&mut self) { self.install.wants.sort(); self.install.wanted_by.sort(); self.install.required_by.sort(); self.install.before.sort(); self.install.after.sort(); self.install.requires.sort(); self.install.wants.dedup(); self.install.requires.dedup(); self.install.wanted_by.dedup(); self.install.required_by.dedup(); self.install.before.dedup(); self.install.after.dedup(); } pub fn activate( &mut self, run_info: ArcRuntimeInfo, notification_socket_path: std::path::PathBuf, eventfds: &[EventFd], allow_ignore: bool, ) -> Result<UnitStatus, UnitOperationError> { match &mut self.specialized { UnitSpecialized::Target => trace!("Reached target {}", self.conf.name()), UnitSpecialized::Socket(sock) => { 
sock.open_all( self.conf.name(), self.id, &mut *run_info.fd_store.write().unwrap(), ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketOpenError(format!("{}", e)), })?; } UnitSpecialized::Service(srvc) => { match srvc .start( self.id, &self.conf.name(), run_info, notification_socket_path, eventfds, allow_ignore, ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStartError(e), })? { crate::services::StartResult::Started => return Ok(UnitStatus::Started), crate::services::StartResult::WaitingForSocket => { return Ok(UnitStatus::StartedWaitingForSocket) } } } } Ok(UnitStatus::Started) }
} #[derive(Debug)] pub struct UnitConfig { pub filepath: PathBuf, pub description: String, pub wants: Vec<String>, pub requires: Vec<String>, pub before: Vec<String>, pub after: Vec<String>, } impl UnitConfig { pub fn name(&self) -> String { let name = self .filepath .file_name() .unwrap() .to_str() .unwrap() .to_owned(); name } pub fn name_without_suffix(&self) -> String { let name = self.name(); let split: Vec<_> = name.split('.').collect(); split[0..split.len() - 1].join(".") } } #[derive(Clone)] pub struct SocketConfig { pub kind: SocketKind, pub specialized: SpecializedSocketConfig, } impl fmt::Debug for SocketConfig { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { write!( f, "SocketConfig {{ kind: {:?}, specialized: {:?} }}", self.kind, self.specialized )?; Ok(()) } } unsafe impl Send for SocketConfig {} #[derive(Debug)] pub struct InstallConfig { pub wanted_by: Vec<String>, pub required_by: Vec<String>, } #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum ServiceType { Simple, Notify, Dbus, OneShot, } #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum NotifyKind { Main, Exec, All, None, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum ServiceRestart { Always, No, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum Timeout { Duration(std::time::Duration), Infinity, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ExecConfig { pub user: Option<String>, pub group: Option<String>, pub supplementary_groups: Vec<String>, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum CommandlinePrefix { AtSign, Minus, Colon, Plus, Exclamation, DoubleExclamation, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct Commandline { pub cmd: String, pub args: Vec<String>, pub prefixes: Vec<CommandlinePrefix>, } impl ToString for Commandline { fn to_string(&self) -> String { format!("{:?}", self) } } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ServiceConfig { pub restart: ServiceRestart, pub accept: bool, pub notifyaccess: NotifyKind, pub 
exec: Commandline, pub stop: Vec<Commandline>, pub stoppost: Vec<Commandline>, pub startpre: Vec<Commandline>, pub startpost: Vec<Commandline>, pub srcv_type: ServiceType, pub starttimeout: Option<Timeout>, pub stoptimeout: Option<Timeout>, pub generaltimeout: Option<Timeout>, pub exec_config: ExecConfig, pub dbus_name: Option<String>, pub sockets: Vec<String>, }
pub fn deactivate(&mut self, run_info: ArcRuntimeInfo) -> Result<(), UnitOperationError> { trace!("Deactivate unit: {}", self.conf.name()); match &mut self.specialized { UnitSpecialized::Target => { /* nothing to do */ } UnitSpecialized::Socket(sock) => { sock.close_all(self.conf.name(), &mut *run_info.fd_store.write().unwrap()) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketCloseError(e), })?; } UnitSpecialized::Service(srvc) => { srvc.kill(self.id, &self.conf.name(), run_info) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStopError(e), })?; } } Ok(()) }
function_block-full_function
[ { "content": "// make edges between units visible on bot sides: required <-> required_by after <-> before\n\npub fn fill_dependencies(units: &mut HashMap<UnitId, Unit>) {\n\n let mut name_to_id = HashMap::new();\n\n\n\n for (id, unit) in &*units {\n\n let name = unit.conf.name();\n\n name_to_id.insert(name, *id);\n\n }\n\n\n\n let mut required_by = Vec::new();\n\n let mut wanted_by: Vec<(UnitId, UnitId)> = Vec::new();\n\n let mut before = Vec::new();\n\n let mut after = Vec::new();\n\n\n\n for unit in (*units).values_mut() {\n\n let conf = &unit.conf;\n\n for name in &conf.wants {\n\n let id = name_to_id[name.as_str()];\n\n unit.install.wants.push(id);\n\n wanted_by.push((id, unit.id));\n\n }\n", "file_path": "src/units/dependency_resolving.rs", "rank": 0, "score": 258442.297560774 }, { "content": "// add after/before relations for required_by/wanted_by relations after pruning\n\npub fn add_implicit_before_after(units: &mut HashMap<UnitId, Unit>) {\n\n let mut name_to_id = HashMap::new();\n\n\n\n for (id, unit) in &*units {\n\n let name = unit.conf.name();\n\n name_to_id.insert(name, *id);\n\n }\n\n\n\n let mut before = Vec::new();\n\n let mut after = Vec::new();\n\n for unit in (*units).values_mut() {\n\n if let Some(conf) = &unit.install.install_config {\n\n for name in &conf.wanted_by {\n\n let id = name_to_id[name.as_str()];\n\n before.push((id, unit.id));\n\n after.push((unit.id, id));\n\n }\n\n for name in &conf.required_by {\n\n let id = name_to_id[name.as_str()];\n\n before.push((id, unit.id));\n", "file_path": "src/units/dependency_resolving.rs", "rank": 1, "score": 258442.06541937875 }, { "content": "pub fn open_all_sockets(run_info: ArcRuntimeInfo, conf: &crate::config::Config) {\n\n // TODO make configurable\n\n let control_sock_path = conf.notification_sockets_dir.join(\"control.socket\");\n\n if control_sock_path.exists() {\n\n std::fs::remove_file(&control_sock_path).unwrap();\n\n }\n\n use std::os::unix::net::UnixListener;\n\n 
std::fs::create_dir_all(&conf.notification_sockets_dir).unwrap();\n\n let unixsock = UnixListener::bind(&control_sock_path).unwrap();\n\n accept_control_connections_unix_socket(\n\n run_info.clone(),\n\n conf.notification_sockets_dir.clone(),\n\n unixsock,\n\n );\n\n let tcpsock = std::net::TcpListener::bind(\"127.0.0.1:8080\").unwrap();\n\n accept_control_connections_tcp(\n\n run_info.clone(),\n\n conf.notification_sockets_dir.clone(),\n\n tcpsock,\n\n );\n", "file_path": "src/control/control.rs", "rank": 2, "score": 248387.5822597165 }, { "content": "pub fn pre_fork_os_specific(srvc: &mut Service) -> Result<(), String> {\n\n #[cfg(feature = \"cgroups\")]\n\n {\n\n std::fs::create_dir_all(&srvc.platform_specific.cgroup_path).map_err(|e| {\n\n format!(\n\n \"Couldnt create service cgroup ({:?}): {}\",\n\n srvc.platform_specific.cgroup_path, e\n\n )\n\n })?;\n\n }\n\n let _ = srvc;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/services/fork_os_specific.rs", "rank": 3, "score": 244990.52989789634 }, { "content": "pub fn post_fork_os_specific(srvc: &mut Service) -> Result<(), String> {\n\n #[cfg(feature = \"cgroups\")]\n\n {\n\n trace!(\n\n \"Move service to cgroup: {:?}\",\n\n &srvc.platform_specific.cgroup_path\n\n );\n\n cgroups::move_self_to_cgroup(&srvc.platform_specific.cgroup_path)\n\n .map_err(|e| format!(\"postfork os specific: {}\", e))?;\n\n }\n\n let _ = srvc;\n\n Ok(())\n\n}\n", "file_path": "src/services/fork_os_specific.rs", "rank": 4, "score": 244990.52989789634 }, { "content": "fn prune_unused_sockets(sockets: &mut std::collections::HashMap<UnitId, Unit>) -> Vec<UnitId> {\n\n let mut ids_to_remove = Vec::new();\n\n for unit in sockets.values() {\n\n if let UnitSpecialized::Socket(sock) = &unit.specialized {\n\n if sock.services.is_empty() {\n\n trace!(\n\n \"Prune socket {} because it was not added to any service\",\n\n unit.conf.name()\n\n );\n\n ids_to_remove.push(unit.id);\n\n }\n\n }\n\n }\n\n for id in &ids_to_remove {\n\n sockets.remove(id);\n\n 
}\n\n ids_to_remove\n\n}\n\n\n", "file_path": "src/units/loading.rs", "rank": 5, "score": 242991.68012204222 }, { "content": "pub fn setup_logging(conf: &crate::config::LoggingConfig) -> Result<(), String> {\n\n let mut logger = fern::Dispatch::new()\n\n .format(|out, message, record| {\n\n out.finish(format_args!(\n\n \"{}[{}][{}] {}\",\n\n chrono::Local::now().format(\"[%Y-%m-%d][%H:%M:%S]\"),\n\n record.target(),\n\n record.level(),\n\n message\n\n ))\n\n })\n\n .level(log::LevelFilter::Trace);\n\n\n\n if conf.log_to_stdout {\n\n logger = logger.chain(std::io::stdout());\n\n }\n\n\n\n if conf.log_to_disk {\n\n unimplemented!(\"Logging to disk is currently not supported. Pipe the stdout logs to your preferred logging solution\");\n\n }\n\n\n\n logger\n\n .apply()\n\n .map_err(|e| format!(\"Error while stting up logger: {}\", e))\n\n}\n", "file_path": "src/logging.rs", "rank": 6, "score": 229744.09645677975 }, { "content": "type SocketTable = HashMap<UnitId, Unit>;\n", "file_path": "src/units/dependency_resolving.rs", "rank": 7, "score": 225600.88256655942 }, { "content": "type ServiceTable = HashMap<UnitId, Unit>;\n\n\n", "file_path": "src/units/dependency_resolving.rs", "rank": 8, "score": 224954.3530721349 }, { "content": "pub fn kill(srvc: &mut Service, sig: nix::sys::signal::Signal) -> Result<(), String> {\n\n #[cfg(feature = \"cgroups\")]\n\n {\n\n cgroups::freeze_kill_thaw_cgroup(&srvc.platform_specific.cgroup_path, sig)\n\n .map_err(|e| format!(\"{}\", e))?;\n\n std::fs::remove_dir(&srvc.platform_specific.cgroup_path).map_err(|e| format!(\"{}\", e))?;\n\n }\n\n let _ = srvc;\n\n let _ = sig;\n\n Ok(())\n\n}\n", "file_path": "src/services/kill_os_specific.rs", "rank": 9, "score": 216156.64833984672 }, { "content": "pub fn string_to_bool(s: &str) -> bool {\n\n if s.len() == 0 {\n\n return false;\n\n }\n\n\n\n let s_upper = &s.to_uppercase();\n\n let c: char = s_upper.chars().nth(0).unwrap();\n\n\n\n let is_num_and_one = s.len() == 1 && c == '1';\n\n *s_upper 
== *\"YES\" || *s_upper == *\"TRUE\" || is_num_and_one\n\n}\n\n\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 10, "score": 216058.40911306537 }, { "content": "pub fn collect_names_needed(new_unit: &units::Unit, names_needed: &mut Vec<String>) {\n\n names_needed.extend(new_unit.conf.after.iter().cloned());\n\n names_needed.extend(new_unit.conf.before.iter().cloned());\n\n\n\n if let Some(conf) = &new_unit.install.install_config {\n\n names_needed.extend(conf.required_by.iter().cloned());\n\n names_needed.extend(conf.wanted_by.iter().cloned());\n\n }\n\n if let units::UnitSpecialized::Socket(sock) = &new_unit.specialized {\n\n names_needed.extend(sock.services.iter().cloned());\n\n }\n\n if let units::UnitSpecialized::Service(srvc) = &new_unit.specialized {\n\n names_needed.extend(srvc.service_config.sockets.iter().cloned());\n\n }\n\n}\n\n\n", "file_path": "src/units/insert_new.rs", "rank": 11, "score": 214260.6298846706 }, { "content": "pub fn handle_notifications_from_buffer(srvc: &mut Service, name: &str) {\n\n while srvc.notifications_buffer.contains('\\n') {\n\n let (line, rest) = srvc\n\n .notifications_buffer\n\n .split_at(srvc.notifications_buffer.find('\\n').unwrap());\n\n let line = line.to_owned();\n\n srvc.notifications_buffer = rest[1..].to_owned();\n\n\n\n handle_notification_message(&line, srvc, name);\n\n }\n\n}\n", "file_path": "src/notification_handler.rs", "rank": 12, "score": 201363.41098528064 }, { "content": "pub fn start_socketactivation_thread(\n\n run_info: ArcRuntimeInfo,\n\n note_sock_path: std::path::PathBuf,\n\n eventfd: crate::platform::EventFd,\n\n eventfds: std::sync::Arc<Vec<crate::platform::EventFd>>,\n\n) {\n\n std::thread::spawn(move || loop {\n\n match wait_for_socket(\n\n eventfd,\n\n run_info.unit_table.clone(),\n\n run_info.fd_store.clone(),\n\n ) {\n\n Ok(ids) => {\n\n for socket_id in ids {\n\n let unit_table_locked = run_info.unit_table.read().unwrap();\n\n {\n\n let socket_name = {\n\n let sock_unit = 
unit_table_locked.get(&socket_id).unwrap();\n\n let sock_unit_locked = sock_unit.lock().unwrap();\n\n sock_unit_locked.conf.name()\n", "file_path": "src/socket_activation.rs", "rank": 13, "score": 195118.76745332315 }, { "content": "fn shutdown_unit(unit_locked: &mut Unit, run_info: ArcRuntimeInfo) {\n\n {\n\n trace!(\"Get status lock\");\n\n let status_table_locked = match run_info.status_table.write() {\n\n Ok(lock) => lock,\n\n Err(err) => err.into_inner(),\n\n };\n\n trace!(\"Set unit status: {}\", unit_locked.conf.name());\n\n let status = status_table_locked.get(&unit_locked.id).unwrap();\n\n let mut status_locked = status.lock().unwrap();\n\n *status_locked = UnitStatus::Stopping;\n\n }\n\n match &mut unit_locked.specialized {\n\n UnitSpecialized::Service(srvc) => {\n\n let kill_res = srvc.kill(unit_locked.id, &unit_locked.conf.name(), run_info.clone());\n\n match kill_res {\n\n Ok(()) => {\n\n trace!(\"Killed service unit: {}\", unit_locked.conf.name());\n\n }\n\n Err(e) => error!(\"{}\", e),\n", "file_path": "src/shutdown.rs", "rank": 14, "score": 194734.11336218897 }, { "content": "pub fn format_target(socket_unit: &Unit, status: UnitStatus) -> Value {\n\n let mut map = serde_json::Map::new();\n\n map.insert(\"Name\".into(), Value::String(socket_unit.conf.name()));\n\n map.insert(\"Status\".into(), Value::String(format!(\"{:?}\", status)));\n\n Value::Object(map)\n\n}\n\n\n", "file_path": "src/control/control.rs", "rank": 15, "score": 190245.6731471013 }, { "content": "pub fn format_service(srvc_unit: &Unit, status: UnitStatus) -> Value {\n\n let mut map = serde_json::Map::new();\n\n map.insert(\"Name\".into(), Value::String(srvc_unit.conf.name()));\n\n map.insert(\"Status\".into(), Value::String(format!(\"{:?}\", status)));\n\n if let UnitSpecialized::Service(srvc) = &srvc_unit.specialized {\n\n map.insert(\n\n \"Sockets\".into(),\n\n Value::Array(\n\n srvc.socket_names\n\n .iter()\n\n .map(|x| Value::String(x.clone()))\n\n .collect(),\n\n ),\n\n );\n\n 
if let Some(instant) = srvc.runtime_info.up_since {\n\n map.insert(\n\n \"UpSince\".into(),\n\n Value::String(format!(\"{:?}\", instant.elapsed())),\n\n );\n\n }\n\n map.insert(\n\n \"Restarted\".into(),\n\n Value::String(format!(\"{:?}\", srvc.runtime_info.restarted)),\n\n );\n\n }\n\n Value::Object(map)\n\n}\n\n\n\nuse std::sync::{Arc, Mutex};\n", "file_path": "src/control/control.rs", "rank": 16, "score": 189577.64950842984 }, { "content": "pub fn handle_all_streams(eventfd: EventFd, unit_table: ArcMutUnitTable) {\n\n loop {\n\n // need to collect all again. There might be a newly started service\n\n let fd_to_srvc_id = collect_from_srvc(unit_table.clone(), |map, srvc, id| {\n\n if let Some(socket) = &srvc.notifications {\n\n map.insert(socket.as_raw_fd(), id);\n\n }\n\n });\n\n\n\n let mut fdset = nix::sys::select::FdSet::new();\n\n for fd in fd_to_srvc_id.keys() {\n\n fdset.insert(*fd);\n\n }\n\n fdset.insert(eventfd.read_end());\n\n\n\n let result = nix::sys::select::select(None, Some(&mut fdset), None, None, None);\n\n match result {\n\n Ok(_) => {\n\n if fdset.contains(eventfd.read_end()) {\n\n trace!(\"Interrupted notification select because the eventfd fired\");\n", "file_path": "src/notification_handler.rs", "rank": 17, "score": 189575.25131610507 }, { "content": "pub fn start_service(\n\n srvc: &mut Service,\n\n name: &str,\n\n fd_store: &FDStore,\n\n) -> Result<(), super::RunCmdError> {\n\n start_service_with_filedescriptors(srvc, name, fd_store)?;\n\n srvc.runtime_info.up_since = Some(std::time::Instant::now());\n\n Ok(())\n\n}\n", "file_path": "src/services/start_service.rs", "rank": 18, "score": 189229.5365027394 }, { "content": "fn parse_service_section(mut section: ParsedSection) -> Result<ServiceConfig, ParsingErrorReason> {\n\n let exec = section.remove(\"EXECSTART\");\n\n let stop = section.remove(\"EXECSTOP\");\n\n let stoppost = section.remove(\"EXECSTOPPOST\");\n\n let startpre = section.remove(\"EXECSTARTPRE\");\n\n let startpost = 
section.remove(\"EXECSTARTPOST\");\n\n let starttimeout = section.remove(\"TIMEOUTSTARTSEC\");\n\n let stoptimeout = section.remove(\"TIMEOUTSTOPSEC\");\n\n let generaltimeout = section.remove(\"TIMEOUTSEC\");\n\n\n\n let restart = section.remove(\"RESTART\");\n\n let sockets = section.remove(\"SOCKETS\");\n\n let notify_access = section.remove(\"NOTIFYACCESS\");\n\n let srcv_type = section.remove(\"TYPE\");\n\n let accept = section.remove(\"ACCEPT\");\n\n let dbus_name = section.remove(\"BUSNAME\");\n\n\n\n let exec_config = super::parse_exec_section(&mut section)?;\n\n\n\n if !section.is_empty() {\n", "file_path": "src/units/unit_parsing/service_unit.rs", "rank": 19, "score": 188828.09165240172 }, { "content": "pub fn activate_units(\n\n run_info: ArcRuntimeInfo,\n\n notification_socket_path: std::path::PathBuf,\n\n eventfds: Vec<EventFd>,\n\n) {\n\n let mut root_units = Vec::new();\n\n\n\n for (id, unit) in &*run_info.unit_table.read().unwrap() {\n\n let unit_locked = unit.lock().unwrap();\n\n if unit_locked.install.after.is_empty() {\n\n root_units.push(*id);\n\n trace!(\"Root unit: {}\", unit_locked.conf.name());\n\n }\n\n }\n\n\n\n // TODO make configurable or at least make guess about amount fo threads\n\n let tpool = ThreadPool::new(6);\n\n let eventfds_arc = Arc::new(eventfds);\n\n let errors = Arc::new(Mutex::new(Vec::new()));\n\n activate_units_recursive(\n", "file_path": "src/units/activate.rs", "rank": 20, "score": 187724.0287910325 }, { "content": "pub fn activate_unit(\n\n id_to_start: UnitId,\n\n run_info: ArcRuntimeInfo,\n\n notification_socket_path: std::path::PathBuf,\n\n eventfds: Arc<Vec<EventFd>>,\n\n allow_ignore: bool,\n\n) -> std::result::Result<StartResult, UnitOperationError> {\n\n trace!(\"Activate id: {:?}\", id_to_start);\n\n\n\n // 1) First lock the unit itself\n\n // 1.5) Check if this unit should be started right now\n\n // 2) Then lock the needed other units (only for sockets of services right now)\n\n // With that we always 
maintain a consistent order between locks so deadlocks shouldnt occur\n\n let unit = {\n\n let units_locked = run_info.unit_table.read().unwrap();\n\n match units_locked.get(&id_to_start) {\n\n Some(unit) => Arc::clone(unit),\n\n None => {\n\n // If this occurs, there is a flaw in the handling of dependencies\n\n // IDs should be purged globally when units get removed\n", "file_path": "src/units/activate.rs", "rank": 21, "score": 187724.0287910325 }, { "content": "pub fn apply_sockets_to_services(\n\n service_table: &mut ServiceTable,\n\n socket_table: &mut SocketTable,\n\n) -> Result<(), String> {\n\n for sock_unit in socket_table.values_mut() {\n\n let mut counter = 0;\n\n\n\n if let UnitSpecialized::Socket(sock) = &mut sock_unit.specialized {\n\n trace!(\"Searching services for socket: {}\", sock_unit.conf.name());\n\n for srvc_unit in service_table.values_mut() {\n\n let srvc = &mut srvc_unit.specialized;\n\n if let UnitSpecialized::Service(srvc) = srvc {\n\n // add sockets for services with the exact same name\n\n if (srvc_unit.conf.name_without_suffix()\n\n == sock_unit.conf.name_without_suffix())\n\n && !srvc.socket_names.contains(&sock_unit.conf.name())\n\n {\n\n trace!(\n\n \"add socket: {} to service: {}\",\n\n sock_unit.conf.name(),\n", "file_path": "src/units/dependency_resolving.rs", "rank": 22, "score": 186592.2769750366 }, { "content": "pub fn handle_notification_message(msg: &str, srvc: &mut Service, name: &str) {\n\n let split: Vec<_> = msg.split('=').collect();\n\n match split[0] {\n\n \"STATUS\" => {\n\n srvc.status_msgs.push(split[1].to_owned());\n\n trace!(\n\n \"New status message pushed from service {}: {}\",\n\n name,\n\n srvc.status_msgs.last().unwrap()\n\n );\n\n }\n\n \"READY\" => {\n\n srvc.signaled_ready = true;\n\n }\n\n _ => {\n\n warn!(\"Unknown notification name{}\", split[0]);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/notification_handler.rs", "rank": 23, "score": 186077.60508870732 }, { "content": "pub fn 
parse_exec_section(section: &mut ParsedSection) -> Result<ExecConfig, ParsingErrorReason> {\n\n let user = section.remove(\"USER\");\n\n let group = section.remove(\"GROUP\");\n\n let supplementary_groups = section.remove(\"SUPPLEMENTARYGROUPS\");\n\n\n\n let user = match user {\n\n None => None,\n\n Some(mut vec) => {\n\n if vec.len() == 1 {\n\n Some(vec.remove(0).1)\n\n } else if vec.len() > 1 {\n\n return Err(ParsingErrorReason::SettingTooManyValues(\n\n \"User\".into(),\n\n super::map_tupels_to_second(vec),\n\n ));\n\n } else {\n\n None\n\n }\n\n }\n\n };\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 24, "score": 185647.46298283502 }, { "content": "pub fn handle_all_std_out(eventfd: EventFd, run_info: ArcRuntimeInfo) {\n\n loop {\n\n // need to collect all again. There might be a newly started service\n\n let fd_to_srvc_id = collect_from_srvc(run_info.unit_table.clone(), |map, srvc, id| {\n\n if let Some(fd) = &srvc.stdout_dup {\n\n map.insert(fd.0, id);\n\n }\n\n });\n\n\n\n let mut fdset = nix::sys::select::FdSet::new();\n\n for fd in fd_to_srvc_id.keys() {\n\n fdset.insert(*fd);\n\n }\n\n fdset.insert(eventfd.read_end());\n\n\n\n let result = nix::sys::select::select(None, Some(&mut fdset), None, None, None);\n\n match result {\n\n Ok(_) => {\n\n if fdset.contains(eventfd.read_end()) {\n\n trace!(\"Interrupted stdout select because the eventfd fired\");\n", "file_path": "src/notification_handler.rs", "rank": 25, "score": 183163.31002847524 }, { "content": "pub fn load_config(config_path: &Option<PathBuf>) -> (LoggingConfig, Result<Config, String>) {\n\n let mut settings: HashMap<String, SettingValue> = HashMap::new();\n\n\n\n let default_config_path_json = PathBuf::from(\"./config/rustysd_config.json\");\n\n let default_config_path_toml = PathBuf::from(\"./config/rustysd_config.toml\");\n\n\n\n let config_path_json = if let Some(config_path) = config_path {\n\n config_path.join(\"rustysd_config.json\")\n\n } else {\n\n 
default_config_path_json\n\n };\n\n\n\n let config_path_toml = if let Some(config_path) = config_path {\n\n config_path.join(\"rustysd_config.toml\")\n\n } else {\n\n default_config_path_toml.clone()\n\n };\n\n\n\n let json_conf = if config_path_json.exists() {\n\n Some(load_json(&config_path_json, &mut settings))\n", "file_path": "src/config.rs", "rank": 26, "score": 182970.18889349408 }, { "content": "pub fn handle_all_std_err(eventfd: EventFd, run_info: ArcRuntimeInfo) {\n\n loop {\n\n // need to collect all again. There might be a newly started service\n\n let fd_to_srvc_id = collect_from_srvc(run_info.unit_table.clone(), |map, srvc, id| {\n\n if let Some(fd) = &srvc.stderr_dup {\n\n map.insert(fd.0, id);\n\n }\n\n });\n\n\n\n let mut fdset = nix::sys::select::FdSet::new();\n\n for fd in fd_to_srvc_id.keys() {\n\n fdset.insert(*fd);\n\n }\n\n fdset.insert(eventfd.read_end());\n\n\n\n let result = nix::sys::select::select(None, Some(&mut fdset), None, None, None);\n\n match result {\n\n Ok(_) => {\n\n if fdset.contains(eventfd.read_end()) {\n\n trace!(\"Interrupted stderr select because the eventfd fired\");\n", "file_path": "src/notification_handler.rs", "rank": 27, "score": 181041.78027551385 }, { "content": "pub fn parse_target(\n\n parsed_file: ParsedFile,\n\n path: &PathBuf,\n\n chosen_id: UnitId,\n\n) -> Result<Unit, ParsingErrorReason> {\n\n let mut install_config = None;\n\n let mut unit_config = None;\n\n\n\n for (name, section) in parsed_file {\n\n match name.as_str() {\n\n \"[Unit]\" => {\n\n unit_config = Some(parse_unit_section(section, path)?);\n\n }\n\n \"[Install]\" => {\n\n install_config = Some(parse_install_section(section)?);\n\n }\n\n _ => return Err(ParsingErrorReason::UnknownSection(name.to_owned())),\n\n }\n\n }\n\n\n", "file_path": "src/units/unit_parsing/target_unit.rs", "rank": 28, "score": 180159.32492528873 }, { "content": "pub fn parse_socket(\n\n parsed_file: ParsedFile,\n\n path: &PathBuf,\n\n chosen_id: UnitId,\n\n) -> 
Result<Unit, ParsingErrorReason> {\n\n let mut socket_configs = None;\n\n let mut install_config = None;\n\n let mut unit_config = None;\n\n let mut exec_config = None;\n\n\n\n for (name, mut section) in parsed_file {\n\n match name.as_str() {\n\n \"[Socket]\" => {\n\n exec_config = Some(super::parse_exec_section(&mut section)?);\n\n socket_configs = match parse_socket_section(section) {\n\n Ok(conf) => Some(conf),\n\n Err(e) => return Err(e),\n\n };\n\n }\n\n \"[Unit]\" => {\n", "file_path": "src/units/unit_parsing/socket_unit.rs", "rank": 29, "score": 179570.58924579373 }, { "content": "pub fn wait_for_socket(\n\n eventfd: EventFd,\n\n unit_table: ArcMutUnitTable,\n\n fd_store: ArcMutFDStore,\n\n) -> Result<Vec<UnitId>, String> {\n\n let fd_to_sock_id = fd_store.read().unwrap().global_fds_to_ids();\n\n\n\n let mut fdset = nix::sys::select::FdSet::new();\n\n {\n\n let unit_table_locked = unit_table.read().unwrap();\n\n for (fd, id) in &fd_to_sock_id {\n\n let unit = unit_table_locked.get(id).unwrap();\n\n let unit_locked = unit.lock().unwrap();\n\n if let UnitSpecialized::Socket(sock) = &unit_locked.specialized {\n\n if !sock.activated {\n\n fdset.insert(*fd);\n\n }\n\n }\n\n }\n\n fdset.insert(eventfd.read_end());\n", "file_path": "src/socket_activation.rs", "rank": 30, "score": 179417.72526996894 }, { "content": "pub fn parse_service(\n\n parsed_file: ParsedFile,\n\n path: &PathBuf,\n\n chosen_id: UnitId,\n\n) -> Result<Unit, ParsingErrorReason> {\n\n let mut service_config = None;\n\n let mut install_config = None;\n\n let mut unit_config = None;\n\n\n\n for (name, section) in parsed_file {\n\n match name.as_str() {\n\n \"[Service]\" => {\n\n service_config = Some(parse_service_section(section)?);\n\n }\n\n \"[Unit]\" => {\n\n unit_config = Some(parse_unit_section(section, path)?);\n\n }\n\n \"[Install]\" => {\n\n install_config = Some(parse_install_section(section)?);\n\n }\n", "file_path": "src/units/unit_parsing/service_unit.rs", "rank": 31, "score": 
178618.17181560743 }, { "content": "pub fn format_socket(socket_unit: &Unit, status: UnitStatus) -> Value {\n\n let mut map = serde_json::Map::new();\n\n map.insert(\"Name\".into(), Value::String(socket_unit.conf.name()));\n\n map.insert(\"Status\".into(), Value::String(format!(\"{:?}\", status)));\n\n\n\n if let UnitSpecialized::Socket(sock) = &socket_unit.specialized {\n\n map.insert(\n\n \"FileDescriptorname\".into(),\n\n Value::String(sock.name.clone()),\n\n );\n\n map.insert(\n\n \"FileDescriptors\".into(),\n\n Value::Array(\n\n sock.sockets\n\n .iter()\n\n .map(|sock_conf| Value::String(format!(\"{:?}\", sock_conf.specialized)))\n\n .collect(),\n\n ),\n\n );\n\n }\n\n\n\n Value::Object(map)\n\n}\n\n\n", "file_path": "src/control/control.rs", "rank": 32, "score": 175879.752623929 }, { "content": "fn collect_from_srvc<F>(unit_table: ArcMutUnitTable, f: F) -> HashMap<i32, UnitId>\n\nwhere\n\n F: Fn(&mut HashMap<i32, UnitId>, &Service, UnitId),\n\n{\n\n unit_table\n\n .read()\n\n .unwrap()\n\n .iter()\n\n .fold(HashMap::new(), |mut map, (id, srvc_unit)| {\n\n let srvc_unit_locked = srvc_unit.lock().unwrap();\n\n if let UnitSpecialized::Service(srvc) = &srvc_unit_locked.specialized {\n\n f(&mut map, &srvc, id.clone());\n\n }\n\n map\n\n })\n\n}\n\n\n", "file_path": "src/notification_handler.rs", "rank": 33, "score": 173796.8347767285 }, { "content": "pub fn parse_install_section(\n\n mut section: ParsedSection,\n\n) -> Result<InstallConfig, ParsingErrorReason> {\n\n let wantedby = section.remove(\"WANTEDBY\");\n\n let requiredby = section.remove(\"REQUIREDBY\");\n\n\n\n if !section.is_empty() {\n\n return Err(ParsingErrorReason::UnusedSetting(\n\n section.keys().next().unwrap().to_owned(),\n\n ));\n\n }\n\n\n\n Ok(InstallConfig {\n\n wanted_by: map_tupels_to_second(wantedby.unwrap_or_default()),\n\n required_by: map_tupels_to_second(requiredby.unwrap_or_default()),\n\n })\n\n}\n\n\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 34, "score": 
165800.89772523846 }, { "content": "pub fn notify_event_fds(eventfds: &[EventFd]) {\n\n for fd in eventfds {\n\n notify_event_fd(*fd);\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"linux_eventfd\"))]\n\npub use pipe_eventfd::*;\n\n\n\n#[cfg(not(feature = \"linux_eventfd\"))]\n\nmod pipe_eventfd {\n\n use std::os::unix::io::RawFd;\n\n\n\n #[derive(Clone, Copy)]\n\n pub struct EventFd(RawFd, RawFd);\n\n // EventFd(Read,Write)\n\n\n\n impl EventFd {\n\n pub fn read_end(&self) -> RawFd {\n\n self.0\n", "file_path": "src/platform/eventfd.rs", "rank": 35, "score": 155840.66769890182 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn become_subreaper(set: bool) {\n\n unsafe {\n\n // Set subreaper to collect all zombies left behind by the services\n\n let res = if set {\n\n libc::prctl(libc::PR_SET_CHILD_SUBREAPER, 1)\n\n } else {\n\n libc::prctl(libc::PR_SET_CHILD_SUBREAPER, 0)\n\n };\n\n if res < 0 {\n\n error!(\"Couldnt set subreaper for rustysd\");\n\n return;\n\n }\n\n }\n\n}\n\n#[cfg(any(\n\n target_os = \"freebsd\",\n\n target_os = \"openbsd\",\n\n target_os = \"netbsd\",\n\n target_os = \"dragonfly\"\n\n))]\n", "file_path": "src/platform/subreaper.rs", "rank": 36, "score": 151328.3625672164 }, { "content": "type GlobalEntry = Vec<(UnitId, String, Box<dyn AsRawFd + Send + Sync>)>;\n\n\n\n#[derive(Default)]\n\npub struct FDStore {\n\n // Indexed by unit name\n\n global_sockets: HashMap<String, GlobalEntry>,\n\n service_stored_sockets: HashMap<String, HashMap<String, Vec<Box<RawFd>>>>,\n\n}\n\n\n\nimpl FDStore {\n\n pub fn global_fds_to_ids(&self) -> Vec<(RawFd, UnitId)> {\n\n self.global_sockets\n\n .values()\n\n .fold(Vec::new(), |mut acc, fds| {\n\n for (id, _, fd) in fds {\n\n acc.push((fd.as_raw_fd(), *id));\n\n }\n\n acc\n\n })\n\n }\n", "file_path": "src/fd_store.rs", "rank": 37, "score": 151215.65212078736 }, { "content": "fn parse_unix_addr(addr: &str) -> Result<String, ()> {\n\n if addr.starts_with('/') || addr.starts_with(\"./\") {\n\n 
Ok(addr.to_owned())\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/units/unit_parsing/socket_unit.rs", "rank": 38, "score": 151082.97061071132 }, { "content": "fn can_drop_groups() -> Result<bool, String> {\n\n let kernel_iface_path = std::path::PathBuf::from(\"/proc/self/setgroups\");\n\n\n\n if !kernel_iface_path.exists() {\n\n // assume true since we cant check\n\n Ok(true)\n\n } else {\n\n let mut buf = [0u8; 5];\n\n let mut file = std::fs::File::open(&kernel_iface_path).map_err(|e| {\n\n format!(\n\n \"Error while opening file: {:?} to check if we can call setgroups: {}\",\n\n kernel_iface_path, e\n\n )\n\n })?;\n\n file.read(&mut buf[..]).unwrap();\n\n if buf.eq(&ALLOW_READ) {\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n }\n\n}\n", "file_path": "src/platform/drop_privileges.rs", "rank": 40, "score": 149204.74881569284 }, { "content": "fn parse_cmdlines(raw_lines: &Vec<(u32, String)>) -> Result<Vec<Commandline>, ParsingErrorReason> {\n\n let mut cmdlines = Vec::new();\n\n for (_line, cmdline) in raw_lines {\n\n cmdlines.push(parse_cmdline(cmdline)?);\n\n }\n\n Ok(cmdlines)\n\n}\n\n\n", "file_path": "src/units/unit_parsing/service_unit.rs", "rank": 41, "score": 148172.36665435162 }, { "content": "pub fn prepare_service(\n\n srvc: &mut Service,\n\n name: &str,\n\n notification_socket_path: &std::path::PathBuf,\n\n) -> Result<(), String> {\n\n // setup socket for notifications from the service\n\n if !notification_socket_path.exists() {\n\n std::fs::create_dir_all(notification_socket_path).unwrap();\n\n }\n\n let daemon_socket_path = notification_socket_path.join(format!(\"{}.notifiy_socket\", &name));\n\n\n\n // NOTIFY_SOCKET\n\n let notify_socket_env_var = if daemon_socket_path.starts_with(\".\") {\n\n let cur_dir = std::env::current_dir().unwrap();\n\n cur_dir.join(&daemon_socket_path)\n\n } else {\n\n daemon_socket_path\n\n };\n\n\n\n if srvc.notifications.is_none() {\n", "file_path": "src/services/prepare_service.rs", "rank": 42, "score": 
147972.13751238422 }, { "content": "pub fn deactivate_units(\n\n ids_to_kill: Vec<UnitId>,\n\n killfinal: bool,\n\n run_info: ArcRuntimeInfo,\n\n) -> Result<(), UnitOperationError> {\n\n for id in ids_to_kill {\n\n deactivate_unit(id, killfinal, run_info.clone())?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/units/deactivate.rs", "rank": 43, "score": 145555.20709326718 }, { "content": "pub fn load_all_units(\n\n paths: &[PathBuf],\n\n base_id: &mut u64,\n\n target_unit: &str,\n\n) -> Result<HashMap<UnitId, Unit>, LoadingError> {\n\n let mut service_unit_table = HashMap::new();\n\n let mut socket_unit_table = HashMap::new();\n\n let mut target_unit_table = HashMap::new();\n\n for path in paths {\n\n parse_all_units(\n\n &mut service_unit_table,\n\n &mut socket_unit_table,\n\n &mut target_unit_table,\n\n path,\n\n base_id,\n\n )?;\n\n }\n\n\n\n let mut unit_table = std::collections::HashMap::new();\n\n unit_table.extend(service_unit_table);\n", "file_path": "src/units/loading.rs", "rank": 44, "score": 145555.20709326718 }, { "content": "pub fn deactivate_unit(\n\n id_to_kill: UnitId,\n\n killfinal: bool,\n\n run_info: ArcRuntimeInfo,\n\n) -> Result<(), UnitOperationError> {\n\n let unit = {\n\n let unit_table_locked = run_info.unit_table.read().unwrap();\n\n unit_table_locked.get(&id_to_kill).unwrap().clone()\n\n };\n\n let unit_locked = &mut *unit.lock().unwrap();\n\n\n\n {\n\n let status_table_locked = run_info.status_table.read().unwrap();\n\n let status = status_table_locked.get(&id_to_kill).unwrap();\n\n let status_locked = &mut *status.lock().unwrap();\n\n match *status_locked {\n\n UnitStatus::Started | UnitStatus::StartedWaitingForSocket | UnitStatus::Starting => {\n\n *status_locked = UnitStatus::Stopping;\n\n }\n\n UnitStatus::NeverStarted\n", "file_path": "src/units/deactivate.rs", "rank": 45, "score": 145555.20709326718 }, { "content": "pub fn reactivate_unit(\n\n id_to_restart: UnitId,\n\n run_info: ArcRuntimeInfo,\n\n notification_socket_path: 
std::path::PathBuf,\n\n eventfds: Arc<Vec<EventFd>>,\n\n) -> std::result::Result<(), UnitOperationError> {\n\n deactivate_unit(id_to_restart, false, run_info.clone())?;\n\n crate::units::activate_unit(\n\n id_to_restart,\n\n run_info,\n\n notification_socket_path,\n\n eventfds,\n\n true,\n\n )\n\n .map(|_| ())\n\n}\n", "file_path": "src/units/deactivate.rs", "rank": 46, "score": 145555.20709326718 }, { "content": "// TODO maybe this should be available everywhere for situations where normally a panic would occur?\n\npub fn shutdown_sequence(run_info: ArcRuntimeInfo) {\n\n std::thread::spawn(move || {\n\n trace!(\"Shutting down\");\n\n trace!(\"Get unit lock\");\n\n\n\n // Here we need to get the locks regardless of posions.\n\n // At least try to shutdown as much as possible as cleanly as possible\n\n let unit_table_locked = match run_info.unit_table.write() {\n\n Ok(lock) => lock,\n\n Err(err) => err.into_inner(),\n\n };\n\n\n\n trace!(\"Kill all units\");\n\n loop {\n\n let id = {\n\n let status_table_locked = match run_info.status_table.write() {\n\n Ok(lock) => lock,\n\n Err(err) => err.into_inner(),\n\n };\n\n if let Some(id) =\n", "file_path": "src/shutdown.rs", "rank": 47, "score": 144068.40679928553 }, { "content": "pub fn wait_for_service(\n\n srvc: &mut Service,\n\n name: &str,\n\n pid_table: ArcMutPidTable,\n\n) -> Result<(), RunCmdError> {\n\n trace!(\n\n \"[FORK_PARENT] Service: {} forked with pid: {}\",\n\n name,\n\n srvc.pid.unwrap()\n\n );\n\n\n\n let start_time = std::time::Instant::now();\n\n let duration_timeout = srvc.get_start_timeout();\n\n match srvc.service_config.srcv_type {\n\n ServiceType::Notify => {\n\n trace!(\n\n \"[FORK_PARENT] Waiting for a notification for service {}\",\n\n name\n\n );\n\n\n", "file_path": "src/services/fork_parent.rs", "rank": 48, "score": 142923.626004698 }, { "content": "#[allow(dead_code)]\n\npub fn prune_units(\n\n target_unit_name: &str,\n\n unit_table: &mut HashMap<UnitId, Unit>,\n\n) -> Result<(), String> 
{\n\n let mut ids_to_keep = Vec::new();\n\n let startunit = unit_table.values().fold(None, |mut result, unit| {\n\n if unit.conf.name() == target_unit_name {\n\n result = Some(unit.id);\n\n }\n\n result\n\n });\n\n let startunit_id = if let Some(startunit) = startunit {\n\n startunit\n\n } else {\n\n return Err(format!(\"Target unit {} not found\", target_unit_name));\n\n };\n\n\n\n find_needed_units_recursive(startunit_id, unit_table, &mut ids_to_keep);\n\n\n\n let mut ids_to_remove = Vec::new();\n", "file_path": "src/units/dependency_resolving.rs", "rank": 49, "score": 142047.54458426323 }, { "content": "pub fn deactivate_unit_recursive(\n\n id_to_kill: UnitId,\n\n killfinal: bool,\n\n run_info: ArcRuntimeInfo,\n\n) -> Result<(), UnitOperationError> {\n\n let kill_before_this = {\n\n let unit = {\n\n let unit_table_locked = run_info.unit_table.read().unwrap();\n\n unit_table_locked.get(&id_to_kill).unwrap().clone()\n\n };\n\n let unit_locked = &mut *unit.lock().unwrap();\n\n unit_locked.install.required_by.clone()\n\n };\n\n\n\n deactivate_units_recursive(kill_before_this, killfinal, run_info.clone())?;\n\n\n\n deactivate_unit(id_to_kill, killfinal, run_info.clone())\n\n}\n", "file_path": "src/units/deactivate.rs", "rank": 50, "score": 142047.54458426323 }, { "content": "pub fn deactivate_units_recursive(\n\n ids_to_kill: Vec<UnitId>,\n\n killfinal: bool,\n\n run_info: ArcRuntimeInfo,\n\n) -> Result<(), UnitOperationError> {\n\n for id in ids_to_kill {\n\n deactivate_unit_recursive(id, killfinal, run_info.clone())?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/units/deactivate.rs", "rank": 51, "score": 142047.54458426323 }, { "content": "pub fn service_exit_handler(\n\n pid: nix::unistd::Pid,\n\n code: ChildTermination,\n\n run_info: ArcRuntimeInfo,\n\n notification_socket_path: std::path::PathBuf,\n\n eventfds: &[EventFd],\n\n) -> Result<(), String> {\n\n trace!(\"Exit handler with pid: {}\", pid);\n\n\n\n // Handle exiting of helper processes and oneshot 
processes\n\n {\n\n let pid_table_locked = &mut *run_info.pid_table.lock().unwrap();\n\n let entry = pid_table_locked.get(&pid);\n\n match entry {\n\n Some(entry) => match entry {\n\n PidEntry::Service(_id, _srvctype) => {\n\n // ignore at this point, will be handled below\n\n }\n\n PidEntry::Helper(_id, srvc_name) => {\n\n trace!(\n", "file_path": "src/services/service_exit_handler.rs", "rank": 52, "score": 141898.8050675012 }, { "content": "fn close_all_unneeded_fds(_srvc: &mut Service, _fd_store: &FDStore) {\n\n // This is not really necessary since we mark all fds with FD_CLOEXEC but just to be safe...\n\n // TODO either shift to fd store or delete\n\n //for (id, sock) in srvc.service_config.unwrap().sockets {\n\n // //trace!(\"[FORK_CHILD {}] CLOSE FDS FOR SOCKET: {}\", name, sock.name);\n\n // if !srvc.socket_ids.contains(id) {\n\n // for conf in &sock.sockets {\n\n // match &conf.fd {\n\n // Some(fd) => {\n\n // let fd: i32 = (**fd).as_raw_fd();\n\n // nix::unistd::close(fd).unwrap();\n\n // //trace!(\"[FORK_CHILD {}] DO CLOSE FD: {}\", name, fd);\n\n // }\n\n // None => {\n\n // //this should not happen but if it does its not too bad\n\n // }\n\n // }\n\n // }\n\n // }\n\n //}\n\n}\n\n\n", "file_path": "src/services/fork_child.rs", "rank": 53, "score": 141709.1015936577 }, { "content": "pub fn parse_unit_section(\n\n mut section: ParsedSection,\n\n path: &PathBuf,\n\n) -> Result<UnitConfig, ParsingErrorReason> {\n\n let wants = section.remove(\"WANTS\");\n\n let requires = section.remove(\"REQUIRES\");\n\n let after = section.remove(\"AFTER\");\n\n let before = section.remove(\"BEFORE\");\n\n let description = section.remove(\"DESCRIPTION\");\n\n\n\n if !section.is_empty() {\n\n return Err(ParsingErrorReason::UnusedSetting(\n\n section.keys().next().unwrap().to_owned(),\n\n ));\n\n }\n\n\n\n Ok(UnitConfig {\n\n filepath: path.clone(),\n\n description: description.map(|x| (x[0]).1.clone()).unwrap_or_default(),\n\n wants: 
map_tupels_to_second(wants.unwrap_or_default()),\n\n requires: map_tupels_to_second(requires.unwrap_or_default()),\n\n after: map_tupels_to_second(after.unwrap_or_default()),\n\n before: map_tupels_to_second(before.unwrap_or_default()),\n\n })\n\n}\n\n\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 54, "score": 141516.56043803456 }, { "content": "/// Activates a new unit by\n\n/// 1. (not yet but will be) checking the units referenced by this new unit\n\n/// 1. inserting it into the unit_table of run_info\n\n/// 1. activate the unit\n\n/// 1. removing the unit again if the activation fails\n\npub fn insert_new_units(\n\n new_units: HashMap<units::UnitId, units::Unit>,\n\n run_info: units::ArcRuntimeInfo,\n\n) -> Result<(), String> {\n\n // TODO check if new unit only refs existing units\n\n // TODO check if all ref'd units are not failed\n\n {\n\n let unit_table_locked = &mut *run_info.unit_table.write().unwrap();\n\n trace!(\"Check all names exist\");\n\n check_all_names_exist(&new_units, &unit_table_locked)?;\n\n\n\n for (new_id, mut new_unit) in new_units.into_iter() {\n\n trace!(\"Add new unit: {}\", new_unit.conf.name());\n\n // Setup relations of before <-> after / requires <-> requiredby\n\n for unit in unit_table_locked.values() {\n\n let mut unit_locked = unit.lock().unwrap();\n\n let name = unit_locked.conf.name();\n\n let id = unit_locked.id;\n\n if new_unit.conf.after.contains(&name) {\n\n new_unit.install.after.push(id);\n", "file_path": "src/units/insert_new.rs", "rank": 55, "score": 138791.56601388904 }, { "content": "/// Loads a unit with a given name. It searches all pathes recursively until it finds a file with a matching name\n\npub fn load_new_unit(\n\n unit_dirs: &[PathBuf],\n\n find_name: &str,\n\n next_id: u64,\n\n) -> Result<units::Unit, String> {\n\n if let Some(unit_path) = find_new_unit_path(unit_dirs, find_name)? 
{\n\n let content = fs::read_to_string(&unit_path).map_err(|e| {\n\n format!(\n\n \"{}\",\n\n units::ParsingError::new(\n\n units::ParsingErrorReason::from(Box::new(e)),\n\n unit_path.clone()\n\n )\n\n )\n\n })?;\n\n let parsed = units::parse_file(&content)\n\n .map_err(|e| format!(\"{}\", units::ParsingError::new(e, unit_path.clone())))?;\n\n let unit = if find_name.ends_with(\".service\") {\n\n units::parse_service(\n\n parsed,\n", "file_path": "src/units/insert_new.rs", "rank": 56, "score": 138789.2605239323 }, { "content": "pub fn get_next_call(source: &mut dyn std::io::Read) -> serde_json::Result<Result<Call, String>> {\n\n match serde_json::from_reader(source) {\n\n Ok(v) => {\n\n let v: Value = v;\n\n Ok(Call::from_json(&v))\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "src/control/jsonrpc2.rs", "rank": 57, "score": 138380.84879767895 }, { "content": "pub fn service_exit_handler_new_thread(\n\n pid: nix::unistd::Pid,\n\n code: ChildTermination,\n\n run_info: ArcRuntimeInfo,\n\n notification_socket_path: std::path::PathBuf,\n\n eventfds: Vec<EventFd>,\n\n) {\n\n std::thread::spawn(move || {\n\n if let Err(e) =\n\n service_exit_handler(pid, code, run_info, notification_socket_path, &eventfds)\n\n {\n\n error!(\"{}\", e);\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/services/service_exit_handler.rs", "rank": 58, "score": 136550.10669038352 }, { "content": "fn find_unit_with_name(unit_name: &str, unit_table_locked: &UnitTable) -> Option<Arc<Mutex<Unit>>> {\n\n trace!(\"Find unit for name: {}\", unit_name);\n\n let mut srvc: Vec<_> = unit_table_locked\n\n .values()\n\n .filter(|unit| {\n\n let name = unit.lock().unwrap().conf.name();\n\n unit_name.starts_with(&name) && unit.lock().unwrap().is_service()\n\n })\n\n .cloned()\n\n .collect();\n\n if srvc.len() != 1 {\n\n None\n\n } else {\n\n Some(srvc.remove(0))\n\n }\n\n}\n\n\n", "file_path": "src/control/control.rs", "rank": 59, "score": 136152.37261603534 }, { "content": "fn 
setup_env_vars(socket_names: Vec<String>, notify_socket_env_var: &str) {\n\n // The following two lines do deadlock after fork and before exec... I would have loved to just use these\n\n // This has probably something to do with the global env_lock() that is being used in the std\n\n // std::env::set_var(\"LISTEN_FDS\", format!(\"{}\", srvc.file_descriptors.len()));\n\n // std::env::set_var(\"LISTEN_PID\", format!(\"{}\", pid));\n\n\n\n // so lets use some unsafe instead, and use the same libc::setenv that the std uses but we dont care about the lock\n\n // This is the only thread in this process that is still running so we dont need any lock\n\n\n\n // TODO Maybe it would be better to have a simple wrapper that we can exec with a few sensible args\n\n // 1. list filedescriptors to keep open (maybe not event that. FD handling can be done here probably?)\n\n // 2. at least the number of fds\n\n // 3. the actual executable that should be run + their args\n\n //\n\n // This wrapper then does:\n\n // 1. Maybe close and dup2 fds\n\n // 2. Set appropriate env variables\n\n // 3. 
exec the actual executable we are trying to start here\n\n\n\n // This is all just that complicated because systemd promises to pass the correct PID in the env-var LISTEN_PID...\n", "file_path": "src/services/fork_child.rs", "rank": 60, "score": 135635.87540244125 }, { "content": "fn start_service_with_filedescriptors(\n\n srvc: &mut Service,\n\n name: &str,\n\n fd_store: &FDStore,\n\n) -> Result<(), RunCmdError> {\n\n // check if executable even exists\n\n let cmd = std::path::PathBuf::from(&srvc.service_config.exec.cmd);\n\n if !cmd.exists() {\n\n error!(\n\n \"The service {} specified an executable that does not exist: {:?}\",\n\n name, &srvc.service_config.exec.cmd\n\n );\n\n return Err(RunCmdError::SpawnError(\n\n srvc.service_config.exec.cmd.clone(),\n\n format!(\"Executable does not exist\"),\n\n ));\n\n }\n\n if !cmd.is_file() {\n\n error!(\n\n \"The service {} specified an executable that is not a file: {:?}\",\n", "file_path": "src/services/start_service.rs", "rank": 61, "score": 135119.03644429587 }, { "content": "fn prepare_exec_args(srvc: &Service) -> (std::ffi::CString, Vec<std::ffi::CString>) {\n\n let cmd = std::ffi::CString::new(srvc.service_config.exec.cmd.as_str()).unwrap();\n\n\n\n let exec_name = std::path::PathBuf::from(&srvc.service_config.exec.cmd);\n\n let exec_name = exec_name.file_name().unwrap();\n\n let exec_name: Vec<u8> = exec_name.to_str().unwrap().bytes().collect();\n\n let exec_name = std::ffi::CString::new(exec_name).unwrap();\n\n\n\n let mut args = Vec::new();\n\n args.push(exec_name);\n\n\n\n for word in &srvc.service_config.exec.args {\n\n args.push(std::ffi::CString::new(word.as_str()).unwrap());\n\n }\n\n\n\n (cmd, args)\n\n}\n\n\n", "file_path": "src/services/fork_child.rs", "rank": 62, "score": 134607.28813590147 }, { "content": "pub fn make_seqpacket_socket(path: &std::path::PathBuf) -> Result<RawFd, String> {\n\n //let addr_family = nix::sys::socket::AddressFamily::Unix;\n\n //let sock_type = 
nix::sys::socket::SockType::SeqPacket;\n\n //let flags = nix::sys::socket::SockFlag::empty(); //flags can be set by using the fnctl calls later if necessary\n\n let protocol = 0; // not really important, used to choose protocol but we dont support sockets where thats relevant\n\n\n\n let unix_addr = nix::sys::socket::UnixAddr::new(path).unwrap();\n\n let sock_addr = nix::sys::socket::SockAddr::Unix(unix_addr);\n\n\n\n let fd = unsafe { libc::socket(libc::AF_UNIX, libc::SOCK_SEQPACKET, protocol) };\n\n if fd < 0 {\n\n return Err(format!(\n\n \"Could not opensequential packet socket. Result was: {}\",\n\n fd,\n\n ));\n\n }\n\n // then bind the socket to the path\n\n nix::sys::socket::bind(fd, &sock_addr).unwrap();\n\n // then make the socket an accepting one\n\n nix::sys::socket::listen(fd, 128).unwrap();\n\n\n\n Ok(fd)\n\n}\n", "file_path": "src/platform/unix_common.rs", "rank": 63, "score": 134260.67455497838 }, { "content": "#[cfg(feature = \"cgroups\")]\n\nfn make_cgroup_path(srvc_name: &str) -> Result<PathBuf, ParsingErrorReason> {\n\n let rustysd_cgroup =\n\n crate::platform::cgroups::get_own_freezer(&PathBuf::from(\"/sys/fs/cgroup\"))\n\n .map_err(|e| ParsingErrorReason::Generic(format!(\"Couldnt get own cgroup: {}\", e)))?;\n\n let service_cgroup = rustysd_cgroup.join(srvc_name);\n\n trace!(\n\n \"Service {} will be moved into cgroup: {:?}\",\n\n srvc_name,\n\n service_cgroup\n\n );\n\n Ok(service_cgroup)\n\n}\n\n\n", "file_path": "src/units/unit_parsing/service_unit.rs", "rank": 64, "score": 133808.58453367878 }, { "content": "#[cfg(not(feature = \"cgroups\"))]\n\nfn make_cgroup_path(_srvc_name: &str) -> Result<PathBuf, ParsingErrorReason> {\n\n // doesnt matter, wont be used anyways\n\n Ok(PathBuf::from(\"/ree\"))\n\n}\n\n\n", "file_path": "src/units/unit_parsing/service_unit.rs", "rank": 65, "score": 133808.58453367878 }, { "content": "pub fn after_fork_child(\n\n srvc: &mut Service,\n\n name: &str,\n\n fd_store: &FDStore,\n\n notify_socket_env_var: 
&str,\n\n new_stdout: RawFd,\n\n new_stderr: RawFd,\n\n) {\n\n if let Err(e) = super::fork_os_specific::post_fork_os_specific(srvc) {\n\n eprintln!(\"[FORK_CHILD {}] postfork error: {}\", name, e);\n\n std::process::exit(1);\n\n }\n\n\n\n // DO NOT USE THE LOGGER HERE. It aquires a global lock which might be held at the time of forking\n\n // But since this is the only thread that is in the child process the lock will never be released!\n\n move_into_new_process_group();\n\n\n\n // no more logging after this point!\n\n // The filedescriptor used by the logger might have been duped to another\n\n // one and logging into that one would be.... bad\n", "file_path": "src/services/fork_child.rs", "rank": 66, "score": 132886.86830404884 }, { "content": "pub fn parse_file(content: &str) -> Result<ParsedFile, ParsingErrorReason> {\n\n let mut sections = HashMap::new();\n\n let lines: Vec<&str> = content.split('\\n').collect();\n\n let lines: Vec<_> = lines.iter().map(|s| s.trim()).collect();\n\n\n\n let mut lines_left = &lines[..];\n\n\n\n // remove lines before the first section\n\n while !lines_left.is_empty() && !lines_left[0].starts_with('[') {\n\n lines_left = &lines_left[1..];\n\n }\n\n let mut current_section_name: String = lines_left[0].into();\n\n let mut current_section_lines = Vec::new();\n\n\n\n lines_left = &lines_left[1..];\n\n\n\n while !lines_left.is_empty() {\n\n let line = lines_left[0];\n\n\n\n if line.starts_with('[') {\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 67, "score": 132869.42212995279 }, { "content": "fn activate_units_recursive(\n\n ids_to_start: Vec<UnitId>,\n\n run_info: ArcRuntimeInfo,\n\n tpool: ThreadPool,\n\n notification_socket_path: std::path::PathBuf,\n\n eventfds: Arc<Vec<EventFd>>,\n\n errors: Arc<Mutex<Vec<UnitOperationError>>>,\n\n) {\n\n for id in ids_to_start {\n\n let run_info_copy = run_info.clone();\n\n let tpool_copy = tpool.clone();\n\n let note_sock_copy = notification_socket_path.clone();\n\n let 
eventfds_copy = eventfds.clone();\n\n let errors_copy = errors.clone();\n\n tpool.execute(move || {\n\n let run_info_copy2 = run_info_copy.clone();\n\n let tpool_copy2 = tpool_copy.clone();\n\n let note_sock_copy2 = note_sock_copy.clone();\n\n let eventfds_copy2 = eventfds_copy.clone();\n\n let errors_copy2 = errors_copy.clone();\n", "file_path": "src/units/activate.rs", "rank": 68, "score": 132376.28255902723 }, { "content": "#[cfg(any(target_os = \"linux\", target_os = \"freebsd\"))]\n\npub fn getgrnam_r(groupname: &str) -> Result<GroupEntry, String> {\n\n let username_i8 = groupname.bytes().map(|x| x as i8).collect::<Vec<_>>();\n\n let pointer: *const i8 = username_i8.as_ptr();\n\n let mut buf_size = 32;\n\n let mut group: libc::group = libc::group {\n\n gr_name: std::ptr::null_mut(),\n\n gr_passwd: std::ptr::null_mut(),\n\n gr_gid: 0,\n\n gr_mem: std::ptr::null_mut(),\n\n };\n\n\n\n let group_ptr = &mut group;\n\n let group_ptr_ptr = &mut (group_ptr as *mut libc::group);\n\n loop {\n\n let mut buf = Vec::with_capacity(buf_size);\n\n buf.resize(buf_size, 0i8);\n\n\n\n let errno = unsafe {\n\n libc::getgrnam_r(\n\n pointer,\n", "file_path": "src/platform/grnam.rs", "rank": 69, "score": 131015.34067385248 }, { "content": "#[cfg(any(target_os = \"freebsd\", target_os = \"linux\"))]\n\npub fn getpwnam_r(username: &str) -> Result<PwEntry, String> {\n\n let username_i8 = username.bytes().map(|x| x as i8).collect::<Vec<_>>();\n\n let pointer: *const i8 = username_i8.as_ptr();\n\n let mut buf_size = 32;\n\n let mut user = make_new_pw();\n\n let user_ptr = &mut user;\n\n let user_ptr_ptr = &mut (user_ptr as *mut libc::passwd);\n\n loop {\n\n let mut buf = Vec::with_capacity(buf_size);\n\n buf.resize(buf_size, 0i8);\n\n\n\n let errno = unsafe {\n\n libc::getpwnam_r(pointer, user_ptr, buf.as_mut_ptr(), buf_size, user_ptr_ptr)\n\n };\n\n\n\n if user_ptr_ptr.is_null() {\n\n // error case\n\n if errno == libc::ERANGE {\n\n // need more bytes in buf\n\n buf_size = buf_size * 
2;\n", "file_path": "src/platform/pwnam.rs", "rank": 70, "score": 131015.34067385248 }, { "content": "#[cfg(not(any(target_os = \"linux\", target_os = \"freebsd\")))]\n\npub fn getgrnam_r(_groupname: &str) -> Result<GroupEntry, String> {\n\n compile_error!(\"getgrnam_r is not yet implemented for this platform\");\n\n}\n", "file_path": "src/platform/grnam.rs", "rank": 71, "score": 131015.34067385248 }, { "content": "#[cfg(not(any(target_os = \"linux\", target_os = \"freebsd\")))]\n\npub fn getpwnam_r(_username: &str) -> Result<PwEntry, String> {\n\n compile_error!(\"getpwnam_r is not yet implemented for this platform\");\n\n}\n", "file_path": "src/platform/pwnam.rs", "rank": 72, "score": 131015.34067385248 }, { "content": "pub fn sanity_check_dependencies(\n\n unit_table: &HashMap<UnitId, Unit>,\n\n) -> Result<(), SanityCheckError> {\n\n let mut root_ids = Vec::new();\n\n for unit in unit_table.values() {\n\n if unit.install.after.len() == 0 {\n\n root_ids.push(unit.id);\n\n }\n\n }\n\n // check whether there are cycles in the startup sequence\n\n let mut finished_ids = HashMap::new();\n\n let mut not_finished_ids: HashMap<_, _> =\n\n unit_table.keys().copied().map(|id| (id, ())).collect();\n\n let mut circles = Vec::new();\n\n\n\n loop {\n\n //if no nodes left -> no cycles\n\n let root_id = if not_finished_ids.len() == 0 {\n\n break;\n\n } else {\n", "file_path": "src/units/sanity_check.rs", "rank": 73, "score": 128864.32852449661 }, { "content": "fn parse_socket_section(\n\n mut section: ParsedSection,\n\n) -> Result<(String, Vec<String>, Vec<SocketConfig>), ParsingErrorReason> {\n\n let fdname = section.remove(\"FILEDESCRIPTORNAME\");\n\n let services = section.remove(\"SERVICE\");\n\n let streams = section.remove(\"LISTENSTREAM\");\n\n let datagrams = section.remove(\"LISTENDATAGRAM\");\n\n let seqpacks = section.remove(\"LISTENSEQUENTIALPACKET\");\n\n let fifos = section.remove(\"LISTENFIFO\");\n\n\n\n if !section.is_empty() {\n\n return 
Err(ParsingErrorReason::UnusedSetting(\n\n section.keys().next().unwrap().to_owned(),\n\n ));\n\n }\n\n let fdname = match fdname {\n\n None => None,\n\n Some(mut vec) => {\n\n if vec.len() > 1 {\n\n return Err(ParsingErrorReason::SettingTooManyValues(\n", "file_path": "src/units/unit_parsing/socket_unit.rs", "rank": 74, "score": 127661.22326801396 }, { "content": "pub fn accept_control_connections_unix_socket(\n\n run_info: ArcRuntimeInfo,\n\n notification_socket_path: std::path::PathBuf,\n\n source: std::os::unix::net::UnixListener,\n\n) {\n\n std::thread::spawn(move || loop {\n\n let stream = Box::new(source.accept().unwrap().0);\n\n listen_on_commands(stream, run_info.clone(), notification_socket_path.clone())\n\n });\n\n}\n\n\n", "file_path": "src/control/control.rs", "rank": 75, "score": 127044.67813636376 }, { "content": "fn dup_fds(name: &str, sockets: Vec<RawFd>) -> Result<(), String> {\n\n // start at 3. 0,1,2 are stdin,stdout,stderr\n\n let file_desc_offset = 3;\n\n let mut fd_idx = 0;\n\n\n\n for old_fd in sockets {\n\n let new_fd = file_desc_offset + fd_idx;\n\n let actual_new_fd = if new_fd as i32 != old_fd {\n\n //ignore output. 
newfd might already be closed.\n\n // TODO check for actual errors other than bad_fd\n\n let _ = nix::unistd::close(new_fd as i32);\n\n let actual_new_fd = nix::unistd::dup2(old_fd, new_fd as i32)\n\n .map_err(|e| format!(\"Error while duping fd: {}\", e))?;\n\n let _ = nix::unistd::close(old_fd as i32);\n\n actual_new_fd\n\n } else {\n\n new_fd\n\n };\n\n if new_fd != actual_new_fd {\n\n panic!(\n", "file_path": "src/services/fork_child.rs", "rank": 76, "score": 126862.27439455799 }, { "content": "pub fn make_result_response(id: Option<Value>, result: Value) -> Value {\n\n let mut response = serde_json::Map::new();\n\n response.insert(\"jsonrpc\".into(), \"2.0\".into());\n\n response.insert(\"result\".into(), result);\n\n if let Some(id) = id {\n\n response.insert(\"id\".into(), id);\n\n }\n\n\n\n Value::Object(response)\n\n}\n\n\n\npub const PARSE_ERROR: i64 = -32700;\n\npub const INVALID_REQUEST_ERROR: i64 = -32600;\n\npub const METHOD_NOT_FOUND_ERROR: i64 = -32601;\n\npub const INVALID_PARAMS_ERROR: i64 = -32602;\n\npub const SERVER_ERROR: i64 = -32000;\n\n\n\n// not needed right now\n\n#[allow(dead_code)]\n\npub const INTERNAL_ERROR: i64 = -32603;\n\n\n\npub struct Error {\n\n code: i64,\n\n message: String,\n\n data: Option<Value>,\n\n}\n\n\n", "file_path": "src/control/jsonrpc2.rs", "rank": 77, "score": 124044.99935074673 }, { "content": "pub fn make_error_response(id: Option<Value>, error: Error) -> Value {\n\n let mut json_err = serde_json::Map::new();\n\n json_err.insert(\n\n \"code\".into(),\n\n Value::Number(serde_json::Number::from(error.code)),\n\n );\n\n json_err.insert(\"message\".into(), Value::String(error.message.clone()));\n\n\n\n if let Some(data) = error.data {\n\n json_err.insert(\"data\".into(), data.clone());\n\n }\n\n\n\n let mut response = serde_json::Map::new();\n\n response.insert(\"jsonrpc\".into(), \"2.0\".into());\n\n response.insert(\"error\".into(), Value::Object(json_err));\n\n if let Some(id) = id {\n\n 
response.insert(\"id\".into(), id);\n\n }\n\n\n\n Value::Object(response)\n\n}\n\n\n", "file_path": "src/control/jsonrpc2.rs", "rank": 78, "score": 124044.99935074673 }, { "content": "pub fn parse_section(lines: &[&str]) -> ParsedSection {\n\n let mut entries: ParsedSection = HashMap::new();\n\n\n\n let mut entry_number = 0;\n\n for line in lines {\n\n //ignore comments\n\n if line.starts_with('#') {\n\n continue;\n\n }\n\n\n\n //check if this is a key value pair\n\n let pos = if let Some(pos) = line.find(|c| c == '=') {\n\n pos\n\n } else {\n\n continue;\n\n };\n\n let (name, value) = line.split_at(pos);\n\n\n\n let value = value.trim_start_matches('=');\n\n let value = value.trim();\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 79, "score": 122792.24113964441 }, { "content": "pub fn get_file_list(path: &PathBuf) -> Result<Vec<std::fs::DirEntry>, ParsingErrorReason> {\n\n if !path.exists() {\n\n return Err(ParsingErrorReason::Generic(format!(\n\n \"Path to services does not exist: {:?}\",\n\n path\n\n )));\n\n }\n\n if !path.is_dir() {\n\n return Err(ParsingErrorReason::Generic(format!(\n\n \"Path to services does not exist: {:?}\",\n\n path\n\n )));\n\n }\n\n let mut files: Vec<_> = match std::fs::read_dir(path) {\n\n Ok(iter) => {\n\n let files_vec = iter.fold(Ok(Vec::new()), |acc, file| {\n\n if let Ok(mut files) = acc {\n\n match file {\n\n Ok(f) => {\n\n files.push(f);\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 80, "score": 117536.49922240071 }, { "content": "fn parse_cmdline(raw_line: &str) -> Result<Commandline, ParsingErrorReason> {\n\n let mut split = shlex::split(raw_line).ok_or(ParsingErrorReason::Generic(format!(\n\n \"Could not parse cmdline: {}\",\n\n raw_line\n\n )))?;\n\n let mut cmd = split.remove(0);\n\n\n\n let mut prefixes = Vec::new();\n\n loop {\n\n let prefix = match &cmd[..1] {\n\n \"-\" => {\n\n cmd = cmd[1..].to_owned();\n\n CommandlinePrefix::Minus\n\n }\n\n \"+\" => {\n\n return 
Err(ParsingErrorReason::UnsupportedSetting(\n\n \"The prefix '+' for cmdlines is currently not supported\".into(),\n\n ));\n\n //cmd = cmd[1..].to_owned();\n\n //CommandlinePrefix::Plus\n", "file_path": "src/units/unit_parsing/service_unit.rs", "rank": 81, "score": 117267.299395213 }, { "content": "pub fn make_error(code: i64, message: String, data: Option<Value>) -> Error {\n\n Error {\n\n code,\n\n message,\n\n data,\n\n }\n\n}\n\n\n", "file_path": "src/control/jsonrpc2.rs", "rank": 82, "score": 116239.42704445726 }, { "content": "fn cleanup_removed_ids(\n\n units: &mut std::collections::HashMap<UnitId, Unit>,\n\n removed_ids: &Vec<UnitId>,\n\n) {\n\n for unit in units.values_mut() {\n\n for id in removed_ids {\n\n while let Some(idx) = unit.install.after.iter().position(|el| *el == *id) {\n\n unit.install.after.remove(idx);\n\n }\n\n while let Some(idx) = unit.install.before.iter().position(|el| *el == *id) {\n\n unit.install.before.remove(idx);\n\n }\n\n while let Some(idx) = unit.install.wants.iter().position(|el| *el == *id) {\n\n unit.install.wants.remove(idx);\n\n }\n\n while let Some(idx) = unit.install.requires.iter().position(|el| *el == *id) {\n\n unit.install.requires.remove(idx);\n\n }\n\n while let Some(idx) = unit.install.wanted_by.iter().position(|el| *el == *id) {\n\n unit.install.wanted_by.remove(idx);\n\n }\n\n while let Some(idx) = unit.install.required_by.iter().position(|el| *el == *id) {\n\n unit.install.required_by.remove(idx);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/units/loading.rs", "rank": 83, "score": 112596.96489309613 }, { "content": "fn use_v2(cgroup_path: &std::path::PathBuf) -> bool {\n\n let freeze_file = cgroup_path.join(\"cgroup.freeze\");\n\n let exists = freeze_file.exists();\n\n trace!(\"{:?} exists: {}\", freeze_file, exists);\n\n exists\n\n}\n\n\n\nconst OWN_CGROUP_NAME: &str = \"rustysd_self\";\n\n\n", "file_path": "src/platform/cgroups/mod.rs", "rank": 84, "score": 110950.88350509008 }, { "content": "enum 
WaitResult {\n\n TimedOut,\n\n InTime(std::io::Result<crate::signal_handler::ChildTermination>),\n\n}\n\n\n", "file_path": "src/services/services.rs", "rank": 85, "score": 108400.34298089647 }, { "content": "/// This sequence should drop all privileges the root process might have had. I think this is how systemd does it too.\n\n/// They additionally have some checking if setgroups is possible\n\n///\n\n/// I dont think this needs to explicitly drop any capabilities on linux. At least thats how I understood the man page\n\npub fn drop_privileges(gid: Gid, supp_gids: &Vec<Gid>, uid: Uid) -> Result<(), String> {\n\n setresgid(gid, gid, gid).map_err(|e| format!(\"Error while setting groupid: {}\", e))?;\n\n maybe_set_groups(supp_gids)?;\n\n setresuid(uid, uid, uid).map_err(|e| format!(\"Error while setting userid: {}\", e))?;\n\n Ok(())\n\n}\n\n\n\nconst ALLOW_READ: [u8; 5] = [b'a', b'l', b'l', b'o', b'w'];\n\n\n", "file_path": "src/platform/drop_privileges.rs", "rank": 86, "score": 108184.63143803498 }, { "content": "fn parse_timeout(descr: &str) -> Timeout {\n\n if descr.to_uppercase() == \"INFINITY\" {\n\n Timeout::Infinity\n\n } else {\n\n match descr.parse::<u64>() {\n\n Ok(secs) => Timeout::Duration(std::time::Duration::from_secs(secs)),\n\n Err(_) => {\n\n let mut sum = 0;\n\n let split = descr.split(' ').collect::<Vec<_>>();\n\n for t in &split {\n\n if t.ends_with(\"min\") {\n\n let mins = t[0..t.len() - 3].parse::<u64>().unwrap();\n\n sum += mins * 60;\n\n } else if t.ends_with(\"hrs\") {\n\n let hrs = t[0..t.len() - 3].parse::<u64>().unwrap();\n\n sum += hrs * 60 * 60;\n\n } else if t.ends_with(\"s\") {\n\n let secs = t[0..t.len() - 1].parse::<u64>().unwrap();\n\n sum += secs;\n\n }\n\n }\n\n Timeout::Duration(std::time::Duration::from_secs(sum))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/units/unit_parsing/service_unit.rs", "rank": 87, "score": 107291.80655138563 }, { "content": "pub fn handle_signals(\n\n signals: Signals,\n\n run_info: 
ArcRuntimeInfo,\n\n notification_socket_path: std::path::PathBuf,\n\n eventfds: Vec<EventFd>,\n\n) {\n\n loop {\n\n // Pick up new signals\n\n for signal in signals.forever() {\n\n match signal as libc::c_int {\n\n signal_hook::SIGCHLD => {\n\n std::iter::from_fn(get_next_exited_child)\n\n .take_while(Result::is_ok)\n\n .for_each(|val| {\n\n let note_sock_path = notification_socket_path.clone();\n\n let eventfds_clone = eventfds.clone();\n\n let run_info_clone = run_info.clone();\n\n match val {\n\n Ok((pid, code)) => services::service_exit_handler_new_thread(\n\n pid,\n", "file_path": "src/signal_handler.rs", "rank": 88, "score": 105697.36962830494 }, { "content": "pub fn execute_command(\n\n cmd: Command,\n\n run_info: ArcRuntimeInfo,\n\n notification_socket_path: std::path::PathBuf,\n\n) -> Result<serde_json::Value, String> {\n\n let mut result_vec = Value::Array(Vec::new());\n\n match cmd {\n\n Command::Shutdown => {\n\n crate::shutdown::shutdown_sequence(run_info);\n\n }\n\n Command::Restart(unit_name) => {\n\n let id = if let Some(unit) =\n\n find_unit_with_name(&unit_name, &*run_info.unit_table.read().unwrap())\n\n {\n\n unit.lock().unwrap().id\n\n } else {\n\n return Err(format!(\"No unit found with name: {}\", unit_name));\n\n };\n\n\n\n crate::units::reactivate_unit(\n", "file_path": "src/control/control.rs", "rank": 89, "score": 105697.36962830494 }, { "content": "fn find_new_unit_path(unit_dirs: &[PathBuf], find_name: &str) -> Result<Option<PathBuf>, String> {\n\n for dir in unit_dirs {\n\n for entry in\n\n fs::read_dir(dir).map_err(|e| format!(\"Error while opening dir {:?}: {}\", dir, e))?\n\n {\n\n let entry = entry.unwrap();\n\n let meta = entry.metadata().unwrap();\n\n if meta.file_type().is_file() {\n\n if entry.file_name() == find_name {\n\n return Ok(Some(entry.path()));\n\n }\n\n }\n\n if meta.file_type().is_dir() {\n\n if let Some(p) = find_new_unit_path(&[entry.path()], find_name)? 
{\n\n return Ok(Some(p));\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(None)\n\n}\n\n\n", "file_path": "src/units/insert_new.rs", "rank": 90, "score": 105625.7215467683 }, { "content": "fn add_sock_srvc_relations(\n\n srvc_id: UnitId,\n\n srvc_install: &mut Install,\n\n sock_id: UnitId,\n\n sock_install: &mut Install,\n\n) {\n\n srvc_install.after.push(sock_id);\n\n srvc_install.requires.push(sock_id);\n\n sock_install.before.push(srvc_id);\n\n sock_install.required_by.push(srvc_id);\n\n}\n\n\n", "file_path": "src/units/dependency_resolving.rs", "rank": 91, "score": 105491.2378515259 }, { "content": "pub fn map_tupels_to_second<X, Y: Clone>(v: Vec<(X, Y)>) -> Vec<Y> {\n\n v.iter().map(|(_, scnd)| scnd.clone()).collect()\n\n}\n\n\n", "file_path": "src/units/unit_parsing/unit_parser.rs", "rank": 92, "score": 103301.98534605448 }, { "content": "/// kill all processes that are currently in this cgroup.\n\n/// You should use wait_frozen before or make in another way sure\n\n/// there are no more processes spawned while killing\n\npub fn kill_cgroup(\n\n cgroup_path: &std::path::PathBuf,\n\n sig: nix::sys::signal::Signal,\n\n) -> Result<(), CgroupError> {\n\n // TODO figure out how to freeze a cgroup so no new processes can be spawned while killing\n\n let pids = get_all_procs(cgroup_path)?;\n\n for pid in &pids {\n\n nix::sys::signal::kill(*pid, sig).map_err(|e| CgroupError::NixErr(e))?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/platform/cgroups/mod.rs", "rank": 93, "score": 103253.7832726845 }, { "content": "/// retrieve all pids that are currently in this cgroup\n\npub fn get_all_procs(\n\n cgroup_path: &std::path::PathBuf,\n\n) -> Result<Vec<nix::unistd::Pid>, CgroupError> {\n\n let mut pids = Vec::new();\n\n let cgroup_procs = cgroup_path.join(\"cgroup.procs\");\n\n let mut f = fs::File::open(&cgroup_procs)\n\n .map_err(|e| CgroupError::IOErr(e, format!(\"{:?}\", cgroup_procs)))?;\n\n let mut buf = String::new();\n\n f.read_to_string(&mut buf)\n\n .map_err(|e| 
CgroupError::IOErr(e, format!(\"{:?}\", cgroup_procs)))?;\n\n\n\n for pid_str in buf.split('\\n') {\n\n if pid_str.len() == 0 {\n\n break;\n\n }\n\n if let Ok(pid) = pid_str.parse::<i32>() {\n\n pids.push(nix::unistd::Pid::from_raw(pid));\n\n }\n\n }\n\n Ok(pids)\n\n}\n\n\n", "file_path": "src/platform/cgroups/mod.rs", "rank": 94, "score": 103250.1867349936 }, { "content": "#[allow(dead_code)]\n\npub fn disable_controllers(\n\n cgroup_path: &std::path::PathBuf,\n\n controllers: &Vec<String>,\n\n) -> Result<(), CgroupError> {\n\n let cgroup_subtreectl = cgroup_path.join(\"cgroup.subtree_control\");\n\n let mut f = fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(&cgroup_subtreectl)\n\n .map_err(|e| CgroupError::IOErr(e, format!(\"{:?}\", cgroup_subtreectl)))?;\n\n\n\n let mut buf = String::new();\n\n for ctl in controllers {\n\n buf.push_str(\" -\");\n\n buf.push_str(&ctl);\n\n }\n\n f.write_all(buf.as_bytes())\n\n .map_err(|e| CgroupError::IOErr(e, format!(\"{:?}\", cgroup_subtreectl)))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/platform/cgroups/cgroup2.rs", "rank": 95, "score": 103250.1867349936 }, { "content": "#[allow(dead_code)]\n\npub fn enable_controllers(\n\n cgroup_path: &std::path::PathBuf,\n\n controllers: &Vec<String>,\n\n) -> Result<(), CgroupError> {\n\n let cgroup_subtreectl = cgroup_path.join(\"cgroup.subtree_control\");\n\n let mut f = fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(&cgroup_subtreectl)\n\n .map_err(|e| CgroupError::IOErr(e, format!(\"{:?}\", cgroup_subtreectl)))?;\n\n\n\n let mut buf = String::new();\n\n for ctl in controllers {\n\n buf.push_str(\" +\");\n\n buf.push_str(&ctl);\n\n }\n\n f.write_all(buf.as_bytes())\n\n .map_err(|e| CgroupError::IOErr(e, format!(\"{:?}\", cgroup_subtreectl)))?;\n\n Ok(())\n\n}\n\n\n\n/// disable controllers for child-cgroups\n", "file_path": "src/platform/cgroups/cgroup2.rs", "rank": 96, "score": 103250.1867349936 }, { "content": "#[derive(Debug)]\n\nenum 
SettingValue {\n\n Str(String),\n\n Array(Vec<SettingValue>),\n\n Boolean(bool),\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 97, "score": 102874.05041126374 }, { "content": "/// move a process into the cgroup. In rustysd the child process will call move_self for convenience\n\npub fn move_pid_to_cgroup(\n\n cgroup_path: &std::path::PathBuf,\n\n pid: nix::unistd::Pid,\n\n) -> Result<(), CgroupError> {\n\n let cgroup_procs = cgroup_path.join(\"cgroup.procs\");\n\n\n\n let mut f = fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(&cgroup_procs)\n\n .map_err(|e| CgroupError::IOErr(e, format!(\"{:?}\", cgroup_procs)))?;\n\n\n\n let pid_str = pid.as_raw().to_string();\n\n f.write(pid_str.as_bytes())\n\n .map_err(|e| CgroupError::IOErr(e, format!(\"{:?}\", cgroup_procs)))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/platform/cgroups/cgroup1.rs", "rank": 98, "score": 100993.98394143296 } ]
Rust
tests/roundtrip.rs
rrbutani/tower-web-protobuf
6787af73a44f5d4873d58dced0cbe11805cf7767
mod common; use common::*; use reqwest::{Client, StatusCode}; use std::io::Read; use std::net::SocketAddr; use tower_web_protobuf::MessagePlus; #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Format { Protobuf, Json, } impl Format { fn get_header(self) -> &'static str { match self { Format::Protobuf => "application/protobuf", Format::Json => "application/json", } } fn encode<M: MessagePlus>(self, data: &M) -> Vec<u8> { match self { Format::Protobuf => { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); buf } Format::Json => serde_json::to_vec_pretty(&data).unwrap(), } } fn decode<M: MessagePlus>(self, data: &[u8]) -> M { match self { Format::Protobuf => M::decode(data).unwrap(), Format::Json => serde_json::from_slice(data).unwrap(), } } } fn identity_test<T: MessagePlus + PartialEq + Clone>( uri: String, send: Format, receive: Format, socket: SocketAddr, ) -> impl Fn(T) { move |data: T| { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); let mut resp = Client::new() .get(format!("http://{}:{}{}", socket.ip(), socket.port(), uri).as_str()) .header("Content-Type", send.get_header()) .header("Accept", receive.get_header()) .body(send.encode(&data)) .send() .unwrap(); assert_eq!(resp.status(), StatusCode::OK); assert_eq!( resp.headers() .get("Content-Type") .unwrap() .to_str() .unwrap(), receive.get_header() ); let mut buf = Vec::new(); assert!(resp.read_to_end(&mut buf).is_ok()); assert_eq!(data, receive.decode(&mut buf)); } } #[test] fn identity_tests() { run_service_test((true, true), |socket| { use Format::*; const FORMATS: [Format; 2] = [Json, Protobuf]; fn endpoint_test<T: MessagePlus + PartialEq + Clone>( endpoint: &'static str, socket: SocketAddr, val: T, ) { FORMATS.iter().for_each(|inp| { FORMATS .iter() .for_each(|out| identity_test(endpoint.into(), *inp, *out, socket)(val.clone())) }); } endpoint_test( "/identity/track/", *socket, Track { name: "4′33″".into(), length: (4.0 * 60.0 + 
33.333), number: 1, id: 0, }, ); endpoint_test( "/identity/album/", *socket, Album { name: "In Colour".into(), id: 2015, album_type: 2, tracks: vec![ Track { name: "Sleep Sound".into(), length: (3 * 60 + 52) as f32, number: 2, id: 947, }, Track { name: "Loud Places".into(), length: (4 * 60 + 43) as f32, number: 8, id: 1056, }, ], }, ); }); }
mod common; use common::*; use reqwest::{Client, StatusCode}; use std::io::Read; use std::net::SocketAddr; use tower_web_protobuf::MessagePlus; #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Format { Protobuf, Json, } impl Format { fn get_header(self) -> &'static str { match self { Format::Protobuf => "application/protobuf", Format::Json => "application/json", } } fn encode<M: MessagePlus>(self, data: &M) -> Vec<u8> { match self { Format::Protobuf => { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); buf } Format::Json => serde_json::to_vec_pretty(&data).unwrap(), } } fn decode<M: MessagePlus>(self, data: &[u8]) -> M { match self { Format::Protobuf => M::decode(data).unwrap(), Format::Json => serde_json::from_slice(data).unwrap(), } } } fn identity_test<T: MessagePlus + PartialEq + Clone>( uri: String, send: Format, receive: Format, socket: SocketAddr, ) -> impl Fn(T) { move |data: T| { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); let mut resp = Client::new() .get(format!("http://{}:{}{}", socket.ip(), socket.port(), uri).as_str()) .header("Content-Type", send.get_header()) .header("Accept", receive.get_header()) .body(send.encode(&data)) .send() .unwrap(); assert_eq!(resp.status(), StatusCode::OK); assert_eq!( resp.headers() .get("Content-Type") .unwrap() .to_str() .unwrap(), receive.get_header() ); let mut buf = Vec::new(); assert!(resp.read_to_end(&mut buf).is_ok()); assert_eq!(data, receive.decode(&mut buf)); } } #[test] fn identity_tests() { run_service_test((true, true),
id: 0, }, ); endpoint_test( "/identity/album/", *socket, Album { name: "In Colour".into(), id: 2015, album_type: 2, tracks: vec![ Track { name: "Sleep Sound".into(), length: (3 * 60 + 52) as f32, number: 2, id: 947, }, Track { name: "Loud Places".into(), length: (4 * 60 + 43) as f32, number: 8, id: 1056, }, ], }, ); }); }
|socket| { use Format::*; const FORMATS: [Format; 2] = [Json, Protobuf]; fn endpoint_test<T: MessagePlus + PartialEq + Clone>( endpoint: &'static str, socket: SocketAddr, val: T, ) { FORMATS.iter().for_each(|inp| { FORMATS .iter() .for_each(|out| identity_test(endpoint.into(), *inp, *out, socket)(val.clone())) }); } endpoint_test( "/identity/track/", *socket, Track { name: "4′33″".into(), length: (4.0 * 60.0 + 33.333), number: 1,
function_block-random_span
[ { "content": "fn setup(options: (bool, bool), socket: &SocketAddr) {\n\n ServiceBuilder::new()\n\n .resource(MusicService::new())\n\n .middleware(ProtobufMiddleware::new(options.0, options.1))\n\n .run(&socket)\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/common.rs", "rank": 1, "score": 117301.4242231852 }, { "content": "fn proto_files_in_dir(dir: &'static str) -> Vec<PathBuf> {\n\n fs::read_dir(Path::new(dir))\n\n .unwrap()\n\n .filter_map(|f| f.ok())\n\n .filter(|f| f.path().extension().is_some())\n\n .filter(|f| f.path().extension().unwrap() == \"proto\")\n\n .map(|f| f.path())\n\n .collect()\n\n}\n\n\n", "file_path": "build.rs", "rank": 3, "score": 108466.42653293433 }, { "content": "pub fn run_service_test(\n\n options: (bool, bool),\n\n test_fn: impl FnOnce(&SocketAddr) + panic::UnwindSafe,\n\n) {\n\n // Setup:\n\n let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));\n\n let socket = SocketAddr::new(ip, get_next_unused_port().unwrap());\n\n\n\n std::thread::spawn(move || setup(options, &socket));\n\n\n\n let result = panic::catch_unwind(|| test_fn(&socket));\n\n\n\n // <teardown>\n\n\n\n assert!(result.is_ok())\n\n}\n", "file_path": "tests/common.rs", "rank": 4, "score": 83169.28194928638 }, { "content": "fn get_next_unused_port() -> Option<u16> {\n\n (1025..65535).find(|p| TcpListener::bind((\"127.0.0.1\", *p)).is_ok())\n\n}\n\n\n", "file_path": "tests/common.rs", "rank": 5, "score": 69850.80350796603 }, { "content": "fn parse_headers<T>(\n\n request: &HttpRequest<T>,\n\n header: HeaderName,\n\n allow_json: bool,\n\n) -> MessageStrategy {\n\n let content_type_headers = request.headers().get_all(header);\n\n\n\n // We're going to be strict about having the right header for JSON:\n\n let json = content_type_headers.iter().any(|h| h == \"application/json\");\n\n\n\n // But somewhat lenient for Protobufs since there isn't an official\n\n // thing. 
We'll take: \"application/[x-]protobuf[; <message type>]\".\n\n let (proto, name) = content_type_headers\n\n .iter()\n\n .filter_map(|h| match h.to_str() {\n\n Ok(x) => {\n\n let p = x.starts_with(\"application/protobuf\")\n\n || x.starts_with(\"application/x-protobuf\");\n\n\n\n let pair = (p, if p { x.split(';').next() } else { None });\n", "file_path": "src/middleware/protobuf/service.rs", "rank": 7, "score": 53964.037491799085 }, { "content": "//! Some auxiliary stuff for end-to-end tests:\n\n\n\nuse std::collections::HashMap;\n\nuse std::net::{IpAddr, Ipv4Addr, SocketAddr, TcpListener};\n\nuse std::panic;\n\nuse std::sync::Mutex;\n\n\n\nuse ::prost::{Enumeration, Message};\n\nuse tower_web::ServiceBuilder;\n\nuse tower_web::{impl_web, Deserialize, Serialize, *};\n\nuse tower_web_protobuf::{Proto, ProtobufMiddleware};\n\n\n\n// Messages:\n\n\n\n#[derive(Clone, PartialEq, Message, Serialize, Deserialize)]\n\npub struct Track {\n\n #[prost(string, tag = \"1\")]\n\n pub name: String,\n\n #[prost(float)]\n\n pub length: f32,\n", "file_path": "tests/common.rs", "rank": 8, "score": 50233.157288217866 }, { "content": " Single = 0,\n\n Ep = 1,\n\n Lp = 2,\n\n Playlist = 3, // 🙄\n\n}\n\n\n\n// A silly service:\n\n\n\n#[derive(Debug)]\n\npub struct MusicService {\n\n db: Mutex<HashMap<String, Album>>,\n\n}\n\n\n\nimpl MusicService {\n\n fn new() -> Self {\n\n Self {\n\n db: Mutex::new(HashMap::new()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/common.rs", "rank": 9, "score": 50227.97908659108 }, { "content": " #[prost(uint32)]\n\n pub number: u32,\n\n #[prost(uint32)]\n\n pub id: u32,\n\n}\n\n\n\n#[derive(Clone, PartialEq, Message, Serialize, Deserialize)]\n\npub struct Album {\n\n #[prost(string, tag = \"1\")]\n\n pub name: String,\n\n #[prost(uint32)]\n\n pub id: u32,\n\n #[prost(enumeration = \"AlbumType\")]\n\n pub album_type: i32,\n\n #[prost(message, repeated)]\n\n pub tracks: Vec<Track>,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Enumeration, 
Serialize, Deserialize)]\n\npub enum AlbumType {\n", "file_path": "tests/common.rs", "rank": 10, "score": 50227.9639829435 }, { "content": " fn get_album(&self, album_name: String) -> Res<Album, String> {\n\n self.db.lock()\n\n .unwrap()\n\n .get(&album_name)\n\n .map(|a| a.clone().into())\n\n .ok_or(\"No such album found!\".into())\n\n }\n\n\n\n #[get(\"/track/length/\")]\n\n fn track_length(&self, track: In<Track>) -> Result<String, ()> {\n\n Ok(format!(\"{}\", track.length))\n\n }\n\n }\n\n}\n\n\n\n// Some handy helper functions:\n\n\n", "file_path": "tests/common.rs", "rank": 11, "score": 50226.24908854745 }, { "content": "//! Protobuf middleware.\n\n//!\n\n//! Parses protobuf messages coming in and encodes protobuf messages going out.\n\n//! Note that this piece of middleware only reads and sets headers; it should\n\n//! be used with the protobuf Extractor and Serializer.\n\n\n\nmod middleware;\n\nmod service;\n\n\n\n// pub use self::middleware::ProtobufMiddleware;\n\npub use self::middleware::ProtobufMiddleware;\n\npub use self::service::{Config, ProtobufService};\n", "file_path": "src/middleware/protobuf/mod.rs", "rank": 12, "score": 45592.529881086906 }, { "content": "type In<M> = Proto<M>;\n", "file_path": "tests/common.rs", "rank": 13, "score": 43012.11802629342 }, { "content": "fn main() {\n\n prost_build::Config::new()\n\n .type_attribute(\".\", \"#[derive(Serialize, Deserialize)]\")\n\n .type_attribute(\".\", \"#[serde(rename_all = \\\"snake_case\\\")]\")\n\n .type_attribute(\".\", \"#[serde(deny_unknown_fields)]\")\n\n .compile_protos(\n\n proto_files_in_dir(MESSAGE_DIR).as_slice(),\n\n &[MESSAGE_DIR.into()],\n\n )\n\n .unwrap();\n\n}\n", "file_path": "build.rs", "rank": 14, "score": 41227.15092794763 }, { "content": "type Out<M> = Result<Proto<M>, ()>;\n", "file_path": "tests/common.rs", "rank": 15, "score": 41048.34345565732 }, { "content": "#[inline]\n\nfn serialize_proto<M: MessagePlus>(message: &Proto<M>) -> Result<BytesMut, Error> {\n\n let mut 
buf = BytesMut::with_capacity(message.encoded_len());\n\n\n\n message\n\n .encode(&mut buf)\n\n .map_err(|err| {\n\n Error::new(\n\n &format!(\"{}\", err),\n\n \"Serialization Error: Insufficient Capacity\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n )\n\n })\n\n .map(|_| buf)\n\n}\n\n\n\nimpl<M> Response for Proto<M>\n\nwhere\n\n M: MessagePlus,\n\n{\n\n type Buf = Cursor<Bytes>;\n", "file_path": "src/response/proto_response.rs", "rank": 16, "score": 37789.22779850257 }, { "content": "pub fn main() {\n\n let addr = \"127.0.0.1:8080\".parse().expect(\"Invalid address\");\n\n\n\n ServiceBuilder::new()\n\n .resource(HelloWorld)\n\n // (true, true) permits JSON decoding (requests) and encoding\n\n // (responses)\n\n .middleware(ProtobufMiddleware::new(true, true))\n\n .run(&addr)\n\n .unwrap();\n\n}\n", "file_path": "examples/identity.rs", "rank": 17, "score": 37249.241033446364 }, { "content": "pub fn main() {\n\n let addr = \"127.0.0.1:8080\".parse().expect(\"Invalid address\");\n\n println!(\"Listening on http://{}\", addr);\n\n\n\n ServiceBuilder::new()\n\n .resource(HelloWorld)\n\n .middleware(ProtobufMiddleware::new(true, true))\n\n .run(&addr)\n\n .unwrap();\n\n}\n", "file_path": "examples/endpoints.rs", "rank": 18, "score": 37249.241033446364 }, { "content": "type Res<M, E> = Result<Proto<M>, E>;\n\n\n\nimpl_web! {\n\n impl MusicService {\n\n #[get(\"/identity/track/\")]\n\n fn track_ident(&self, track: In<Track>) -> Out<Track> { Ok(track) }\n\n\n\n #[get(\"/identity/album/\")]\n\n fn album_ident(&self, album: In<Album>) -> Out<Album> { Ok(album) }\n\n\n\n #[post(\"/add/album/\")]\n\n fn add_album(&self, album: In<Album>) -> Res<Album, String> {\n\n self.db.lock()\n\n .unwrap()\n\n .insert(album.name.clone(), album.move_inner())\n\n .map(|a| a.into())\n\n .ok_or(\"This is a new album!\".into())\n\n }\n\n\n\n #[get(\"/query/album/:album_name\")]\n", "file_path": "tests/common.rs", "rank": 19, "score": 36103.31156138962 }, { "content": "//! 
Types and friends used to record internal state tied to incoming requests.\n\n\n\nuse std::ops::Deref;\n\n\n\n#[derive(Clone, Debug)]\n\npub(crate) enum MessageStrategy {\n\n NamedProto(String),\n\n Proto,\n\n Json,\n\n Plaintext,\n\n}\n\n\n\nimpl MessageStrategy {\n\n pub(crate) fn content_type(&self) -> &'static str {\n\n use MessageStrategy::*;\n\n\n\n match *self {\n\n NamedProto(_) | Proto => \"application/protobuf\",\n\n Json => \"application/json\",\n\n Plaintext => \"text/plain\",\n", "file_path": "src/extensions/mod.rs", "rank": 25, "score": 24137.913262268947 }, { "content": " }\n\n }\n\n}\n\n\n\n// Extensions rely on TypeIds and simple aliases appear to have the same TypeId\n\n// as the things they alias, so we'll (ab)use the newtype pattern:\n\n#[derive(Clone, Debug)]\n\npub(crate) struct MessageParseStrategy(pub MessageStrategy);\n\n\n\nimpl Deref for MessageParseStrategy {\n\n type Target = MessageStrategy;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Default for MessageParseStrategy {\n\n fn default() -> Self {\n\n Self(MessageStrategy::Plaintext)\n", "file_path": "src/extensions/mod.rs", "rank": 26, "score": 24129.560206213126 }, { "content": " }\n\n}\n\n\n\nimpl From<MessageStrategy> for MessageParseStrategy {\n\n fn from(strat: MessageStrategy) -> Self {\n\n Self(strat)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub(crate) struct MessageEncodeStrategy(pub MessageStrategy);\n\n\n\nimpl Deref for MessageEncodeStrategy {\n\n type Target = MessageStrategy;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "src/extensions/mod.rs", "rank": 27, "score": 24129.089713716636 }, { "content": "//! 
Some handy Middleware.\n\n\n\npub mod protobuf;\n\n\n\npub use protobuf::ProtobufMiddleware;\n", "file_path": "src/middleware/mod.rs", "rank": 28, "score": 24128.388135997902 }, { "content": "impl From<MessageStrategy> for MessageEncodeStrategy {\n\n fn from(strat: MessageStrategy) -> Self {\n\n Self(strat)\n\n }\n\n}\n\n\n\nimpl Default for MessageEncodeStrategy {\n\n fn default() -> Self {\n\n Self(MessageStrategy::Plaintext)\n\n }\n\n}\n", "file_path": "src/extensions/mod.rs", "rank": 29, "score": 24126.055091164904 }, { "content": "//! Implementations of Extract and associated types.\n\n\n\npub mod proto;\n\npub use self::proto::MessageFuture;\n", "file_path": "src/extractors/mod.rs", "rank": 30, "score": 24125.204322160087 }, { "content": "//! Implements Response for Proto. This handles serializing Messages\n\n//! as sepcified by the Protobuf Middleware.\n\n\n\nmod proto_response;\n", "file_path": "src/response/mod.rs", "rank": 31, "score": 24125.200638070728 }, { "content": "//! Internal Errors.\n\n\n\nmod deserialize_error;\n\n\n\npub(crate) use deserialize_error::{DeserializeError, DeserializeErrorKind};\n", "file_path": "src/errors/mod.rs", "rank": 32, "score": 24124.928239596735 }, { "content": "//! Wrapper Types. 
For now, just `Proto` and `MessagePlus`.\n\n\n\nmod proto_types;\n\n\n\npub use self::proto_types::{MessagePlus, Proto};\n", "file_path": "src/types/mod.rs", "rank": 33, "score": 24124.801651217935 }, { "content": "use super::{Config, ProtobufService};\n\nuse crate::common::*;\n\n\n\nuse tower_web::middleware::Middleware;\n\n\n\n/// Decorate a service by converting incoming and outgoing Protobuf messages.\n\n#[derive(Debug)]\n\npub struct ProtobufMiddleware {\n\n config: Config,\n\n}\n\n\n\nimpl ProtobufMiddleware {\n\n /// Create a new `ProtobufMiddleware` instance with options.\n\n pub fn new(send_json: bool, receive_json: bool) -> ProtobufMiddleware {\n\n ProtobufMiddleware {\n\n config: Config {\n\n send_json,\n\n receive_json,\n\n },\n\n }\n", "file_path": "src/middleware/protobuf/middleware.rs", "rank": 34, "score": 22693.015511274607 }, { "content": "use crate::common::*;\n\nuse crate::extensions::{MessageEncodeStrategy, MessageParseStrategy, MessageStrategy};\n\n\n\n#[derive(Debug, Copy, Clone)]\n\n/// Configuration options for a [`ProtobufService`](struct.ProtobufService.html).\n\npub struct Config {\n\n /// Allow incoming Protobuf messages to be sent as [protobuf-compliant JSON](https://developers.google.com/protocol-buffers/docs/proto3#json).\n\n pub receive_json: bool,\n\n /// Allow outgoing Protobuf messages to be sent as [protobuf-compliant JSON](https://developers.google.com/protocol-buffers/docs/proto3#json).\n\n pub send_json: bool, // TODO: use\n\n}\n\n\n\n/// Decorates another Service by parsing incoming Protobuf messages and\n\n/// serializing outgoing Protobuf messages.\n\n///\n\n/// Incoming and outgoing messages can be sent as [protobuf-compliant JSON](https://developers.google.com/protocol-buffers/docs/proto3#json), if\n\n/// enabled in the given configuration. 
Headers are used to figure out whether\n\n/// a message is Protobuf or JSON encoded (Content-Type and Accept headers for\n\n/// receiving and sending, respectively).\n\n///\n", "file_path": "src/middleware/protobuf/service.rs", "rank": 35, "score": 22689.657571587748 }, { "content": "\n\n Some(pair)\n\n }\n\n _ => None,\n\n })\n\n .next()\n\n .unwrap_or((false, None));\n\n\n\n // TODO: a lot more error handling here. For example, specifying multiple content types or\n\n // multiple protobuf message types.\n\n\n\n // Note: for now we disregard message types in the content-type header, but\n\n // in the future we should ensure that this matches the type for the\n\n // endpoint we're hitting (TODO). This is a little tricky since we don't\n\n // have access to that information here.\n\n\n\n use self::MessageStrategy::*;\n\n match (json && allow_json, proto, name) {\n\n (_, true, Some(name)) => NamedProto(String::from(name)),\n\n (_, true, None) => Proto,\n", "file_path": "src/middleware/protobuf/service.rs", "rank": 36, "score": 22688.535172623888 }, { "content": " }\n\n}\n\n\n\nimpl Default for ProtobufMiddleware {\n\n fn default() -> Self {\n\n ProtobufMiddleware::new(true, true)\n\n }\n\n}\n\n\n\nimpl<S, ReqBody, RespBody> Middleware<S> for ProtobufMiddleware\n\nwhere\n\n S: Service<Request = HttpRequest<ReqBody>, Response = HttpResponse<RespBody>>,\n\n S::Future: Future<Item = HttpResponse<RespBody>>,\n\n S::Error: ::std::error::Error,\n\n{\n\n type Request = S::Request;\n\n type Response = S::Response;\n\n type Error = S::Error;\n\n type Service = ProtobufService<S>;\n\n\n\n fn wrap(&self, service: S) -> Self::Service {\n\n ProtobufService::new(service, self.config)\n\n }\n\n}\n", "file_path": "src/middleware/protobuf/middleware.rs", "rank": 37, "score": 22686.819630011083 }, { "content": " (true, false, _) => Json,\n\n (false, false, _) => Plaintext, // TODO: this should actually error\n\n }\n\n}\n\n\n\nimpl<S, ReqBody, RespBody> Service for 
ProtobufService<S>\n\nwhere\n\n S: Service<Request = HttpRequest<ReqBody>, Response = HttpResponse<RespBody>>,\n\n S::Future: Future<Item = HttpResponse<RespBody>>,\n\n S::Error: ::std::error::Error,\n\n{\n\n type Request = S::Request;\n\n type Response = S::Response;\n\n type Error = S::Error;\n\n type Future = ResponseFuture<S::Future>;\n\n\n\n fn poll_ready(&mut self) -> Poll<(), Self::Error> {\n\n self.inner.poll_ready()\n\n }\n\n\n", "file_path": "src/middleware/protobuf/service.rs", "rank": 38, "score": 22686.341915736448 }, { "content": " /// Modifies the request's headers to signal to the Extractor what to do.\n\n fn call(&mut self, mut request: HttpRequest<ReqBody>) -> Self::Future {\n\n use http::header::{ACCEPT, CONTENT_TYPE};\n\n\n\n // Set MessageParseStrategy:\n\n let mps = parse_headers(&request, CONTENT_TYPE, self.config.receive_json).into();\n\n request\n\n .extensions_mut()\n\n .insert::<MessageParseStrategy>(mps)\n\n .map(|prev| {\n\n // We're using extensions to record what the Extractor should do; this\n\n // is a little janky but it should be fine. This check should warn us\n\n // if somehow we overwrite a value already in extensions.\n\n //\n\n // TODO: this should use log as be marked as a warning\n\n println!(\"eek! we've been made!! 
{:?}\", prev)\n\n });\n\n\n\n // And likewise for MessageEncodeStrategy:\n\n let mes = parse_headers(&request, ACCEPT, self.config.send_json).into();\n", "file_path": "src/middleware/protobuf/service.rs", "rank": 39, "score": 22685.659782533192 }, { "content": "/// If both JSON (`application/json`) and Protobuf (`application/x-protobuf`)\n\n/// Accept headers are set, Protobuf will be preferred.\n\n#[derive(Debug)]\n\npub struct ProtobufService<S> {\n\n inner: S,\n\n config: Config,\n\n}\n\n\n\nimpl<S> ProtobufService<S> {\n\n pub(super) fn new(inner: S, config: Config) -> ProtobufService<S> {\n\n ProtobufService { inner, config }\n\n }\n\n}\n\n\n", "file_path": "src/middleware/protobuf/service.rs", "rank": 40, "score": 22684.59085749824 }, { "content": " request\n\n .extensions_mut()\n\n .insert::<MessageEncodeStrategy>(mes)\n\n .map(|prev| println!(\"TODO: Re-inserting!! {:?}\", prev));\n\n\n\n // TODO:\n\n Self::Future {\n\n response: self.inner.call(request),\n\n }\n\n }\n\n}\n", "file_path": "src/middleware/protobuf/service.rs", "rank": 41, "score": 22680.029322056373 }, { "content": "# Tower Web Protobuf\n\n\n\nProtobuf middleware and friends for [tower-web](https://github.com/carllerche/tower-web).\n\n\n\n[![Build Status](https://travis-ci.com/rrbutani/tower-web-protobuf.svg?branch=main)](https://travis-ci.com/rrbutani/tower-web-protobuf)\n\n[![Docs](https://img.shields.io/badge/docs-v0.1.0-blue.svg)](https://rrbutani.github.io/tower-web-protobuf/tower-web-protobuf/)\n\n[![Coverage Status](https://coveralls.io/repos/github/rrbutani/tower-web-protobuf/badge.svg?branch=main)](https://coveralls.io/github/rrbutani/tower-web-protobuf?branch=main)\n\n[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)\n\n\n\n## [A Quick Example](examples/identity.rs)\n\n```rust\n\nextern crate prost;\n\n#[macro_use]\n\nextern crate tower_web;\n\nextern crate tower_web_protobuf;\n\n\n\nuse prost::Message;\n\nuse 
tower_web::ServiceBuilder;\n\nuse tower_web_protobuf::{Proto, ProtobufMiddleware};\n\n\n\n// Messages:\n\n#[derive(Clone, PartialEq, Message, Serialize, Deserialize)]\n\npub struct Hello {\n\n #[prost(string, tag = \"1\")]\n\n pub name: String,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\nstruct HelloWorld;\n\n\n\ntype In<M> = Proto<M>;\n\ntype Out<M> = Result<Proto<M>, ()>;\n\n\n\nimpl_web! {\n\n impl HelloWorld {\n\n // You can provide this endpoint with either a Protobuf or JSON\n\n // encoded body. The endpoint will respond with a Protobuf or JSON\n\n // encoded `Hello` message depending on the Accept header.\n\n #[get(\"/hello/\")]\n\n fn hello(&self, hello: In<Hello>) -> Out<Hello> {\n\n Ok(hello)\n\n }\n\n }\n\n}\n\n\n\npub fn main() {\n\n let addr = \"127.0.0.1:8080\".parse().expect(\"Invalid address\");\n\n\n\n ServiceBuilder::new()\n\n .resource(HelloWorld)\n\n // (true, true) permits JSON decoding (requests) and encoding\n\n // (responses)\n\n .middleware(ProtobufMiddleware::new(true, true))\n\n .run(&addr)\n\n .unwrap();\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 42, "score": 15106.74222808668 }, { "content": "## How does it work?\n\n`tower-web-protobuf` provides a generic `Proto` type that can wrap any Protobuf message struct (really anything that implements prost's `Message` type - unfortunately this does mean that you're forced to use prost with this crate).\n\n\n\n`Proto` types have a custom `Extract` and `Response` implementation that handle serializing and deserializing protobuf messages. By default these implementations will try to parse all incoming messages as protobufs and will encode all responses as protobufs, regardless of the content-type and accept headers that were sent with the corresponding request.\n\n\n\nIn order to actually abide by the headers, we need to introduce [some middleware](src/middlware/protobuf/middleware.rs). 
Currently, the middleware allows you to enable/disable JSON support for encoding/decoding (shown above).\n\n\n\nTo be clear, it's possible to just use the `Proto` type and to ignore the middlware entirely. If your use case only involves sending and receiving protobufs (no JSON), this may even be ideal.\n\n\n\nThere's also a plaintext serialization/deserialization option based on [serde-plain](https://github.com/mitsuhiko/serde-plain).\n\n\n", "file_path": "README.md", "rank": 43, "score": 15100.670782673958 }, { "content": "## Usage\n\nFirst, add `tower-web-protobuf` to your Cargo.toml:\n\n```toml\n\n[dependencies]\n\ntower-web-protobuf = { git = \"https://github.com/rrbutani/tower-web-protobuf\" }\n\n```\n\n\n\nIn order to use the `Proto` type, your message structs must implement the traits in `MessagePlus`. This means implementing prost's `Message` and serde's `DeserializeOwned` and `Serialize`. For most use cases, this means using prost and adding `#[derive]` attributes for serde's traits.\n\n\n\nWith prost, there are two main ways to do this: add a build.rs file ([like this one](build.rs)) that uses prost_build or add `#[derive]`s on your existing structs to turn them into protobuf messages.\n\n\n\nIf you go the first route, make sure you add something like the following to derive serde's traits for your messages:\n\n```rust\n\n prost_build::Config::new()\n\n .type_attribute(\".\", \"#[derive(Serialize, Deserialize)]\")\n\n .type_attribute(\".\", \"#[serde(rename_all = \\\"snake_case\\\")]\")\n\n .type_attribute(\".\", \"#[serde(deny_unknown_fields)]\")\n\n .compile_protos(\n\n proto_files_in_dir(MESSAGE_DIR).as_slice(),\n\n &[MESSAGE_DIR.into()],\n\n )\n\n .unwrap();\n\n```\n\n\n\n[build.rs](build.rs) has the above code in context and the [endpoints example](examples/endpoints.rs) has a sample usage.\n\n\n\nAn example of the other way is shown in [example above](#A-Quick-Example) which is identical to the [identity example](examples/identity.rs). 
The key bit is the [`#[derive]` on the message](examples/identity.rs#L11).\n\n\n\nFinally, wrap your message types with `Proto` (or use the `In` and `Out` type aliases shown above) and [add the `ProtobufMiddleware`](examples/identity.rs#L42) if you so desire. It _might_ just work.\n\n\n\nError handling isn't great and logging for serialization/deserialization errors isn't quite there yet either. As is probably obvious, if you're planning to use this for important things expect trouble.\n\n\n\n<todo: replace docs with docs.rs link, add license from crates.io, add crates.io link)>\n\n<todo: link all structs to the official docs.rs doc pages>\n", "file_path": "README.md", "rank": 44, "score": 15096.906055070534 }, { "content": " ErrorParsingJson => (415, \"TODO\"),\n\n };\n\n\n\n let msg = if let Some(ref err) = self.err_message {\n\n let mut e = String::from(msg);\n\n e.push_str(\"; \");\n\n e.push_str(err.as_str());\n\n e\n\n } else {\n\n String::from(msg)\n\n };\n\n\n\n (status, msg)\n\n }\n\n}\n\n\n\nimpl From<&DeserializeError> for String {\n\n fn from(err: &DeserializeError) -> Self {\n\n if let Some(ref msg) = err.err_message {\n\n msg.clone()\n", "file_path": "src/errors/deserialize_error.rs", "rank": 45, "score": 14.546654888714249 }, { "content": " // Protobuf messages can only be extracted from a request body for now.\n\n fn requires_body(_: &CallSite) -> bool {\n\n true\n\n }\n\n}\n\n\n\nimpl<B: BufStream, M: MessagePlus> ExtractFuture for MessageFuture<B, M> {\n\n type Item = Proto<M>;\n\n\n\n fn poll(&mut self) -> Poll<(), Error> {\n\n let resp = self\n\n .collect\n\n .poll()\n\n .map_err(|_| Error::invalid_argument(&String::from(\"internal error\")));\n\n\n\n let bytes: Vec<u8> = try_ready!(resp);\n\n\n\n let msg_res: Result<M, DeserializeError> = match *self.parse_strat {\n\n MessageStrategy::NamedProto(ref name) => {\n\n // TODO: check message name\n", "file_path": "src/extractors/proto.rs", "rank": 46, "score": 12.977210672830827 }, { "content": 
"extern crate prost;\n\n#[macro_use]\n\nextern crate tower_web;\n\nextern crate tower_web_protobuf;\n\n\n\nuse prost::Message;\n\nuse tower_web::ServiceBuilder;\n\nuse tower_web_protobuf::{Proto, ProtobufMiddleware};\n\n\n\n// Messages:\n\n#[derive(Clone, PartialEq, Message, Serialize, Deserialize)]\n\npub struct Hello {\n\n #[prost(string, tag = \"1\")]\n\n pub name: String,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "examples/identity.rs", "rank": 47, "score": 12.816985965702937 }, { "content": "use core::fmt::Formatter;\n\nuse std::convert::From;\n\nuse std::fmt::Debug;\n\nuse std::ops::Deref;\n\n\n\nuse prost::Message;\n\nuse serde::{de::DeserializeOwned, Serialize};\n\n\n\n/// A wrapper struct for a protobuf message type.\n\n///\n\n/// This has to exist because `impl<T> Trait for T` requires T to be 'covered'\n\n/// by a local type (i.e. Proto<T>), when Self is used (I think). Self _is_\n\n/// used by Extract (Future: ExtractFuture<Item = Self>) which I think is why:\n\n/// ```compile_fail\n\n/// impl<M, B: BufStream> Extract<B> for M\n\n/// where\n\n/// M: 'static + Message + MessageWrapper<M>\n\n/// {\n\n/// type Future = Immediate<M>;\n\n/// }\n", "file_path": "src/types/proto_types.rs", "rank": 48, "score": 12.355082554999578 }, { "content": " impl<F, RespBody> Future for ResponseFuture<F>\n\n where\n\n F: Future<Item = HttpResponse<RespBody>>,\n\n {\n\n type Item = F::Item;\n\n type Error = F::Error;\n\n\n\n // Just pass the response through unmodified:\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n self.response.poll()\n\n }\n\n }\n\n}\n\n\n\npub(crate) mod errors;\n\npub(crate) mod extensions;\n\npub(crate) mod extractors;\n\npub(crate) mod middleware;\n\npub(crate) mod response;\n\npub(crate) mod types;\n\n\n\npub use middleware::ProtobufMiddleware;\n\npub use types::{MessagePlus, Proto};\n\n\n\n// TODO: deny missing docs\n\n// TODO: check protobuf message name with type_info (feature gated, perhaps)\n\n// TODO: with fork\n\n// 
TODO: fix Errors in Tower Web\n", "file_path": "src/lib.rs", "rank": 49, "score": 12.234700299905533 }, { "content": " } else {\n\n \"Unknown Error\".into()\n\n }\n\n }\n\n}\n\n\n\nimpl Display for DeserializeError {\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), Error> {\n\n let s = String::from(self);\n\n fmt.write_str(s.as_str())\n\n }\n\n}\n\n\n\n// impl<S> IntoCatch<S> for Error {\n\n// type Catch = Self;\n\n\n\n// fn into_catch(self) -> Self {\n\n// self\n\n// }\n\n// }\n", "file_path": "src/errors/deserialize_error.rs", "rank": 50, "score": 11.800879901529623 }, { "content": "// }\n\n\n\nimpl<M: MessagePlus> Deref for Proto<M> {\n\n type Target = M;\n\n\n\n fn deref(&self) -> &M {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<M: MessagePlus> AsRef<M> for Proto<M> {\n\n fn as_ref(&self) -> &M {\n\n self.deref()\n\n }\n\n}\n\n\n\nimpl<M: MessagePlus + Debug> Debug for Proto<M> {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), core::fmt::Error> {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl<M: MessagePlus + Clone> Clone for Proto<M> {\n\n fn clone(&self) -> Self {\n\n Self(M::clone(&self.0))\n\n }\n\n}\n", "file_path": "src/types/proto_types.rs", "rank": 51, "score": 11.348283275404329 }, { "content": "use crate::common::*;\n\nuse std::fmt::{Display, Error};\n\nuse tower_web::error::Catch;\n\nuse tower_web::error::IntoCatch; // TODO\n\nuse tower_web::Error as TowerError;\n\n\n\n#[derive(Clone, Debug)]\n\n/// Represents errors encountered when attempting to deserialize a message\n\npub(crate) enum DeserializeErrorKind {\n\n /// If a request specifies a content type other than JSON or Protobuf\n\n InvalidContentTypeForMessage,\n\n /// TODO: actually use..\n\n InvalidHeadersForMessage,\n\n /// If we hit an error while trying to parse a message as JSON\n\n ErrorParsingJson,\n\n /// If we hit an error while trying to parse a message as a Protobuf message\n\n ErrorParsingProtobuf,\n\n /// If we hit an error while trying to parse a message as Plaintext\n\n 
ErrorParsingPlaintext,\n\n}\n", "file_path": "src/errors/deserialize_error.rs", "rank": 52, "score": 10.54589130847288 }, { "content": " type Body = BytesWrapper;\n\n\n\n fn into_http<S: Serializer>(\n\n self,\n\n context: &Context<S>,\n\n ) -> Result<HttpResponse<Self::Body>, Error> {\n\n use MessageStrategy::*;\n\n\n\n let strat: MessageEncodeStrategy = context\n\n .request()\n\n .extensions()\n\n .get()\n\n .cloned()\n\n .unwrap_or_default();\n\n let buf = match *strat {\n\n NamedProto(ref name) => {\n\n // TODO: message name check\n\n serialize_proto(&self)?\n\n }\n\n Proto => serialize_proto(&self)?,\n", "file_path": "src/response/proto_response.rs", "rank": 53, "score": 9.824180473792312 }, { "content": "// Let's make a newtype around Bytes:\n\n#[doc(hidden)]\n\n#[derive(Debug)]\n\npub struct BytesWrapper(Bytes);\n\n\n\nimpl Deref for BytesWrapper {\n\n type Target = Bytes;\n\n\n\n fn deref(&self) -> &Bytes {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<Bytes> for BytesWrapper {\n\n fn from(bytes: Bytes) -> Self {\n\n BytesWrapper(bytes)\n\n }\n\n}\n\n\n\n// So that we can implement BufStream on Bytes, our way:\n", "file_path": "src/response/proto_response.rs", "rank": 54, "score": 9.745594944670376 }, { "content": "#[macro_use]\n\nextern crate tower_web;\n\nextern crate http;\n\nextern crate tokio;\n\nextern crate tower_service;\n\n\n\nextern crate prost;\n\n\n\nuse tower_web::ServiceBuilder;\n\nuse tower_web_protobuf::{Proto, ProtobufMiddleware};\n\n\n\npub mod telemetry {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/telemetry.rs\"));\n\n}\n\n\n\nuse telemetry::Position;\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "examples/endpoints.rs", "rank": 55, "score": 9.67287489220298 }, { "content": "use tower_web::codegen::CallSite;\n\nuse tower_web::extract::Context;\n\nuse tower_web::extract::Error;\n\nuse tower_web::extract::Extract;\n\nuse tower_web::extract::ExtractFuture;\n\nuse tower_web::util::buf_stream::{BufStream, Collect};\n\n\n\nuse 
crate::common::*;\n\nuse crate::errors::{DeserializeError, DeserializeErrorKind};\n\nuse crate::extensions::{MessageParseStrategy, MessageStrategy};\n\nuse crate::types::{MessagePlus, Proto};\n\n\n\n#[derive(Debug)]\n\npub struct MessageFuture<B: BufStream, M: MessagePlus> {\n\n collect: Collect<B, Vec<u8>>,\n\n message: Option<M>,\n\n parse_strat: MessageParseStrategy,\n\n}\n\n\n\nimpl<B, M> Extract<B> for Proto<M>\n", "file_path": "src/extractors/proto.rs", "rank": 56, "score": 9.615760093760613 }, { "content": "extern crate prost_build;\n\n\n\nuse std::fs;\n\nuse std::path::{Path, PathBuf};\n\n\n\nconst MESSAGE_DIR: &'static str = \"examples/messages\";\n\n\n", "file_path": "build.rs", "rank": 57, "score": 9.55411572827844 }, { "content": "// (specialization, but the hard way)\n\nimpl BufStream for BytesWrapper {\n\n type Item = Cursor<Bytes>;\n\n\n\n // This is why we need our own impl; tower-web requires the Body type in a\n\n // Response impl to be of type BufStream<Error = tower_web::error::Error>.\n\n type Error = tower_web::error::Error;\n\n\n\n fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {\n\n // This impl is borrowed basically verbatim from tower-web.\n\n // (https://docs.rs/tower-web/0.3.6/src/tower_web/util/buf_stream/bytes.rs.html#8-24)\n\n use std::mem;\n\n\n\n if self.is_empty() {\n\n return Ok(None.into());\n\n }\n\n\n\n let bytes = mem::replace(self, BytesWrapper(Bytes::new()));\n\n let buf = Cursor::new(bytes.0);\n\n\n\n Ok(Some(buf).into())\n\n }\n\n}\n\n\n\n#[inline]\n", "file_path": "src/response/proto_response.rs", "rank": 58, "score": 9.383810535895435 }, { "content": "\n\n#[derive(Clone, Debug)]\n\n/// Represents an error encountered while attempting to deserialize a message\n\npub(crate) struct DeserializeError {\n\n kind: DeserializeErrorKind,\n\n err_message: Option<String>,\n\n}\n\n\n\nimpl DeserializeError {\n\n #[allow(dead_code)]\n\n pub(crate) fn new(kind: DeserializeErrorKind) -> Self {\n\n Self {\n\n kind,\n\n 
err_message: None,\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub(crate) fn new_with_message(kind: DeserializeErrorKind, message: String) -> Self {\n\n Self {\n", "file_path": "src/errors/deserialize_error.rs", "rank": 59, "score": 9.192760303223004 }, { "content": "use std::io::Cursor;\n\nuse std::ops::Deref;\n\n\n\nuse bytes::Bytes;\n\nuse bytes::BytesMut;\n\nuse http::status::StatusCode;\n\nuse serde_json;\n\nuse serde_plain;\n\nuse tower_web::error::Error;\n\nuse tower_web::response::Context;\n\nuse tower_web::response::Response;\n\nuse tower_web::response::Serializer;\n\nuse tower_web::util::buf_stream::BufStream;\n\n\n\nuse crate::common::*;\n\nuse crate::extensions::MessageEncodeStrategy;\n\nuse crate::extensions::MessageStrategy;\n\nuse crate::types::MessagePlus;\n\nuse crate::types::Proto;\n\n\n", "file_path": "src/response/proto_response.rs", "rank": 60, "score": 9.035712242712776 }, { "content": "#![deny(missing_docs, missing_debug_implementations)]\n\n\n\n//! Middleware and friends that help deal with protobuf messages in tower-web.\n\n//!\n\n//! 
<TODO>\n\n\n\n#[macro_use(try_ready)]\n\nextern crate futures;\n\n\n\npub(crate) mod common {\n\n pub use futures::future::{Err as FutErr, Future, FutureResult, Ok as FutOk};\n\n pub use futures::Poll;\n\n pub use http::{header::HeaderName, Request as HttpRequest, Response as HttpResponse};\n\n pub use tower_service::Service;\n\n\n\n #[derive(Debug)]\n\n pub struct ResponseFuture<T> {\n\n pub response: T,\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 61, "score": 8.556349214781903 }, { "content": "\n\nimpl Catch for DeserializeError {\n\n type Body = String;\n\n type Future = FutureResult<http::Response<Self::Body>, TowerError>;\n\n\n\n fn catch(&mut self, _request: &http::Request<()>, error: TowerError) -> Self::Future {\n\n let (status, msg) = self.get_code_and_message();\n\n\n\n let response = http::response::Builder::new()\n\n .status(status)\n\n .header(\"content-type\", \"text/plain\")\n\n .body(msg);\n\n\n\n if response.is_ok() {\n\n futures::future::ok(response.unwrap())\n\n } else {\n\n futures::future::err(error)\n\n }\n\n }\n\n}\n", "file_path": "src/errors/deserialize_error.rs", "rank": 62, "score": 8.231455498134519 }, { "content": " kind,\n\n err_message: Some(message),\n\n }\n\n }\n\n\n\n pub(crate) fn new_with_error<T: Display>(kind: DeserializeErrorKind, err: T) -> Self {\n\n Self {\n\n kind,\n\n err_message: Some(format!(\"{}\", err)),\n\n }\n\n }\n\n\n\n pub(crate) fn get_code_and_message(&self) -> (u16, String) {\n\n use DeserializeErrorKind::*;\n\n\n\n let (status, msg) = match &self.kind {\n\n InvalidHeadersForMessage => (415, \"TODO\"),\n\n InvalidContentTypeForMessage => (415, \"TODO\"),\n\n ErrorParsingPlaintext => (415, \"TODO\"),\n\n ErrorParsingProtobuf => (415, \"TODO\"),\n", "file_path": "src/errors/deserialize_error.rs", "rank": 63, "score": 7.934000559958392 }, { "content": " M::decode(bytes).map_err(|e| {\n\n DeserializeError::new_with_error(DeserializeErrorKind::ErrorParsingProtobuf, e)\n\n })\n\n }\n\n\n\n 
MessageStrategy::Proto => M::decode(bytes).map_err(|e| {\n\n DeserializeError::new_with_error(DeserializeErrorKind::ErrorParsingProtobuf, e)\n\n }),\n\n MessageStrategy::Json => serde_json::from_slice(&bytes).map_err(|e| {\n\n DeserializeError::new_with_error(DeserializeErrorKind::ErrorParsingJson, e)\n\n }),\n\n MessageStrategy::Plaintext => serde_plain::from_str(\n\n String::from_utf8(bytes)\n\n .map_err(|e| Error::invalid_argument(&e))?\n\n .as_str(),\n\n )\n\n .map_err(|e| {\n\n DeserializeError::new_with_error(DeserializeErrorKind::ErrorParsingPlaintext, e)\n\n }),\n\n };\n", "file_path": "src/extractors/proto.rs", "rank": 64, "score": 7.226793901955011 }, { "content": "where\n\n B: BufStream,\n\n M: 'static + MessagePlus,\n\n{\n\n type Future = MessageFuture<B, M>;\n\n\n\n fn extract(_ctx: &Context) -> Self::Future {\n\n // Since protobuf messages can only be extracted from the body, we\n\n // should never get here.\n\n\n\n // unimplemented!(\"Err: {:?}\", ctx.callsite().source())\n\n unimplemented!(\"Err: can only extract protobuf messages from a request body\")\n\n }\n\n\n\n fn extract_body(ctx: &Context, body: B) -> Self::Future {\n\n // We _should_ be trying to parse the message from the body:\n\n // let source = ctx.callsite().source();\n\n // if source != tower_web::codegen::Source::Body {\n\n // unimplemented!(\"Err: {:?}\" ctx.callsite().source())\n\n // }\n", "file_path": "src/extractors/proto.rs", "rank": 65, "score": 6.733433494302807 }, { "content": " Json => serde_json::to_vec_pretty(&*self)\n\n .map(|vec| vec.into())\n\n .map_err(|err| {\n\n Error::new(\n\n &format!(\"{}\", err),\n\n \"Serialization Error: serde_json\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n )\n\n })?,\n\n Plaintext => serde_plain::to_string(&*self)\n\n .map(|str| str.into())\n\n .map_err(|err| {\n\n Error::new(\n\n &format!(\"{}\", err),\n\n \"Serialization Error: serde_plain\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n )\n\n })?,\n\n };\n\n\n", "file_path": 
"src/response/proto_response.rs", "rank": 66, "score": 6.669773442700539 }, { "content": "\n\n match msg_res {\n\n Ok(msg) => {\n\n self.message = Some(msg);\n\n Ok(futures::Async::Ready(()))\n\n }\n\n\n\n Err(err) => Err(Error::invalid_argument(&err)),\n\n }\n\n }\n\n\n\n fn extract(self) -> Self::Item {\n\n Proto::from(self.message.unwrap())\n\n }\n\n}\n", "file_path": "src/extractors/proto.rs", "rank": 67, "score": 6.334696040565052 }, { "content": "#[derive(Clone, Debug)]\n\nstruct HelloWorld;\n\n\n", "file_path": "examples/identity.rs", "rank": 68, "score": 5.522736947654705 }, { "content": "#[derive(Clone, Debug)]\n\nstruct HelloWorld;\n\n\n", "file_path": "examples/endpoints.rs", "rank": 69, "score": 5.522736947654705 }, { "content": " //\n\n // Unfortunately since callsite().source() is private, we can't do the\n\n // check above.\n\n\n\n // If the user isn't using the Middleware, they get the Default:\n\n let strat = ctx\n\n .request()\n\n .extensions()\n\n .get::<MessageParseStrategy>()\n\n .cloned()\n\n .unwrap_or_default();\n\n\n\n MessageFuture {\n\n collect: body.collect(),\n\n message: None,\n\n parse_strat: strat,\n\n }\n\n }\n\n\n\n // TODO: make sure this is enforced..\n", "file_path": "src/extractors/proto.rs", "rank": 70, "score": 5.298743035788222 }, { "content": " }\n\n}\n\n\n\n// Provides (_:&M).into() -> Proto<M> and Proto::<M>::from(_:&M) -> Proto<M>\n\nimpl<M: MessagePlus> From<M> for Proto<M> {\n\n fn from(message: M) -> Self {\n\n Self::new(message)\n\n }\n\n}\n\n\n\n// // This is bad but I don't know what the right answer is. 
I'm hoping someone\n\n// // replies to this: https://github.com/rust-lang/rust/issues/46205\n\n// // Also, this is the recommended approach in the docs for `Into`.\n\n// #[allow(incoherent_fundamental_impls)]\n\n// // Provides (_:Proto<M>).into() but probably not M::from(_:Proto<M>) -> M\n\n// // I am okay with this for now.\n\n// impl<M: MessagePlus> Into<M> for Proto<M> {\n\n// fn into(self) -> M {\n\n// self.0\n\n// }\n", "file_path": "src/types/proto_types.rs", "rank": 71, "score": 4.3322169563251505 }, { "content": " let buf = buf.freeze();\n\n\n\n http::Response::builder()\n\n .header(\"Content-Type\", strat.content_type())\n\n .body(buf.into())\n\n .map_err(|err| {\n\n Error::new(\n\n &format!(\"{}\", err),\n\n \"Response Builder Error\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n )\n\n })\n\n }\n\n}\n", "file_path": "src/response/proto_response.rs", "rank": 72, "score": 3.0062321417966755 }, { "content": "/// ```\n\n/// doesn't work.\n\n///\n\n/// Niko's excellent [blog post](http://smallcultfollowing.com/babysteps/blog/2015/01/14/little-orphan-impls/) has a full writeup.\n\n///\n\n/// The effect of this is that you'll have to specify Proto<Message> instead\n\n/// of just Message in your functions within `impl_web!()`. This, in turn hurts\n\n/// testability (you have to wrap your Messages to pass them into the endpoint\n\n/// functions) and kind of works counter to the PORTs (plain old Rust types)\n\n/// philosophy. But, I'm pretty sure it's the best we can do without using\n\n/// macros or modifying tower-web.\n\n///\n\n/// Into<M> for Proto<M> and From<M> for Proto<M> are implemented to ease the\n\n/// pain a little bit. 
Into<M> for Proto<M> instead of From<Proto<M>> for M\n\n/// for the same reasons we aren't just implementing Extract for M.\n\n///\n\n/// Deref is also implemented it should be possible to use Proto<M> as an M\n\n/// for most everything.\n\n///\n\n// If it helps we could also implement Message on Proto though I can't really\n\n// fathom why this might help anything right now.\n\npub struct Proto<M: MessagePlus>(pub M);\n\n\n\n/// A thin trait alias to make stuff more legible.\n", "file_path": "src/types/proto_types.rs", "rank": 73, "score": 2.9201784182206314 } ]
Rust
oot-explorer-demo/src/reflect_text.rs
mvanbem/oot-explorer
a2574ac5d2c3b2eb8bc6229e887b0ad7ff0fe732
use oot_explorer_read::{FromVrom, ReadError}; use oot_explorer_reflect::{ BitfieldDescriptor, EnumDescriptor, FieldDescriptor, PointerDescriptor, PrimitiveType, StructDescriptor, StructFieldLocation, TypeDescriptor, UnionDescriptor, }; use oot_explorer_segment::{SegmentAddr, SegmentTable}; use oot_explorer_vrom::{Vrom, VromAddr}; pub fn dump( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: TypeDescriptor, addr: VromAddr, indent_level: usize, ) { match desc { TypeDescriptor::Struct(desc) => { dump_struct(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Union(desc) => { dump_union(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Enum(desc) => dump_enum(vrom, desc, addr), TypeDescriptor::Bitfield(desc) => dump_bitfield(vrom, desc, addr), TypeDescriptor::Primitive(desc) => dump_primitive(vrom, desc, addr), TypeDescriptor::Pointer(desc) => { dump_pointer(vrom, segment_table, desc, addr, indent_level) } } } fn dump_bitfield(vrom: Vrom<'_>, desc: &'static BitfieldDescriptor, addr: VromAddr) -> () { let value = match desc.underlying.read_as_u32(vrom, addr) { Ok(value) => value, Err(e) => { print!("{}", e); return; } }; let mut first = true; for field in desc.fields { if first { first = false; } else { print!(" | "); } let value = (value >> field.shift) & field.mask; print!("{}", value); } } fn dump_enum(vrom: Vrom<'_>, desc: &'static EnumDescriptor, addr: VromAddr) { match desc.underlying.read_as_u32(vrom, addr) { Ok(value) => match desc.values.binary_search_by_key(&value, |&(x, _)| x) { Ok(index) => print!("{}", desc.values[index].1), Err(_) => print!("(unknown value 0x{:x}", value), }, Err(e) => print!("{}", e), } } fn dump_primitive(vrom: Vrom<'_>, desc: PrimitiveType, addr: VromAddr) -> () { let try_print = || { match desc { PrimitiveType::Bool => print!("{}", bool::from_vrom(vrom, addr)?), PrimitiveType::U8 => print!("{}", u8::from_vrom(vrom, addr)?), PrimitiveType::I8 => print!("{}", i8::from_vrom(vrom, addr)?), 
PrimitiveType::U16 => { print!("{}", u16::from_vrom(vrom, addr)?) } PrimitiveType::I16 => { print!("{}", i16::from_vrom(vrom, addr)?) } PrimitiveType::U32 => { print!("{}", u32::from_vrom(vrom, addr)?) } PrimitiveType::I32 => { print!("{}", i32::from_vrom(vrom, addr)?) } PrimitiveType::VromAddr => { print!("{:?}", VromAddr::from_vrom(vrom, addr)?) } PrimitiveType::SegmentAddr => { print!("{:?}", SegmentAddr::from_vrom(vrom, addr)?) } } Result::<(), ReadError>::Ok(()) }; if let Err(e) = try_print() { print!("{}", e); } } fn dump_pointer( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static PointerDescriptor, addr: VromAddr, indent_level: usize, ) { let segment_addr = match SegmentAddr::from_vrom(vrom, addr) { Ok(segment_addr) => segment_addr, Err(e) => { print!("{}", e); return; } }; let vrom_addr = match segment_table.resolve(segment_addr) { Ok(vrom_addr) => vrom_addr, Err(e) => { print!("{}", e); return; } }; print!("&"); dump(vrom, segment_table, desc.target, vrom_addr, indent_level) } fn dump_struct( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static StructDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level + 1); } print!("{}}}", indent); } fn dump_union( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); dump_union_body(vrom, segment_table, desc, addr, indent_level + 1); print!("{}}}", indent); } fn dump_union_body( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); let discriminant_addr = addr + desc.discriminant_offset; 
match desc .discriminant_desc .read_as_u32(vrom, discriminant_addr) .expect("enum discriminant must be readable as u32") { Ok(discriminant) => { print!( "{}(0x{:08x}) discriminant: {} = ", indent, discriminant_addr.0, desc.discriminant_desc.name(), ); dump( vrom, segment_table, desc.discriminant_desc, discriminant_addr, indent_level, ); println!(" (0x{:x})", discriminant); match desc .variants .binary_search_by_key(&discriminant, |&(x, _)| x) { Ok(index) => match desc.variants[index].1 { TypeDescriptor::Struct(desc) => { for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level); } } TypeDescriptor::Union(desc) => { dump_union_body(vrom, segment_table, desc, addr, indent_level); } _ => unimplemented!( "variant `{}` of union `{}` is not a struct or union", desc.variants[index].1.name(), desc.name, ), }, Err(_) => { println!("{}(unknown variant)", indent); } } } Err(e) => { println!("{}{}", indent, e); } } } fn dump_field( vrom: Vrom<'_>, segment_table: &SegmentTable, field: &'static FieldDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); match field.location { StructFieldLocation::Simple { offset } => { let addr = addr + offset; print!( "{}(0x{:08x}) {}: {} = ", indent, addr.0, field.name, field.desc.name(), ); dump(vrom, segment_table, field.desc, addr, indent_level); println!(); } StructFieldLocation::Slice { count_offset, count_desc, ptr_offset, } => { let count_addr = addr + count_offset; print!( "{}(0x{:08x}) {}_count: {} = ", indent, count_addr.0, field.name, count_desc.name(), ); let count = match count_desc.read_as_u32(vrom, count_addr) { Ok(count) => { println!("{}", count); Some(count) } Err(e) => { println!("{}", e); None } }; let ptr_addr = addr + ptr_offset; print!( "{}(0x{:08x}) {}_ptr: &{} = ", indent, ptr_addr.0, field.name, field.desc.name(), ); let segment_ptr = match SegmentAddr::from_vrom(vrom, ptr_addr) { Ok(segment_ptr) => { 
println!("{:?}", segment_ptr); Some(segment_ptr) } Err(e) => { println!("{}", e); None } }; if let (Some(count), Some(segment_ptr)) = (count, segment_ptr) { match segment_table.resolve(segment_ptr) { Ok(mut vrom_addr) => { println!( "{}{}: &[{}; {}] = &[", indent, field.name, field.desc.name(), count, ); for _ in 0..count { print!("{} (0x{:08x}) ", indent, vrom_addr.0); dump(vrom, segment_table, field.desc, vrom_addr, indent_level + 1); println!(); vrom_addr += match field.desc.size() { Some(size) => size, None => panic!( "slice element {} has no size, referenced from field {}", field.desc.name(), field.name, ), }; } println!("{}]", indent) } Err(e) => { print!( "{}{}: &[{}; {}] = {}", indent, field.name, field.desc.name(), count, e, ); } } } } StructFieldLocation::InlineDelimitedList { offset } => { let mut addr = addr + offset; println!( "{}(0x{:08x}) {}: [{}; N] = [", indent, addr.0, field.name, field.desc.name(), ); loop { print!("{} (0x{:08x}) ", indent, addr.0); dump(vrom, segment_table, field.desc, addr, indent_level + 1); println!(); if (field .desc .is_end() .expect("inline delimited list element has no is_end"))( vrom, addr ) { break; } addr += field .desc .size() .expect("inline delimited list element has no size"); } println!("{}]", indent) } } }
use oot_explorer_read::{FromVrom, ReadError}; use oot_explorer_reflect::{ BitfieldDescriptor, EnumDescriptor, FieldDescriptor, PointerDescriptor, PrimitiveType, StructDescriptor, StructFieldLocation, TypeDescriptor, UnionDescriptor, }; use oot_explorer_segment::{SegmentAddr, SegmentTable}; use oot_explorer_vrom::{Vrom, VromAddr}; pub fn dump( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: TypeDescriptor, addr: VromAddr, indent_level: usize, ) { match desc { TypeDescriptor::Struct(desc) => { dump_struct(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Union(desc) => { dump_union(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Enum(desc) => dump_enum(vrom, desc, addr), TypeDescriptor::Bitfield(desc) => dump_bitfield(vrom, desc, addr), TypeDescriptor::Primitive(desc) => dump_primitive(vrom, desc, addr), TypeDescriptor::Pointer(desc) => { dump_pointer(vrom, segment_table, desc, addr, indent_level) } } } fn dump_bitfield(vrom: Vrom<'_>, desc: &'static BitfieldDescriptor, addr: VromAddr) -> () { let value = match desc.underlying.read_as_u32(vrom, addr) { Ok(value) => value, Err(e) => { print!("{}", e); return; } }; let mut first = true; for field in desc.fields { if first { first = false; } else { print!(" | "); } let value = (value >> field.shift) & field.mask; print!("{}", value); } } fn dump_enum(vrom: Vrom<'_>, desc: &'static EnumDescriptor, addr: VromAddr) { match desc.underlying.read_as_u32(vrom, addr) { Ok(value) =>
, Err(e) => print!("{}", e), } } fn dump_primitive(vrom: Vrom<'_>, desc: PrimitiveType, addr: VromAddr) -> () { let try_print = || { match desc { PrimitiveType::Bool => print!("{}", bool::from_vrom(vrom, addr)?), PrimitiveType::U8 => print!("{}", u8::from_vrom(vrom, addr)?), PrimitiveType::I8 => print!("{}", i8::from_vrom(vrom, addr)?), PrimitiveType::U16 => { print!("{}", u16::from_vrom(vrom, addr)?) } PrimitiveType::I16 => { print!("{}", i16::from_vrom(vrom, addr)?) } PrimitiveType::U32 => { print!("{}", u32::from_vrom(vrom, addr)?) } PrimitiveType::I32 => { print!("{}", i32::from_vrom(vrom, addr)?) } PrimitiveType::VromAddr => { print!("{:?}", VromAddr::from_vrom(vrom, addr)?) } PrimitiveType::SegmentAddr => { print!("{:?}", SegmentAddr::from_vrom(vrom, addr)?) } } Result::<(), ReadError>::Ok(()) }; if let Err(e) = try_print() { print!("{}", e); } } fn dump_pointer( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static PointerDescriptor, addr: VromAddr, indent_level: usize, ) { let segment_addr = match SegmentAddr::from_vrom(vrom, addr) { Ok(segment_addr) => segment_addr, Err(e) => { print!("{}", e); return; } }; let vrom_addr = match segment_table.resolve(segment_addr) { Ok(vrom_addr) => vrom_addr, Err(e) => { print!("{}", e); return; } }; print!("&"); dump(vrom, segment_table, desc.target, vrom_addr, indent_level) } fn dump_struct( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static StructDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level + 1); } print!("{}}}", indent); } fn dump_union( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); dump_union_body(vrom, segment_table, 
desc, addr, indent_level + 1); print!("{}}}", indent); } fn dump_union_body( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); let discriminant_addr = addr + desc.discriminant_offset; match desc .discriminant_desc .read_as_u32(vrom, discriminant_addr) .expect("enum discriminant must be readable as u32") { Ok(discriminant) => { print!( "{}(0x{:08x}) discriminant: {} = ", indent, discriminant_addr.0, desc.discriminant_desc.name(), ); dump( vrom, segment_table, desc.discriminant_desc, discriminant_addr, indent_level, ); println!(" (0x{:x})", discriminant); match desc .variants .binary_search_by_key(&discriminant, |&(x, _)| x) { Ok(index) => match desc.variants[index].1 { TypeDescriptor::Struct(desc) => { for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level); } } TypeDescriptor::Union(desc) => { dump_union_body(vrom, segment_table, desc, addr, indent_level); } _ => unimplemented!( "variant `{}` of union `{}` is not a struct or union", desc.variants[index].1.name(), desc.name, ), }, Err(_) => { println!("{}(unknown variant)", indent); } } } Err(e) => { println!("{}{}", indent, e); } } } fn dump_field( vrom: Vrom<'_>, segment_table: &SegmentTable, field: &'static FieldDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); match field.location { StructFieldLocation::Simple { offset } => { let addr = addr + offset; print!( "{}(0x{:08x}) {}: {} = ", indent, addr.0, field.name, field.desc.name(), ); dump(vrom, segment_table, field.desc, addr, indent_level); println!(); } StructFieldLocation::Slice { count_offset, count_desc, ptr_offset, } => { let count_addr = addr + count_offset; print!( "{}(0x{:08x}) {}_count: {} = ", indent, count_addr.0, field.name, count_desc.name(), ); let count = match 
count_desc.read_as_u32(vrom, count_addr) { Ok(count) => { println!("{}", count); Some(count) } Err(e) => { println!("{}", e); None } }; let ptr_addr = addr + ptr_offset; print!( "{}(0x{:08x}) {}_ptr: &{} = ", indent, ptr_addr.0, field.name, field.desc.name(), ); let segment_ptr = match SegmentAddr::from_vrom(vrom, ptr_addr) { Ok(segment_ptr) => { println!("{:?}", segment_ptr); Some(segment_ptr) } Err(e) => { println!("{}", e); None } }; if let (Some(count), Some(segment_ptr)) = (count, segment_ptr) { match segment_table.resolve(segment_ptr) { Ok(mut vrom_addr) => { println!( "{}{}: &[{}; {}] = &[", indent, field.name, field.desc.name(), count, ); for _ in 0..count { print!("{} (0x{:08x}) ", indent, vrom_addr.0); dump(vrom, segment_table, field.desc, vrom_addr, indent_level + 1); println!(); vrom_addr += match field.desc.size() { Some(size) => size, None => panic!( "slice element {} has no size, referenced from field {}", field.desc.name(), field.name, ), }; } println!("{}]", indent) } Err(e) => { print!( "{}{}: &[{}; {}] = {}", indent, field.name, field.desc.name(), count, e, ); } } } } StructFieldLocation::InlineDelimitedList { offset } => { let mut addr = addr + offset; println!( "{}(0x{:08x}) {}: [{}; N] = [", indent, addr.0, field.name, field.desc.name(), ); loop { print!("{} (0x{:08x}) ", indent, addr.0); dump(vrom, segment_table, field.desc, addr, indent_level + 1); println!(); if (field .desc .is_end() .expect("inline delimited list element has no is_end"))( vrom, addr ) { break; } addr += field .desc .size() .expect("inline delimited list element has no size"); } println!("{}]", indent) } } }
match desc.values.binary_search_by_key(&value, |&(x, _)| x) { Ok(index) => print!("{}", desc.values[index].1), Err(_) => print!("(unknown value 0x{:x}", value), }
if_condition
[ { "content": "pub fn is_end<T>(vrom: Vrom<'_>, addr: VromAddr) -> bool\n\nwhere\n\n T: FromVrom + Layout + Sentinel,\n\n{\n\n match T::from_vrom(vrom, addr) {\n\n Ok(value) => value.is_end(vrom),\n\n Err(_) => true,\n\n }\n\n}\n", "file_path": "oot-explorer-read/src/sentinel.rs", "rank": 3, "score": 226303.95946145486 }, { "content": "pub fn aligned_data<T: Layout>(vrom: Vrom<'_>, addr: VromAddr) -> Result<&[u8], ReadError> {\n\n check_alignment::<T>(addr)?;\n\n Ok(vrom.slice(addr..addr + T::SIZE)?)\n\n}\n", "file_path": "oot-explorer-read/src/layout.rs", "rank": 4, "score": 214071.1083290586 }, { "content": "pub fn check_alignment<T: Layout>(addr: VromAddr) -> Result<(), ReadError> {\n\n if addr.0.trailing_zeros() >= T::ALIGN_BITS {\n\n Ok(())\n\n } else {\n\n Err(ReadError::Misaligned {\n\n align_bits: T::ALIGN_BITS,\n\n addr,\n\n })\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-read/src/layout.rs", "rank": 5, "score": 202772.7386840657 }, { "content": "fn fetch_and_display<T>(vrom: Vrom<'_>, addr: VromAddr) -> Result<Option<String>, String>\n\nwhere\n\n T: Display + FromVrom + Layout,\n\n{\n\n match T::from_vrom(vrom, addr) {\n\n Ok(value) => Ok(Some(format!(\"{}\", value))),\n\n Err(_) => Err(format!(\"(inaccessible)\")),\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 6, "score": 199191.45792133675 }, { "content": "fn fetch_and_debug<T>(vrom: Vrom<'_>, addr: VromAddr) -> Result<Option<String>, String>\n\nwhere\n\n T: Debug + FromVrom + Layout,\n\n{\n\n match T::from_vrom(vrom, addr) {\n\n Ok(value) => Ok(Some(format!(\"{:?}\", value))),\n\n Err(_) => Err(format!(\"(inaccessible)\")),\n\n }\n\n}\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 7, "score": 199191.45792133675 }, { "content": "pub fn reflect_field(\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n base_addr: VromAddr,\n\n field_name: Option<String>,\n\n location: &StructFieldLocation,\n\n desc: TypeDescriptor,\n\n) -> ReflectResult {\n\n 
// Format the fully decorated type name.\n\n let type_string = desc.name().to_string()\n\n + match location {\n\n StructFieldLocation::Simple { .. } => \"\",\n\n StructFieldLocation::Slice { .. } => \"[]*\",\n\n StructFieldLocation::InlineDelimitedList { .. } => \"[..]\",\n\n };\n\n\n\n let vrom_range = get_field_vrom_range(base_addr, &location, desc);\n\n let value_string = field_value_string(vrom, segment_table, base_addr, &location, desc);\n\n let contents = contents(vrom, segment_table, base_addr, &location, desc);\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 8, "score": 194580.98599615865 }, { "content": "pub fn decompressed_size(mut data: &[u8]) -> io::Result<u32> {\n\n // Skip magic word.\n\n data = &data[4..];\n\n\n\n data.read_u32::<BigEndian>()\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum DecompressError {\n\n #[error(\"{0}\")]\n\n IoError(#[from] io::Error),\n\n\n\n #[error(\"bad magic word: {0:?}\")]\n\n BadMagicWord([u8; 3]),\n\n\n\n #[error(\"bad decompressed size: tagged size {tagged}, actual size {actual}\")]\n\n BadDecompressedSize { tagged: usize, actual: usize },\n\n}\n\n\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 10, "score": 171696.67260204488 }, { "content": "fn get_field_vrom_range(\n\n base_addr: VromAddr,\n\n location: &StructFieldLocation,\n\n desc: TypeDescriptor,\n\n) -> Range<VromAddr> {\n\n let (offset, known_size) = match location {\n\n StructFieldLocation::Simple { offset } => (*offset, None),\n\n StructFieldLocation::Slice { ptr_offset, .. 
} => (*ptr_offset, Some(SegmentAddr::SIZE)),\n\n StructFieldLocation::InlineDelimitedList { offset } => (*offset, None),\n\n };\n\n\n\n let addr = base_addr + offset;\n\n match known_size.or_else(|| desc.size()) {\n\n Some(size) => addr..addr + size,\n\n None => addr..addr + 1,\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 11, "score": 171607.5007491081 }, { "content": "pub fn decompress<R>(mut r: R) -> Result<Vec<u8>, DecompressError>\n\nwhere\n\n R: Read,\n\n{\n\n // Verify header.\n\n {\n\n let mut magic = [0; 4];\n\n r.read_exact(&mut magic)?;\n\n // Don't verify the fourth byte, which varies but doesn't seem to affect this\n\n // algorithm's ability to decompress the stream.\n\n if &magic[0..3] != \"Yaz\".as_bytes() {\n\n return Err(DecompressError::BadMagicWord(\n\n magic[0..3].try_into().unwrap(),\n\n ));\n\n }\n\n }\n\n\n\n let decompressed_size = r.read_u32::<BigEndian>()? as usize;\n\n\n\n // Skip padding.\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 12, "score": 164099.70467870106 }, { "content": "pub fn decompress(\n\n rom: Rom<'_>,\n\n file_table_addr: RomAddr,\n\n) -> Result<(FileTable, OwnedVrom), DecompressError> {\n\n let mut entry_addr = file_table_addr;\n\n let mut file_ranges = vec![];\n\n let mut vrom = vec![];\n\n loop {\n\n // Locate the table entry.\n\n let entry = FileSystemTableEntry::from_rom(rom, entry_addr)?;\n\n if entry.is_end() {\n\n break;\n\n }\n\n\n\n // Record the file's VROM range.\n\n file_ranges.push(entry.virtual_range());\n\n\n\n if entry.is_present() {\n\n // Grow the VROM buffer if needed.\n\n let start = entry.virtual_start.0 as usize;\n", "file_path": "oot-explorer-vrom/src/lib.rs", "rank": 13, "score": 162101.00826537464 }, { "content": "fn dump_texture(vrom: Vrom<'_>, texture: &TextureDescriptor) {\n\n let src = texture.source.src().unwrap();\n\n\n\n let decoded_texture = match oot_explorer_gl::texture::decode(vrom, texture) {\n\n Ok(decoded_texture) => 
decoded_texture,\n\n Err(e) => {\n\n eprintln!(\"WARNING: failed to decode texture {:?}: {}\", src, e);\n\n return;\n\n }\n\n };\n\n\n\n let mut file = {\n\n let mut path = PathBuf::from(\"./textures\");\n\n let hash = {\n\n let mut hasher = DefaultHasher::new();\n\n texture.hash(&mut hasher);\n\n hasher.finish()\n\n };\n\n path.push(format!(\"0x{:08x}_0x{:016x}.png\", src.0, hash));\n\n BufWriter::new(File::create(path).unwrap())\n", "file_path": "oot-explorer-demo/src/main.rs", "rank": 14, "score": 152629.16851568708 }, { "content": "fn field_value_string(\n\n vrom: Vrom<'_>,\n\n _segment_table: &SegmentTable,\n\n base_addr: VromAddr,\n\n location: &StructFieldLocation,\n\n desc: TypeDescriptor,\n\n) -> Option<String> {\n\n let fallible_result = (|| match location {\n\n StructFieldLocation::Simple { offset } => {\n\n let field_addr = base_addr + *offset;\n\n match desc {\n\n TypeDescriptor::Enum(enum_desc) => {\n\n let value = enum_desc\n\n .read_as_u32(vrom, field_addr)\n\n .map_err(|_| format!(\"(inaccessible)\"))?;\n\n let index = enum_desc\n\n .values\n\n .binary_search_by_key(&value, |&(x, _)| x)\n\n .map_err(|_| format!(\"(unknown value 0x{:x})\", value))?;\n\n Ok(Some(format!(\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 15, "score": 147736.34493471828 }, { "content": "fn add_field_infos_for_fields(\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n desc: TypeDescriptor,\n\n addr: VromAddr,\n\n field_infos: &mut Vec<ReflectFieldInfo>,\n\n) {\n\n match desc {\n\n TypeDescriptor::Struct(desc) => {\n\n field_infos.extend(desc.fields.iter().map(|field| ReflectFieldInfo {\n\n name: Some(field.name.to_string()),\n\n base_addr: addr,\n\n location: field.location.clone(),\n\n desc: field.desc,\n\n }))\n\n }\n\n\n\n TypeDescriptor::Union(union_desc) => {\n\n // Add an item for the discriminant.\n\n field_infos.push(ReflectFieldInfo {\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 16, "score": 145501.14790030257 }, { 
"content": "#[wasm_bindgen(start)]\n\npub fn main() {\n\n std::panic::set_hook(Box::new(console_error_panic_hook::hook));\n\n}\n\n\n\n#[wasm_bindgen]\n\npub struct Context {\n\n inner: Arc<Mutex<InnerContext>>,\n\n}\n\npub struct InnerContext {\n\n gl: WebGl2RenderingContext,\n\n rom: OwnedRom,\n\n file_table: Option<FileTable>,\n\n vrom: Option<OwnedVrom>,\n\n texture_cache: TextureCache,\n\n sampler_cache: SamplerCache,\n\n}\n\n\n", "file_path": "oot-explorer-web/src/lib.rs", "rank": 18, "score": 123350.02488370964 }, { "content": "pub fn compress(data: &[u8], max_effort: MaxEffort) -> Vec<u8> {\n\n let mut dict: HashMap<&[u8], Vec<Range<usize>>> = HashMap::new();\n\n let mut result = CodeVector::new(data.len() as u32);\n\n let mut last_pos = 0;\n\n let mut pos = 0;\n\n while pos < data.len() {\n\n // Remove potential matches that have left the sliding window.\n\n for remove_pos in last_pos..pos {\n\n let begin = remove_pos.wrapping_sub(Match::MAX_DISTANCE as usize);\n\n let end = begin.wrapping_add(3);\n\n if begin < end && end <= data.len() {\n\n if let Some(bucket_list) = dict.get_mut(&data[begin..end]) {\n\n // The oldest entry will always be the first one in its bucket's list.\n\n if bucket_list.len() > 0 {\n\n bucket_list.remove(0);\n\n }\n\n }\n\n }\n\n }\n\n\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 19, "score": 123107.18829143232 }, { "content": "pub fn decode<'a>(\n\n vrom: Vrom<'a>,\n\n texture: &TextureDescriptor,\n\n) -> Result<DecodedTexture, DecodeError> {\n\n let (src_ptr, src_format, src_depth, load_dxt, load_format, load_depth, load_len) =\n\n get_texture_source_and_load_information(texture)?;\n\n\n\n // Format conversion during load is not implemented.\n\n assert_eq!(src_format, load_format);\n\n assert_eq!(src_depth, load_depth);\n\n\n\n let rgba16: &dyn TexelDecoder = &Rgba16TexelDecoder;\n\n let ci4_rgba: &dyn TexelDecoder = &Ci4TexelDecoder(RgbaPaletteDecoder);\n\n let ci4_ia: &dyn TexelDecoder = 
&Ci4TexelDecoder(IaPaletteDecoder);\n\n let ci8_rgba: &dyn TexelDecoder = &Ci8TexelDecoder(RgbaPaletteDecoder);\n\n let ci8_ia: &dyn TexelDecoder = &Ci8TexelDecoder(IaPaletteDecoder);\n\n let ia4: &dyn TexelDecoder = &Ia4TexelDecoder;\n\n let ia8: &dyn TexelDecoder = &Ia8TexelDecoder;\n\n let ia16: &dyn TexelDecoder = &Ia16TexelDecoder;\n\n let i4: &dyn TexelDecoder = &I4TexelDecoder;\n", "file_path": "oot-explorer-gl/src/texture.rs", "rank": 20, "score": 117827.48281419245 }, { "content": "pub fn get_palette_data<'a>(\n\n vrom: Vrom<'a>,\n\n texture: &TextureDescriptor,\n\n entry_range: Range<u32>,\n\n) -> Result<&'a [u8], DecodeError> {\n\n let source = match texture.palette_source {\n\n PaletteSource::None => {\n\n unreachable!(\"BUG: this should always be set for color-indexed formats\")\n\n }\n\n PaletteSource::Rgba(ref source) => Ok(source),\n\n PaletteSource::Ia(ref source) => Ok(source),\n\n }?;\n\n match source {\n\n &TmemSource::LoadBlock { .. } => Err(DecodeError::UnexpectedPaletteSource),\n\n &TmemSource::LoadTlut { ptr, count } => {\n\n assert!(count as u32 >= entry_range.end);\n\n vrom.slice((ptr + 2 * entry_range.start)..(ptr + 2 * entry_range.end))\n\n .map_err(|e| DecodeError::InaccessiblePalette(e))\n\n }\n\n &TmemSource::Undefined => Err(DecodeError::UndefinedPalette),\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-gl/src/texture.rs", "rank": 21, "score": 114350.48131972116 }, { "content": "pub fn get_texel_data<'a>(\n\n vrom: Vrom<'a>,\n\n texture: &TextureDescriptor,\n\n src_ptr: VromAddr,\n\n load_len: u32,\n\n) -> Result<(&'a [u8], usize), DecodeError> {\n\n let expected_len = (8 * texture.render_width\n\n / texture.render_depth.texels_per_tmem_word::<usize>()\n\n + 8 * (texture.render_height - 1) * texture.render_stride) as u32;\n\n if load_len < expected_len {\n\n return Err(DecodeError::UnderflowedTexels {\n\n want: expected_len,\n\n got: load_len,\n\n });\n\n }\n\n\n\n let src = vrom\n\n .slice(src_ptr..src_ptr + expected_len)\n\n 
.map_err(|e| DecodeError::InaccessibleTexels(e))?;\n\n\n\n let stride_bytes = 8 * texture.render_stride;\n\n\n\n Ok((src, stride_bytes))\n\n}\n\n\n", "file_path": "oot-explorer-gl/src/texture.rs", "rank": 22, "score": 114350.48131972116 }, { "content": "/// Proxy types that wrap a VROM address.\n\npub trait VromProxy: FromVrom {\n\n fn addr(&self) -> VromAddr;\n\n}\n", "file_path": "oot-explorer-read/src/vrom_proxy.rs", "rank": 23, "score": 107610.83919004376 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct Match {\n\n pub distance: u16,\n\n pub length: u16,\n\n}\n\nimpl Match {\n\n pub const MIN_DISTANCE: u16 = 1;\n\n pub const MAX_DISTANCE: u16 = 0x1000;\n\n pub const MIN_LENGTH: u16 = 3;\n\n pub const MAX_LENGTH: u16 = 255 + 18;\n\n\n\n pub fn read<R>(mut r: R) -> io::Result<Match>\n\n where\n\n R: Read,\n\n {\n\n let word = r.read_u16::<BigEndian>()?;\n\n let distance = (word & 0xfff) + 1;\n\n let n = (word >> 12) & 0xf;\n\n let length = if n > 0 {\n\n // Short length.\n\n n + 2\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 24, "score": 107266.91755298022 }, { "content": "/// Types that can be constructed with VROM data and an address.\n\npub trait FromVrom: Sized {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError>;\n\n}\n\n\n\nimpl FromVrom for bool {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(u8::from_vrom(vrom, addr)? 
!= 0)\n\n }\n\n}\n\n\n\nimpl FromVrom for u8 {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(vrom.slice(addr..addr + 1)?.read_u8().unwrap())\n\n }\n\n}\n\n\n\nimpl FromVrom for i8 {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(vrom.slice(addr..addr + 1)?.read_i8().unwrap())\n\n }\n", "file_path": "oot-explorer-read/src/from_vrom.rs", "rank": 25, "score": 105923.04422549822 }, { "content": "fn contents(\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n base_addr: VromAddr,\n\n location: &StructFieldLocation,\n\n desc: TypeDescriptor,\n\n) -> Vec<ReflectFieldInfo> {\n\n match location {\n\n StructFieldLocation::Simple { offset } => {\n\n // This instance represents a simple field. Dig in and add any of its fields.\n\n let mut field_infos = vec![];\n\n add_field_infos_for_fields(\n\n vrom,\n\n segment_table,\n\n desc,\n\n base_addr + *offset,\n\n &mut field_infos,\n\n );\n\n field_infos\n\n }\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 26, "score": 103867.61529996162 }, { "content": "pub fn opaque_key(params: &TextureParams) -> u32 {\n\n let mut hasher = DefaultHasher::new();\n\n params.s.hash(&mut hasher);\n\n params.t.hash(&mut hasher);\n\n let hash = hasher.finish();\n\n ((hash >> 32) ^ hash) as u32\n\n}\n\n\n", "file_path": "oot-explorer-web/src/sampler_cache.rs", "rank": 31, "score": 100442.24570709848 }, { "content": "pub fn opaque_key(descriptor: &TextureDescriptor) -> u32 {\n\n let mut hasher = DefaultHasher::new();\n\n descriptor.hash(&mut hasher);\n\n let hash = hasher.finish();\n\n ((hash >> 32) ^ hash) as u32\n\n}\n\n\n", "file_path": "oot-explorer-web/src/texture_cache.rs", "rank": 32, "score": 100442.24570709848 }, { "content": "/// Applies both layers of TMEM word swapping. One is performed by the LoadBlock command based on\n\n/// load_dxt and the word offset. The other is performed by the RDP based on y. 
These two swaps may\n\n/// cancel out.\n\nfn word_swap(offset: usize, load_dxt: Qu1_11, render_y: usize) -> usize {\n\n let load_line = ((offset / 8) * (load_dxt.0 as usize)) >> 11;\n\n let load_swap = load_line & 1 == 1;\n\n\n\n let render_swap = render_y & 1 == 1;\n\n\n\n if load_swap != render_swap {\n\n offset ^ 0x4\n\n } else {\n\n offset\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-gl/src/texture.rs", "rank": 33, "score": 93988.77272158 }, { "content": "pub trait ToExpr<T: expr::ValueType> {\n\n fn to_expr(&self, ctx: &mut expr::Context<T>, cycle: Cycle) -> expr::Key;\n\n}\n\n\n\nimpl ToExpr<GlslVec3Constant> for ColorCombine {\n\n fn to_expr(&self, ctx: &mut expr::Context<GlslVec3Constant>, cycle: Cycle) -> expr::Key {\n\n let a = self.a.to_expr(ctx, cycle);\n\n let b = self.b.to_expr(ctx, cycle);\n\n let neg_b = ctx.neg(b);\n\n let sum = ctx.add(vec![a, neg_b]);\n\n let c = self.c.to_expr(ctx, cycle);\n\n let product = ctx.mul(vec![sum, c]);\n\n let d = self.d.to_expr(ctx, cycle);\n\n ctx.add(vec![product, d])\n\n }\n\n}\n\n\n\nimpl ToExpr<GlslFloatConstant> for AlphaCombine {\n\n fn to_expr(&self, ctx: &mut expr::Context<GlslFloatConstant>, cycle: Cycle) -> expr::Key {\n\n let a = self.a.to_expr(ctx, cycle);\n", "file_path": "oot-explorer-gl/src/to_expr.rs", "rank": 34, "score": 87658.4117552942 }, { "content": "use std::fmt::{self, Debug};\n\nuse std::ops::{Add, AddAssign, Sub, SubAssign};\n\n\n\nuse crate::VromError;\n\n\n\n/// An address in VROM.\n\n#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub struct VromAddr(pub u32);\n\n\n\nimpl VromAddr {\n\n pub fn checked_add(self, offset: u32) -> Result<VromAddr, VromError> {\n\n match self.0.checked_add(offset) {\n\n Some(result) => Ok(VromAddr(result)),\n\n None => Err(VromError::VromAddrOverflow { addr: self, offset }),\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for VromAddr {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "oot-explorer-vrom/src/addr.rs", "rank": 35, 
"score": 81684.84071160576 }, { "content": " write!(f, \"VromAddr(0x{:08x})\", self.0)\n\n }\n\n}\n\n\n\nimpl Add<u32> for VromAddr {\n\n type Output = VromAddr;\n\n\n\n fn add(self, rhs: u32) -> VromAddr {\n\n VromAddr(self.0 + rhs)\n\n }\n\n}\n\n\n\nimpl AddAssign<u32> for VromAddr {\n\n fn add_assign(&mut self, rhs: u32) {\n\n self.0 += rhs;\n\n }\n\n}\n\n\n\nimpl Sub<VromAddr> for VromAddr {\n\n type Output = u32;\n", "file_path": "oot-explorer-vrom/src/addr.rs", "rank": 36, "score": 81674.20919401564 }, { "content": "\n\n fn sub(self, rhs: VromAddr) -> u32 {\n\n self.0 - rhs.0\n\n }\n\n}\n\n\n\nimpl Sub<u32> for VromAddr {\n\n type Output = VromAddr;\n\n\n\n fn sub(self, rhs: u32) -> VromAddr {\n\n VromAddr(self.0 - rhs)\n\n }\n\n}\n\n\n\nimpl SubAssign<u32> for VromAddr {\n\n fn sub_assign(&mut self, rhs: u32) {\n\n self.0 -= rhs;\n\n }\n\n}\n", "file_path": "oot-explorer-vrom/src/addr.rs", "rank": 37, "score": 81674.10534119314 }, { "content": "pub fn get_scene_table(file_table: &FileTable) -> Result<Slice<SceneTableEntry>, GetFileError> {\n\n Ok(Slice::new(\n\n file_table.file_vrom_range(SCENE_TABLE_FILE_INDEX)?.start + SCENE_TABLE_OFFSET,\n\n SCENE_TABLE_COUNT,\n\n ))\n\n}\n\n\n\ncompile_interfaces! 
{\n\n #[layout(size = 0x14, align_bits = 2)]\n\n struct SceneTableEntry {\n\n VromAddr scene_start @0;\n\n VromAddr scene_end @4;\n\n VromAddr raw_title_card_start @8;\n\n VromAddr raw_title_card_end @0xc;\n\n u8 unknown_a @0x10;\n\n u8 render_init_function @0x11;\n\n u8 unknown_b @0x12;\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-game-data/src/versions/oot_ntsc_10.rs", "rank": 38, "score": 80588.9111505695 }, { "content": "pub trait ValueType: Clone + Display + Eq + Hash + One + Zero {}\n\nimpl<T> ValueType for T where T: Clone + Display + Eq + Hash + One + Zero {}\n\n\n\n#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub struct Key(DefaultKey);\n\n\n\npub struct Context<T>\n\nwhere\n\n T: ValueType,\n\n{\n\n storage: DenseSlotMap<DefaultKey, Expr<T>>,\n\n index: HashMap<Expr<T>, DefaultKey>,\n\n}\n\nimpl<T> Context<T>\n\nwhere\n\n T: ValueType,\n\n{\n\n pub fn new() -> Context<T> {\n\n Context {\n\n storage: DenseSlotMap::new(),\n", "file_path": "oot-explorer-expr/src/lib.rs", "rank": 39, "score": 79663.45963492975 }, { "content": "fn main() {\n\n // Load and decompress the game data. 
Put the results in an Arc to share with worker threads.\n\n let (file_table, vrom) = decompress(\n\n Rom(&std::fs::read(\"Legend of Zelda, The - Ocarina of Time (U) (V1.0) [!].z64\").unwrap()),\n\n oot_ntsc_10::FILE_TABLE_ROM_ADDR,\n\n )\n\n .unwrap();\n\n let ctx = Arc::new(Context { file_table, vrom });\n\n\n\n // A channel for the main thread to send work to the worker threads.\n\n let (sender, receiver) = crossbeam::channel::bounded(0);\n\n\n\n // Spawn worker threads to dump textures.\n\n let mut join_handles = vec![];\n\n for _ in 0..8 {\n\n let ctx = Arc::clone(&ctx);\n\n let receiver = receiver.clone();\n\n join_handles.push(std::thread::spawn(move || {\n\n while let Ok(texture) = receiver.recv() {\n\n dump_texture(ctx.vrom.borrow(), &texture);\n", "file_path": "oot-explorer-demo/src/main.rs", "rank": 40, "score": 65793.64437020641 }, { "content": "fn examine_scene(\n\n file_table: &FileTable,\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n dlist_interp: &mut DisplayListInterpreter,\n\n scene_index: usize,\n\n scene: Scene,\n\n) {\n\n let segment_table = segment_table.with(Segment::SCENE, scene.addr());\n\n\n\n reflect_text::dump(vrom, &segment_table, SCENE_DESC, scene.addr(), 0);\n\n println!();\n\n\n\n for result in scene.headers(vrom) {\n\n let header = result.unwrap();\n\n match header.variant(vrom) {\n\n SceneHeaderVariant::RoomList(header) => {\n\n for (room_index, room_list_entry) in header\n\n .room_list(vrom, &segment_table)\n\n .unwrap()\n", "file_path": "oot-explorer-demo/src/main.rs", "rank": 41, "score": 64864.48373159501 }, { "content": "fn examine_room(\n\n file_table: &FileTable,\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n dlist_interp: &mut DisplayListInterpreter,\n\n scene_index: usize,\n\n room_index: usize,\n\n room: Room,\n\n) {\n\n let segment_table = segment_table.with(Segment::ROOM, room.addr());\n\n\n\n reflect_text::dump(vrom, &segment_table, ROOM_DESC, room.addr(), 0);\n\n println!();\n\n\n\n for result in 
room.headers(vrom) {\n\n let header = result.unwrap();\n\n match header.variant(vrom) {\n\n RoomHeaderVariant::Mesh(header) => {\n\n enumerate_meshes(\n\n vrom,\n", "file_path": "oot-explorer-demo/src/main.rs", "rank": 42, "score": 64864.48373159501 }, { "content": "fn examine_room(\n\n file_table: &FileTable,\n\n vrom: Vrom<'_>,\n\n scene: Scene,\n\n room: Room,\n\n dlist_interp: &mut DisplayListInterpreter,\n\n backgrounds: &mut Vec<String>,\n\n) {\n\n let cpu_ctx = SegmentTable::new()\n\n .with(Segment::SCENE, scene.addr())\n\n .with(Segment::ROOM, room.addr());\n\n let rsp_ctx = {\n\n let ctx = cpu_ctx.clone();\n\n\n\n // const ICON_ITEM_STATIC: usize = 8;\n\n // ctx.set(\n\n // Segment(8),\n\n // fs.get_file(scope, ICON_ITEM_STATIC),\n\n // fs.metadata(ICON_ITEM_STATIC).virtual_range(),\n\n // );\n", "file_path": "oot-explorer-web/src/lib.rs", "rank": 43, "score": 64864.48373159501 }, { "content": "fn make_row(\n\n document: &Document,\n\n data: &[u8],\n\n addr: VromAddr,\n\n markings: &[(Marking, Range<VromAddr>)],\n\n) -> HtmlElement {\n\n let element = html_template!(document, return div[class=\"hexdump-row\"] {});\n\n\n\n // Start with the address.\n\n let mut text = format!(\"{:08x}\", addr.0);\n\n\n\n // This function flushes `text` into the DOM, applying styles for marking as needed.\n\n let flush = |text: &mut String, marking| {\n\n if !text.is_empty() {\n\n match marking {\n\n Marking::None => {\n\n html_template!(document, in element: text(&text));\n\n }\n\n Marking::Selection => {\n\n html_template!(document, in element:\n", "file_path": "oot-explorer-web/src/hexdump.rs", "rank": 44, "score": 64864.48373159501 }, { "content": "fn examine_scene(\n\n file_table: &FileTable,\n\n vrom: Vrom<'_>,\n\n scene: Scene,\n\n dlist_interp: &mut DisplayListInterpreter,\n\n backgrounds: &mut Vec<String>,\n\n) -> Option<[f64; 5]> {\n\n let segment_table = SegmentTable::new().with(Segment::SCENE, scene.addr());\n\n let mut start_pos = None;\n\n for result in 
scene.headers(vrom) {\n\n let header = result.unwrap_throw();\n\n match header.variant(vrom) {\n\n SceneHeaderVariant::StartPositions(header) => {\n\n start_pos = header\n\n .start_positions(vrom, &segment_table)\n\n .unwrap_throw()\n\n .iter(vrom)\n\n .next()\n\n .map(|result| {\n\n let actor = result.unwrap_throw();\n", "file_path": "oot-explorer-web/src/lib.rs", "rank": 45, "score": 64864.48373159501 }, { "content": "fn get_texture_source_and_load_information(\n\n texture: &TextureDescriptor,\n\n) -> Result<\n\n (\n\n VromAddr,\n\n TextureFormat,\n\n TextureDepth,\n\n Qu1_11,\n\n TextureFormat,\n\n TextureDepth,\n\n u32,\n\n ),\n\n DecodeError,\n\n> {\n\n match texture.source {\n\n TmemSource::LoadBlock {\n\n src_ptr,\n\n src_format,\n\n src_depth,\n\n load_dxt,\n", "file_path": "oot-explorer-gl/src/texture.rs", "rank": 46, "score": 62320.601507256695 }, { "content": "/// Types that read a value of statically known size and alignment.\n\npub trait Layout {\n\n const SIZE: u32;\n\n const ALIGN_BITS: u32 = Self::SIZE.trailing_zeros();\n\n}\n\n\n\nimpl Layout for bool {\n\n const SIZE: u32 = 1;\n\n}\n\n\n\nimpl Layout for u8 {\n\n const SIZE: u32 = 1;\n\n}\n\n\n\nimpl Layout for i8 {\n\n const SIZE: u32 = 1;\n\n}\n\n\n\nimpl Layout for u16 {\n\n const SIZE: u32 = 2;\n\n}\n", "file_path": "oot-explorer-read/src/layout.rs", "rank": 47, "score": 62091.66726116919 }, { "content": "/// Types with sentinel values that may end a list.\n\npub trait Sentinel {\n\n const ITER_YIELDS_SENTINEL_VALUE: bool;\n\n\n\n fn is_end(&self, vrom: Vrom<'_>) -> bool;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\n#[must_use = \"iterators are lazy and do nothing unless consumed\"]\n\npub struct SentinelIter<'a, T> {\n\n vrom: Vrom<'a>,\n\n addr: Option<VromAddr>,\n\n _phantom_t: PhantomData<fn() -> T>,\n\n}\n\n\n\nimpl<'a, T> SentinelIter<'a, T>\n\nwhere\n\n T: FromVrom + Layout,\n\n{\n\n pub fn new(vrom: Vrom<'a>, addr: VromAddr) -> Self {\n\n Self {\n", "file_path": 
"oot-explorer-read/src/sentinel.rs", "rank": 48, "score": 62087.39255531781 }, { "content": "pub trait MeshEntry {\n\n fn opaque_display_list(\n\n self,\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n ) -> Result<Option<DisplayList>, ReadError>;\n\n fn translucent_display_list(\n\n self,\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n ) -> Result<Option<DisplayList>, ReadError>;\n\n}\n\n\n\nimpl MeshEntry for SimpleMeshEntry {\n\n // TODO: Guarded getters in codegen.\n\n\n\n fn opaque_display_list(\n\n self,\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n", "file_path": "oot-explorer-game-data/src/mesh.rs", "rank": 49, "score": 60339.88151273012 }, { "content": "fn enumerate_mesh_entry<F>(\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n scene_index: usize,\n\n room_index: usize,\n\n entry: impl MeshEntry + Copy,\n\n mut f: F,\n\n) where\n\n F: FnMut(DisplayListOpacity, DisplayList),\n\n{\n\n match entry.opaque_display_list(vrom, segment_table) {\n\n Ok(Some(dlist)) => f(DisplayListOpacity::Opaque, dlist),\n\n Ok(None) => (),\n\n Err(e) => {\n\n eprintln!(\n\n \"scene {}, room {}: while resolving display list: {}\",\n\n scene_index, room_index, e,\n\n )\n\n }\n\n }\n", "file_path": "oot-explorer-demo/src/main.rs", "rank": 50, "score": 59533.14187650822 }, { "content": "fn enumerate_meshes<F, G>(\n\n vrom: Vrom<'_>,\n\n segment_table: &SegmentTable,\n\n scene_index: usize,\n\n room_index: usize,\n\n header: MeshHeader,\n\n mut f: F,\n\n mut g: G,\n\n) where\n\n F: FnMut(DisplayListOpacity, DisplayList),\n\n G: FnMut(Background),\n\n{\n\n match header.mesh(vrom, segment_table).unwrap().variant(vrom) {\n\n MeshVariant::Simple(mesh) => {\n\n for result in mesh.entries(vrom, segment_table).unwrap().iter(vrom) {\n\n enumerate_mesh_entry(\n\n vrom,\n\n segment_table,\n\n scene_index,\n\n room_index,\n", "file_path": "oot-explorer-demo/src/main.rs", "rank": 51, "score": 56808.61860576874 }, { "content": "fn rgb5a1_to_rgba8(x: u16) -> 
[u8; 4] {\n\n let expand_5_to_8 = |x| (x << 3) | (x >> 2);\n\n\n\n let r = expand_5_to_8(((x >> 11) & 0x1f) as u8);\n\n let g = expand_5_to_8(((x >> 6) & 0x1f) as u8);\n\n let b = expand_5_to_8(((x >> 1) & 0x1f) as u8);\n\n let a = if x & 0x01 == 0x01 { 0xff } else { 0x00 };\n\n [r, g, b, a]\n\n}\n\n\n", "file_path": "oot-explorer-gl/src/texture.rs", "rank": 52, "score": 52955.71950135807 }, { "content": "fn create_gl_sampler(gl: &WebGl2RenderingContext, _params: &TextureParams) -> WebGlSampler {\n\n let sampler = gl.create_sampler().unwrap_throw();\n\n gl.sampler_parameteri(&sampler, Gl::TEXTURE_MAG_FILTER, Gl::NEAREST as i32);\n\n gl.sampler_parameteri(&sampler, Gl::TEXTURE_MIN_FILTER, Gl::NEAREST as i32);\n\n\n\n gl.sampler_parameteri(&sampler, Gl::TEXTURE_WRAP_S, Gl::CLAMP_TO_EDGE as i32);\n\n gl.sampler_parameteri(&sampler, Gl::TEXTURE_WRAP_T, Gl::CLAMP_TO_EDGE as i32);\n\n\n\n sampler\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct SamplerCache {\n\n map: HashMap<u32, WebGlSampler>,\n\n}\n\n\n\nimpl SamplerCache {\n\n pub fn new() -> SamplerCache {\n\n SamplerCache::default()\n\n }\n", "file_path": "oot-explorer-web/src/sampler_cache.rs", "rank": 53, "score": 44230.2286435997 }, { "content": "fn create_gl_texture(gl: &WebGl2RenderingContext, decoded: DecodedTexture) -> WebGlTexture {\n\n let texture = gl.create_texture().unwrap_throw();\n\n gl.bind_texture(Gl::TEXTURE_2D, Some(&texture));\n\n\n\n gl.tex_storage_2d(\n\n Gl::TEXTURE_2D,\n\n 1,\n\n Gl::RGBA8,\n\n decoded.width as i32,\n\n decoded.height as i32,\n\n );\n\n gl.tex_sub_image_2d_with_i32_and_i32_and_u32_and_type_and_opt_u8_array(\n\n Gl::TEXTURE_2D,\n\n 0,\n\n 0,\n\n 0,\n\n decoded.width as i32,\n\n decoded.height as i32,\n\n Gl::RGBA,\n\n Gl::UNSIGNED_BYTE,\n", "file_path": "oot-explorer-web/src/texture_cache.rs", "rank": 54, "score": 44230.2286435997 }, { "content": "use std::fmt::{self, Debug, Formatter};\n\n\n\nuse crate::Segment;\n\n\n\n/// A segmented address.\n\n#[derive(Clone, Copy, Eq, 
Hash, Ord, PartialEq, PartialOrd)]\n\npub struct SegmentAddr(pub u32);\n\n\n\nimpl Debug for SegmentAddr {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"SegmentAddr({:?}, 0x{:06x})\",\n\n self.segment(),\n\n self.offset(),\n\n )\n\n }\n\n}\n\n\n\nimpl SegmentAddr {\n", "file_path": "oot-explorer-segment/src/addr.rs", "rank": 55, "score": 41024.19915399711 }, { "content": "use std::fmt::{self, Debug, Formatter};\n\nuse std::ops::{Add, AddAssign, Sub, SubAssign};\n\n\n\n/// An address in ROM.\n\n#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub struct RomAddr(pub u32);\n\n\n\nimpl Debug for RomAddr {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"RomAddr(0x{:08x})\", self.0)\n\n }\n\n}\n\n\n\nimpl Add<u32> for RomAddr {\n\n type Output = RomAddr;\n\n\n\n fn add(self, rhs: u32) -> RomAddr {\n\n RomAddr(self.0 + rhs)\n\n }\n\n}\n", "file_path": "oot-explorer-rom/src/addr.rs", "rank": 56, "score": 41023.375343424144 }, { "content": "\n\nimpl AddAssign<u32> for RomAddr {\n\n fn add_assign(&mut self, rhs: u32) {\n\n self.0 += rhs;\n\n }\n\n}\n\n\n\nimpl Sub<RomAddr> for RomAddr {\n\n type Output = u32;\n\n\n\n fn sub(self, rhs: RomAddr) -> u32 {\n\n self.0 - rhs.0\n\n }\n\n}\n\n\n\nimpl Sub<u32> for RomAddr {\n\n type Output = RomAddr;\n\n\n\n fn sub(self, rhs: u32) -> RomAddr {\n\n RomAddr(self.0 - rhs)\n\n }\n\n}\n\n\n\nimpl SubAssign<u32> for RomAddr {\n\n fn sub_assign(&mut self, rhs: u32) {\n\n self.0 -= rhs;\n\n }\n\n}\n", "file_path": "oot-explorer-rom/src/addr.rs", "rank": 57, "score": 41017.78019155775 }, { "content": " pub fn segment(self) -> Segment {\n\n Segment((self.0 >> 24) as u8)\n\n }\n\n\n\n pub fn offset(self) -> u32 {\n\n self.0 & 0x00ff_ffff\n\n }\n\n\n\n pub fn is_null(self) -> bool {\n\n self.0 == 0\n\n }\n\n\n\n pub fn non_null(self) -> Option<SegmentAddr> {\n\n if self.0 == 0 {\n\n None\n\n } else {\n\n Some(self)\n\n }\n\n }\n\n}\n", "file_path": 
"oot-explorer-segment/src/addr.rs", "rank": 58, "score": 41017.53441052455 }, { "content": "use std::fmt::{self, Debug, Formatter};\n\nuse std::ops::{Deref, Range};\n\n\n\nuse crate::{VromAddr, VromError};\n\n\n\n/// A slice representing all of VROM.\n\n#[derive(Clone, Copy)]\n\npub struct Vrom<'a>(pub &'a [u8]);\n\n\n\nimpl<'a> Vrom<'a> {\n\n pub fn slice_from(self, from: VromAddr) -> Result<&'a [u8], VromError> {\n\n self.0\n\n .get(from.0 as usize..)\n\n .ok_or_else(|| VromError::OutOfRange {\n\n from: Some(from),\n\n to: None,\n\n vrom_size: self.0.len() as u32,\n\n })\n\n }\n\n\n", "file_path": "oot-explorer-vrom/src/borrowed.rs", "rank": 59, "score": 40671.91458327264 }, { "content": "use oot_explorer_rom::{Rom, RomAddr, RomError};\n\nuse std::borrow::{Borrow, Cow};\n\nuse thiserror::Error;\n\n\n\nuse crate::file_system_table_entry::FileSystemTableEntry;\n\n\n\nmod addr;\n\nmod borrowed;\n\nmod error;\n\nmod file_system_table_entry;\n\nmod file_table;\n\nmod owned;\n\npub mod yaz;\n\n\n\npub use addr::VromAddr;\n\npub use borrowed::Vrom;\n\npub use error::VromError;\n\npub use file_table::{FileIndex, FileTable, GetFileError};\n\npub use owned::OwnedVrom;\n\n\n", "file_path": "oot-explorer-vrom/src/lib.rs", "rank": 60, "score": 40669.61414258895 }, { "content": "use thiserror::Error;\n\n\n\nuse crate::VromAddr;\n\n\n\n#[derive(Debug, Error)]\n\npub enum VromError {\n\n #[error(\"VROM access out of range: {from:?}..{to:?}, VROM size {vrom_size:08x}\")]\n\n OutOfRange {\n\n from: Option<VromAddr>,\n\n to: Option<VromAddr>,\n\n vrom_size: u32,\n\n },\n\n\n\n #[error(\"VROM address overflow: {addr:?} + {offset:08x}\")]\n\n VromAddrOverflow { addr: VromAddr, offset: u32 },\n\n}\n", "file_path": "oot-explorer-vrom/src/error.rs", "rank": 61, "score": 40668.73929214987 }, { "content": " pub fn slice_to(self, to: VromAddr) -> Result<&'a [u8], VromError> {\n\n self.0\n\n .get(..to.0 as usize)\n\n .ok_or_else(|| VromError::OutOfRange {\n\n from: None,\n\n to: 
Some(to),\n\n vrom_size: self.0.len() as u32,\n\n })\n\n }\n\n\n\n pub fn slice(self, range: Range<VromAddr>) -> Result<&'a [u8], VromError> {\n\n self.0\n\n .get(range.start.0 as usize..range.end.0 as usize)\n\n .ok_or_else(|| VromError::OutOfRange {\n\n from: Some(range.start),\n\n to: Some(range.end),\n\n vrom_size: self.0.len() as u32,\n\n })\n\n }\n\n}\n", "file_path": "oot-explorer-vrom/src/borrowed.rs", "rank": 62, "score": 40668.5961109566 }, { "content": "use byteorder::{BigEndian, ReadBytesExt};\n\nuse oot_explorer_rom::RomAddr;\n\nuse oot_explorer_segment::SegmentAddr;\n\nuse oot_explorer_vrom::{Vrom, VromAddr};\n\n\n\nuse crate::ReadError;\n\n\n\n/// Types that can be constructed with VROM data and an address.\n", "file_path": "oot-explorer-read/src/from_vrom.rs", "rank": 63, "score": 40666.04455866823 }, { "content": " } else {\n\n // Long length.\n\n (r.read_u8()? as u16) + 18\n\n };\n\n Ok(Match { distance, length })\n\n }\n\n\n\n pub fn write<W>(&self, mut w: W) -> io::Result<()>\n\n where\n\n W: Write,\n\n {\n\n if !(Match::MIN_DISTANCE..=Match::MAX_DISTANCE).contains(&self.distance) {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"match distance out of range\",\n\n ));\n\n }\n\n if !(Match::MIN_LENGTH..=Match::MAX_LENGTH).contains(&self.length) {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 64, "score": 40665.49720799154 }, { "content": "use crate::Vrom;\n\n\n\n/// A boxed slice representing all of VROM.\n\n///\n\n/// Function parameters should generally prefer the borrowed [`Vrom`].\n\npub struct OwnedVrom {\n\n vrom: Box<[u8]>,\n\n}\n\n\n\nimpl OwnedVrom {\n\n pub fn new(vrom: Box<[u8]>) -> Self {\n\n Self { vrom }\n\n }\n\n\n\n pub fn borrow(&self) -> Vrom<'_> {\n\n Vrom(&self.vrom)\n\n }\n\n}\n", "file_path": "oot-explorer-vrom/src/owned.rs", "rank": 65, "score": 40664.69895845623 }, { "content": " fn is_empty(&self) -> bool {\n\n 
self.flag_bits_remaining == 8\n\n }\n\n fn clear(&mut self) {\n\n self.buf.clear();\n\n self.buf.push(0);\n\n self.flag_bits_remaining = 8;\n\n }\n\n fn push(&mut self, code: Code) -> io::Result<usize> {\n\n if self.is_full() {\n\n return Err(io::Error::new(io::ErrorKind::Other, \"push while full\"));\n\n }\n\n self.flag_bits_remaining -= 1;\n\n match code {\n\n Code::Literal(x) => {\n\n self.buf[0] |= 1 << self.flag_bits_remaining;\n\n self.buf.push(x);\n\n Ok(1)\n\n }\n\n Code::Match(m) => {\n\n let len_before = self.buf.len();\n\n m.write(&mut self.buf)?;\n\n Ok(self.buf.len() - len_before)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 66, "score": 40664.682216877525 }, { "content": " for range in bucket_list.iter().rev() {\n\n let match_length = data[range.start..]\n\n .iter()\n\n .take(Match::MAX_LENGTH as usize)\n\n .zip(data[pos..].iter())\n\n .take_while(|(a, b)| a == b)\n\n .count();\n\n if match_length >= (Match::MIN_LENGTH as usize) {\n\n if match best_match {\n\n Some(m) => match_length > (m.length as usize),\n\n None => true,\n\n } {\n\n best_match = Some(Match {\n\n distance: (pos - range.start) as u16,\n\n length: match_length as u16,\n\n });\n\n }\n\n }\n\n }\n\n if let Some(m) = best_match {\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 67, "score": 40664.31478071162 }, { "content": " let end = entry.virtual_end.0 as usize;\n\n if vrom.len() < end {\n\n vrom.resize(end, 0x00);\n\n }\n\n\n\n // Retrieve the file's data.\n\n let file_data = if entry.is_compressed() {\n\n Cow::Owned(yaz::decompress(rom.slice(entry.physical_range())?)?)\n\n } else {\n\n Cow::Borrowed(rom.slice(\n\n entry.physical_start\n\n ..entry.physical_start + (entry.virtual_end - entry.virtual_start),\n\n )?)\n\n };\n\n\n\n // Copy the file into the VROM buffer.\n\n (&mut vrom[start..end]).copy_from_slice(file_data.borrow());\n\n }\n\n\n\n entry_addr += FileSystemTableEntry::SIZE;\n", "file_path": "oot-explorer-vrom/src/lib.rs", 
"rank": 68, "score": 40663.54080128414 }, { "content": " r.read_u32::<BigEndian>()?;\n\n r.read_u32::<BigEndian>()?;\n\n\n\n let mut result = Vec::with_capacity(decompressed_size);\n\n while result.len() < decompressed_size {\n\n let mut literal_flags = r.read_u8()?;\n\n for _bit in 0..8 {\n\n if (literal_flags & 0x80) == 0x80 {\n\n // Literal.\n\n result.push(r.read_u8()?);\n\n } else {\n\n // Match.\n\n let Match { distance, length } = Match::read(&mut r)?;\n\n let distance = distance as usize;\n\n for _ in 0..length {\n\n result.push(result[result.len() - distance]);\n\n }\n\n }\n\n if result.len() >= decompressed_size {\n\n break;\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 69, "score": 40662.75577686352 }, { "content": "impl FromVrom for i32 {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(vrom.slice(addr..addr + 4)?.read_i32::<BigEndian>().unwrap())\n\n }\n\n}\n\n\n\nimpl FromVrom for RomAddr {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(RomAddr(u32::from_vrom(vrom, addr)?))\n\n }\n\n}\n\n\n\nimpl FromVrom for VromAddr {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(VromAddr(u32::from_vrom(vrom, addr)?))\n\n }\n\n}\n\n\n\nimpl FromVrom for SegmentAddr {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(SegmentAddr(u32::from_vrom(vrom, addr)?))\n\n }\n\n}\n", "file_path": "oot-explorer-read/src/from_vrom.rs", "rank": 70, "score": 40662.668908489264 }, { "content": "}\n\n\n\nimpl FromVrom for u16 {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(vrom.slice(addr..addr + 2)?.read_u16::<BigEndian>().unwrap())\n\n }\n\n}\n\n\n\nimpl FromVrom for i16 {\n\n fn from_vrom(vrom: Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(vrom.slice(addr..addr + 2)?.read_i16::<BigEndian>().unwrap())\n\n }\n\n}\n\n\n\nimpl FromVrom for u32 {\n\n fn from_vrom(vrom: 
Vrom<'_>, addr: VromAddr) -> Result<Self, ReadError> {\n\n Ok(vrom.slice(addr..addr + 4)?.read_u32::<BigEndian>().unwrap())\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-read/src/from_vrom.rs", "rank": 71, "score": 40662.402967920876 }, { "content": " }\n\n self.buf.push(code).unwrap()\n\n }\n\n fn into_vec(mut self) -> Vec<u8> {\n\n if !self.buf.is_empty() {\n\n self.data.extend_from_slice(&self.buf.buf);\n\n }\n\n let padding_len = ((self.data.len() + 15) & !0xf) - self.data.len();\n\n for _i in 0..padding_len {\n\n self.data.push(0);\n\n }\n\n self.data\n\n }\n\n}\n\n\n\npub struct MaxEffort(pub usize);\n\nimpl MaxEffort {\n\n pub const DEFAULT: MaxEffort = MaxEffort(100);\n\n}\n\n\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 72, "score": 40661.970222890515 }, { "content": "\n\nimpl<'a> Debug for Vrom<'a> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Vrom(_)\")\n\n }\n\n}\n\n\n\nimpl<'a> Deref for Vrom<'a> {\n\n type Target = [u8];\n\n\n\n fn deref(&self) -> &[u8] {\n\n self.0\n\n }\n\n}\n", "file_path": "oot-explorer-vrom/src/borrowed.rs", "rank": 73, "score": 40660.35066333303 }, { "content": " }\n\n let vrom = vrom.into_boxed_slice();\n\n\n\n Ok((FileTable { file_ranges }, OwnedVrom::new(vrom)))\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum DecompressError {\n\n #[error(\"{0}\")]\n\n RomError(#[from] RomError),\n\n\n\n #[error(\"{0}\")]\n\n YazError(#[from] yaz::DecompressError),\n\n}\n", "file_path": "oot-explorer-vrom/src/lib.rs", "rank": 74, "score": 40659.7555580947 }, { "content": " result.push(Code::Match(m));\n\n pos += m.length as usize;\n\n continue;\n\n }\n\n }\n\n }\n\n result.push(Code::Literal(data[pos]));\n\n pos += 1\n\n }\n\n result.into_vec()\n\n}\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 75, "score": 40659.35894656876 }, { "content": " // Add potential matches that have entered the sliding window.\n\n for add_pos in last_pos..pos {\n\n let begin = add_pos.wrapping_sub(1);\n\n let end 
= begin.wrapping_add(3);\n\n if begin < end && end <= data.len() {\n\n let bucket_list = dict\n\n .entry(&data[begin..end])\n\n .or_insert_with(|| Vec::with_capacity(max_effort.0));\n\n bucket_list.push(begin..end);\n\n while bucket_list.len() > max_effort.0 {\n\n bucket_list.remove(0);\n\n }\n\n }\n\n }\n\n\n\n last_pos = pos;\n\n\n\n if pos + 3 <= data.len() {\n\n if let Some(bucket_list) = dict.get(&data[pos..pos + 3]) {\n\n let mut best_match: Option<Match> = None;\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 76, "score": 40658.23234573588 }, { "content": "use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};\n\nuse std::collections::HashMap;\n\nuse std::convert::TryInto;\n\nuse std::io::{self, Read, Write};\n\nuse std::ops::Range;\n\nuse thiserror::Error;\n\n\n\n#[derive(Clone, Copy, Debug)]\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 77, "score": 40657.08978449329 }, { "content": " \"match length out of range\",\n\n ));\n\n }\n\n let (length1, length2) = if self.length <= 17 {\n\n (self.length - 2, None)\n\n } else {\n\n (0, Some((self.length - 18) as u8))\n\n };\n\n let word = (length1 << 12) | (self.distance - 1);\n\n w.write_u16::<BigEndian>(word)?;\n\n match length2 {\n\n Some(x) => w.write_u8(x)?,\n\n None => (),\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 78, "score": 40656.22721550069 }, { "content": " }\n\n literal_flags <<= 1;\n\n }\n\n }\n\n if result.len() != decompressed_size {\n\n return Err(DecompressError::BadDecompressedSize {\n\n tagged: decompressed_size,\n\n actual: result.len(),\n\n });\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 79, "score": 40655.54709240304 }, { "content": " // Resolve the segment address. If it's unmapped, we have no contents. 
The slice field's\n\n // one-line value should display the error message.\n\n let mut vrom_ptr = match segment_table.resolve(segment_ptr) {\n\n Ok(vrom_ptr) => vrom_ptr,\n\n Err(_) => return vec![],\n\n };\n\n\n\n // Add a field for each value in the slice.\n\n let mut field_infos = vec![];\n\n for index in 0..count {\n\n field_infos.push(ReflectFieldInfo {\n\n name: Some(format!(\"{}\", index)),\n\n base_addr: vrom_ptr,\n\n location: StructFieldLocation::Simple { offset: 0 },\n\n desc,\n\n });\n\n\n\n vrom_ptr += match desc.size() {\n\n Some(size) => size,\n\n None => panic!(\"slice element {} has no size\", desc.name()),\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 80, "score": 39962.77055890687 }, { "content": " .binary_search_by_key(&discriminant, |&(x, _)| x)\n\n {\n\n let variant_desc = union_desc.variants[index].1;\n\n add_field_infos_for_fields(vrom, segment_table, variant_desc, addr, field_infos);\n\n }\n\n }\n\n\n\n TypeDescriptor::Pointer(pointer_desc) => {\n\n // TODO: Add pseudo-items for failure to dereference a pointer.\n\n\n\n let segment_ptr = match SegmentAddr::from_vrom(vrom, addr) {\n\n Ok(segment_ptr) => segment_ptr,\n\n Err(_) => return,\n\n };\n\n let vrom_ptr = match segment_table.resolve(segment_ptr) {\n\n Ok(vrom_ptr) => vrom_ptr,\n\n Err(_) => return,\n\n };\n\n\n\n // Add an item for the pointed-to value.\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 81, "score": 39962.485484247714 }, { "content": "\n\n // Structs do not have a one-line value string.\n\n TypeDescriptor::Struct(_) => Ok(None),\n\n }\n\n }\n\n\n\n StructFieldLocation::Slice {\n\n count_offset,\n\n count_desc,\n\n ptr_offset,\n\n } => {\n\n let count = match count_desc.read_as_u32(vrom, base_addr + *count_offset) {\n\n Ok(count) => format!(\"{}\", count),\n\n Err(_) => format!(\"(inaccessible)\"),\n\n };\n\n let ptr_addr = base_addr + *ptr_offset;\n\n let ptr = match SegmentAddr::from_vrom(vrom, ptr_addr) {\n\n Ok(vrom_addr) => 
format!(\"{:?}\", vrom_addr),\n\n Err(_) => format!(\"(inaccessible)\"),\n\n };\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 82, "score": 39961.819009163584 }, { "content": " match SegmentAddr::from_vrom(vrom, field_addr) {\n\n Ok(segment_ptr) => {\n\n Ok(Some(format!(\"({}) {:?}\", pointer_desc.name, segment_ptr)))\n\n }\n\n Err(_) => Ok(Some(format!(\"(inaccessible)\"))),\n\n }\n\n }\n\n\n\n TypeDescriptor::Union(union_desc) => {\n\n let discriminant_value = field_value_string(\n\n vrom,\n\n _segment_table,\n\n base_addr + union_desc.discriminant_offset,\n\n &StructFieldLocation::Simple { offset: 0 },\n\n union_desc.discriminant_desc,\n\n )\n\n .ok_or_else(|| \"(inaccessible)\".to_string())?;\n\n\n\n Ok(Some(format!(\"{{ {}, .. }}\", discriminant_value)))\n\n }\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 83, "score": 39961.55037061162 }, { "content": " base_addr: u32,\n\n field_name: Option<String>,\n\n type_string: String,\n\n value_string: Option<String>,\n\n vrom_start: u32,\n\n vrom_end: u32,\n\n}\n\n\n\n#[wasm_bindgen]\n\n#[derive(Clone)]\n\npub struct ReflectFieldInfo {\n\n name: Option<String>,\n\n base_addr: VromAddr,\n\n location: StructFieldLocation,\n\n desc: TypeDescriptor,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl ReflectFieldInfo {\n\n pub fn reflect(&self, ctx: &Context, root: &ReflectRoot) -> ReflectResult {\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 84, "score": 39957.76347763287 }, { "content": "use oot_explorer_read::{FromVrom, Layout};\n\nuse oot_explorer_reflect::{PrimitiveType, StructFieldLocation, TypeDescriptor};\n\nuse oot_explorer_segment::{SegmentAddr, SegmentTable};\n\nuse oot_explorer_vrom::{Vrom, VromAddr};\n\nuse serde::Serialize;\n\nuse std::fmt::{Debug, Display};\n\nuse std::ops::Range;\n\nuse wasm_bindgen::prelude::wasm_bindgen;\n\nuse wasm_bindgen::{JsValue, UnwrapThrowExt};\n\n\n\nuse crate::reflect_root::ReflectRoot;\n\nuse 
crate::Context;\n\n\n\n#[wasm_bindgen]\n\npub struct ReflectResult {\n\n info: ReflectItemInfo,\n\n fields: Vec<ReflectFieldInfo>,\n\n}\n\n\n\n#[wasm_bindgen]\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 85, "score": 39956.36139844662 }, { "content": " name: Some(\"discriminant\".to_string()),\n\n base_addr: addr,\n\n location: StructFieldLocation::Simple {\n\n offset: union_desc.discriminant_offset,\n\n },\n\n desc: union_desc.discriminant_desc,\n\n });\n\n\n\n // If the discriminant is accessible and known, recurse to add items for each field in\n\n // the variant.\n\n let discriminant = match union_desc\n\n .discriminant_desc\n\n .read_as_u32(vrom, addr + union_desc.discriminant_offset)\n\n .expect(\"union discriminants must be readable as u32\")\n\n {\n\n Ok(discriminant) => discriminant,\n\n Err(_) => return,\n\n };\n\n if let Ok(index) = union_desc\n\n .variants\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 86, "score": 39956.149348215535 }, { "content": " };\n\n }\n\n field_infos\n\n }\n\n StructFieldLocation::InlineDelimitedList { offset } => {\n\n // This instance represents an inline delimited list field.\n\n\n\n // Retrieve the is_end function.\n\n let is_end = match desc.is_end() {\n\n Some(is_end) => is_end,\n\n None => panic!(\"delimited list element {} has no is_end\", desc.name()),\n\n };\n\n\n\n // Add a field for each value in the list.\n\n let mut field_infos = vec![];\n\n let mut ptr = base_addr + *offset;\n\n for index in 0.. 
{\n\n field_infos.push(ReflectFieldInfo {\n\n name: Some(format!(\"{}\", index)),\n\n base_addr: ptr,\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 87, "score": 39956.08597426707 }, { "content": " \"{} (0x{:x})\",\n\n enum_desc.values[index].1, value\n\n )))\n\n }\n\n\n\n TypeDescriptor::Bitfield(_) => Ok(Some(format!(\"(bitfields not implemented)\"))),\n\n\n\n TypeDescriptor::Primitive(primitive) => match primitive {\n\n PrimitiveType::Bool => fetch_and_display::<bool>(vrom, field_addr),\n\n PrimitiveType::U8 => fetch_and_display::<u8>(vrom, field_addr),\n\n PrimitiveType::I8 => fetch_and_display::<i8>(vrom, field_addr),\n\n PrimitiveType::U16 => fetch_and_display::<u16>(vrom, field_addr),\n\n PrimitiveType::I16 => fetch_and_display::<i16>(vrom, field_addr),\n\n PrimitiveType::U32 => fetch_and_display::<u32>(vrom, field_addr),\n\n PrimitiveType::I32 => fetch_and_display::<i32>(vrom, field_addr),\n\n PrimitiveType::VromAddr => fetch_and_debug::<VromAddr>(vrom, field_addr),\n\n PrimitiveType::SegmentAddr => fetch_and_debug::<SegmentAddr>(vrom, field_addr),\n\n },\n\n\n\n TypeDescriptor::Pointer(pointer_desc) => {\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 88, "score": 39954.11799108671 }, { "content": "impl ReflectResult {\n\n #[wasm_bindgen(getter)]\n\n pub fn info(&self) -> JsValue {\n\n serde_wasm_bindgen::to_value(&self.info).unwrap_throw()\n\n }\n\n\n\n #[wasm_bindgen(getter = fieldsCount)]\n\n pub fn fields_count(&self) -> usize {\n\n self.fields.len()\n\n }\n\n\n\n #[wasm_bindgen(js_name = getField)]\n\n pub fn get_field(&self, index: usize) -> ReflectFieldInfo {\n\n self.fields[index].clone()\n\n }\n\n}\n\n\n\n#[derive(Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ReflectItemInfo {\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 89, "score": 39952.76348751129 }, { "content": " StructFieldLocation::Slice {\n\n count_offset,\n\n count_desc,\n\n ptr_offset,\n\n } => {\n\n // This 
instance represents a slice field.\n\n\n\n // Retrieve the count.\n\n let count = count_desc\n\n .read_as_u32(vrom, base_addr + *count_offset)\n\n .expect(\"not ready to make this robust yet\");\n\n\n\n // Retrieve the initial pointer.\n\n let ptr_addr = base_addr + *ptr_offset;\n\n let segment_ptr =\n\n SegmentAddr::from_vrom(vrom, ptr_addr).expect(\"not ready to make this robust yet\");\n\n if segment_ptr.is_null() {\n\n return vec![];\n\n }\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 90, "score": 39951.803038371014 }, { "content": " location: StructFieldLocation::Simple { offset: 0 },\n\n desc,\n\n });\n\n\n\n if is_end(vrom, ptr) {\n\n break;\n\n }\n\n\n\n ptr += match desc.size() {\n\n Some(size) => size,\n\n None => panic!(\"delimited list element {} has no size\", desc.name()),\n\n };\n\n }\n\n field_infos\n\n }\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 91, "score": 39951.57667460185 }, { "content": " let ctx_ref = ctx.inner.lock().unwrap_throw();\n\n let vrom = ctx_ref.vrom.as_ref().unwrap_throw().borrow();\n\n\n\n reflect_field(\n\n vrom,\n\n &root.segment_table,\n\n self.base_addr,\n\n self.name.clone(),\n\n &self.location,\n\n self.desc,\n\n )\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 92, "score": 39951.4468389027 }, { "content": " field_infos.push(ReflectFieldInfo {\n\n name: None,\n\n base_addr: vrom_ptr,\n\n location: StructFieldLocation::Simple { offset: 0 },\n\n desc: pointer_desc.target,\n\n });\n\n }\n\n\n\n // These types don't have fields.\n\n TypeDescriptor::Enum(_) | TypeDescriptor::Bitfield(_) | TypeDescriptor::Primitive(_) => {}\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 93, "score": 39951.4252525494 }, { "content": " Ok(Some(format!(\"({}[{}]*) {}\", desc.name(), count, ptr)))\n\n }\n\n StructFieldLocation::InlineDelimitedList { .. 
} => {\n\n // TODO\n\n Ok(None)\n\n }\n\n })();\n\n match fallible_result {\n\n Ok(result) => result,\n\n Err(message) => Some(message),\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 94, "score": 39950.77487792163 }, { "content": " ReflectResult {\n\n info: ReflectItemInfo {\n\n base_addr: base_addr.0,\n\n field_name,\n\n type_string,\n\n value_string,\n\n vrom_start: vrom_range.start.0,\n\n vrom_end: vrom_range.end.0,\n\n },\n\n fields: contents,\n\n }\n\n}\n\n\n", "file_path": "oot-explorer-web/src/reflect_value.rs", "rank": 95, "score": 39949.335054367955 }, { "content": "use std::ops::Range;\n\nuse thiserror::Error;\n\n\n\nuse crate::VromAddr;\n\n\n\n#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub struct FileIndex(pub u32);\n\n\n\n/// A processed file table for use after VROM is decompressed.\n\n#[derive(Clone)]\n\npub struct FileTable {\n\n pub(crate) file_ranges: Vec<Range<VromAddr>>,\n\n}\n\n\n\nimpl FileTable {\n\n pub fn file_vrom_range(&self, index: FileIndex) -> Result<Range<VromAddr>, GetFileError> {\n\n Ok(self\n\n .file_ranges\n\n .get(index.0 as usize)\n\n .ok_or_else(|| GetFileError::InvalidFileIndex {\n", "file_path": "oot-explorer-vrom/src/file_table.rs", "rank": 96, "score": 39697.43522227693 }, { "content": "use oot_explorer_vrom::VromAddr;\n\n\n\nuse crate::FromVrom;\n\n\n\n/// Proxy types that wrap a VROM address.\n", "file_path": "oot-explorer-read/src/vrom_proxy.rs", "rank": 97, "score": 39692.35927153888 }, { "content": " index,\n\n file_count: self.file_ranges.len() as u32,\n\n })?\n\n .clone())\n\n }\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum GetFileError {\n\n #[error(\"invalid file index: {index:?}, file count {file_count}\")]\n\n InvalidFileIndex { index: FileIndex, file_count: u32 },\n\n}\n", "file_path": "oot-explorer-vrom/src/file_table.rs", "rank": 98, "score": 39681.69092324322 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum Code {\n\n Literal(u8),\n\n 
Match(Match),\n\n}\n\n\n", "file_path": "oot-explorer-vrom/src/yaz.rs", "rank": 99, "score": 39678.67251627028 } ]
Rust
src/trace/collector.rs
piercetrey-figure/minitrace-rust
1ab75c3399b07b86d8e142a8e5a81f1f0d1d9f17
use crossbeam::channel::Receiver; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Duration; use crate::span::Span; use crate::span::{Anchor, DefaultClock}; use crate::trace::acquirer::SpanCollection; pub struct Collector { receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>, } impl Collector { pub(crate) fn new(receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>) -> Self { Collector { receiver, closed } } pub fn collect(self) -> Vec<Span> { self.collect_with_args(CollectArgs { sync: false, duration_threshold: None, }) } pub fn collect_with_args( self, CollectArgs { sync, duration_threshold, }: CollectArgs, ) -> Vec<Span> { let span_collections: Vec<_> = if sync { self.receiver.iter().collect() } else { self.receiver.try_iter().collect() }; self.closed.store(true, Ordering::SeqCst); let anchor = DefaultClock::anchor(); if let Some(duration) = duration_threshold { if let Some(root_span) = span_collections.iter().find_map(|s| match s { SpanCollection::Span(s) if s.parent_id.0 == 0 => Some(s), _ => None, }) { let root_span = root_span.clone().into_span(anchor); if root_span.duration_ns < duration.as_nanos() as _ { return vec![root_span]; } } } Self::amend(span_collections, anchor) } } impl Collector { #[inline] fn amend(span_collections: Vec<SpanCollection>, anchor: Anchor) -> Vec<Span> { let capacity = span_collections .iter() .map(|sc| match sc { SpanCollection::LocalSpans { local_spans: raw_spans, .. 
} => raw_spans.spans.len(), SpanCollection::Span(_) => 1, }) .sum(); let mut spans = Vec::with_capacity(capacity); for span_collection in span_collections { match span_collection { SpanCollection::LocalSpans { local_spans: raw_spans, parent_id_of_root: span_id, } => { for span in &raw_spans.spans { let begin_unix_time_ns = DefaultClock::cycle_to_unix_time_ns(span.begin_cycle, anchor); let end_unix_time_ns = if span.end_cycle.is_zero() { DefaultClock::cycle_to_unix_time_ns(raw_spans.end_time, anchor) } else { DefaultClock::cycle_to_unix_time_ns(span.end_cycle, anchor) }; let parent_id = if span.parent_id.0 == 0 { span_id.0 } else { span.parent_id.0 }; spans.push(Span { id: span.id.0, parent_id, begin_unix_time_ns, duration_ns: end_unix_time_ns - begin_unix_time_ns, event: span.event, properties: span.properties.clone(), }); } } SpanCollection::Span(span) => spans.push(span.into_span(anchor)), } } spans } } #[derive(Default, Debug)] pub struct CollectArgs { sync: bool, duration_threshold: Option<Duration>, } impl CollectArgs { pub fn sync(self, sync: bool) -> Self { Self { sync, ..self } } pub fn duration_threshold(self, duration_threshold: Duration) -> Self { Self { duration_threshold: Some(duration_threshold), ..self } } }
use crossbeam::channel::Receiver; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Duration; use crate::span::Span; use crate::span::{Anchor, DefaultClock}; use crate::trace::acquirer::SpanCollection; pub struct Collector { receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>, } impl Collector { pub(crate) fn new(receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>) -> Self { Collector { receiver, closed } } pub fn collect(self) -> Vec<Span> { self.collect_with_args(CollectArgs { sync: false, duration_threshold: None, }) } pub fn collect_with_args( self, CollectArgs { sync, duration_threshold, }: CollectArgs, ) -> Vec<Span> { let span_collections: Vec<_> = if sync { self.receiver.iter().collect() } else { self.receiver.try_it
} impl Collector { #[inline] fn amend(span_collections: Vec<SpanCollection>, anchor: Anchor) -> Vec<Span> { let capacity = span_collections .iter() .map(|sc| match sc { SpanCollection::LocalSpans { local_spans: raw_spans, .. } => raw_spans.spans.len(), SpanCollection::Span(_) => 1, }) .sum(); let mut spans = Vec::with_capacity(capacity); for span_collection in span_collections { match span_collection { SpanCollection::LocalSpans { local_spans: raw_spans, parent_id_of_root: span_id, } => { for span in &raw_spans.spans { let begin_unix_time_ns = DefaultClock::cycle_to_unix_time_ns(span.begin_cycle, anchor); let end_unix_time_ns = if span.end_cycle.is_zero() { DefaultClock::cycle_to_unix_time_ns(raw_spans.end_time, anchor) } else { DefaultClock::cycle_to_unix_time_ns(span.end_cycle, anchor) }; let parent_id = if span.parent_id.0 == 0 { span_id.0 } else { span.parent_id.0 }; spans.push(Span { id: span.id.0, parent_id, begin_unix_time_ns, duration_ns: end_unix_time_ns - begin_unix_time_ns, event: span.event, properties: span.properties.clone(), }); } } SpanCollection::Span(span) => spans.push(span.into_span(anchor)), } } spans } } #[derive(Default, Debug)] pub struct CollectArgs { sync: bool, duration_threshold: Option<Duration>, } impl CollectArgs { pub fn sync(self, sync: bool) -> Self { Self { sync, ..self } } pub fn duration_threshold(self, duration_threshold: Duration) -> Self { Self { duration_threshold: Some(duration_threshold), ..self } } }
er().collect() }; self.closed.store(true, Ordering::SeqCst); let anchor = DefaultClock::anchor(); if let Some(duration) = duration_threshold { if let Some(root_span) = span_collections.iter().find_map(|s| match s { SpanCollection::Span(s) if s.parent_id.0 == 0 => Some(s), _ => None, }) { let root_span = root_span.clone().into_span(anchor); if root_span.duration_ns < duration.as_nanos() as _ { return vec![root_span]; } } } Self::amend(span_collections, anchor) }
function_block-function_prefixed
[ { "content": "#[proc_macro_attribute]\n\n#[proc_macro_error]\n\npub fn trace(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemFn);\n\n let event = syn::parse_macro_input!(args as syn::Expr);\n\n\n\n let syn::ItemFn {\n\n attrs,\n\n vis,\n\n block,\n\n sig,\n\n } = input;\n\n\n\n let syn::Signature {\n\n output: return_type,\n\n inputs: params,\n\n unsafety,\n\n asyncness,\n\n constness,\n\n abi,\n\n ident,\n\n generics:\n", "file_path": "crates/minitrace-macro/src/lib.rs", "rank": 0, "score": 44280.7328356989 }, { "content": "#[proc_macro_attribute]\n\n#[proc_macro_error]\n\npub fn trace_async(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemFn);\n\n let event = syn::parse_macro_input!(args as syn::Expr);\n\n\n\n let syn::ItemFn {\n\n attrs,\n\n vis,\n\n block,\n\n sig,\n\n } = input;\n\n\n\n let syn::Signature {\n\n output: return_type,\n\n inputs: params,\n\n unsafety,\n\n asyncness,\n\n constness,\n\n abi,\n\n ident,\n\n generics:\n", "file_path": "crates/minitrace-macro/src/lib.rs", "rank": 1, "score": 43270.12866039366 }, { "content": "#[derive(Serialize)]\n\nstruct MPSpan<'a> {\n\n name: &'a str,\n\n service: &'a str,\n\n start: i64,\n\n duration: i64,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n meta: Option<HashMap<&'a str, &'a str>>,\n\n span_id: u64,\n\n trace_id: u64,\n\n parent_id: u64,\n\n}\n", "file_path": "crates/minitrace-datadog/src/lib.rs", "rank": 2, "score": 37014.46682903423 }, { "content": "pub trait FutureExt: Sized {\n\n /// Bind `span` to the future and return a future adaptor `WithSpan`. 
It can help trace a top\n\n /// future (aka task) by calling [`Span::try_enter`](Span::try_enter) when the executor\n\n /// [`poll`](std::future::Future::poll)s it.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// # #[tokio::main]\n\n /// # async fn main() {\n\n /// use minitrace::{Span, FutureExt};\n\n ///\n\n /// let (span, _collector) = Span::root(\"Task\");\n\n /// let task = async {\n\n /// 42\n\n /// };\n\n ///\n\n /// tokio::spawn(task.in_span(span));\n\n /// # }\n\n /// ```\n", "file_path": "src/future.rs", "rank": 3, "score": 35907.40912951681 }, { "content": "fn main() {\n\n let spans = {\n\n let (span, collector) = Span::root(\"root\");\n\n\n\n let _sg1 = span.enter();\n\n let _sg2 =\n\n LocalSpan::enter(\"a span\").with_property(|| (\"a property\", \"a value\".to_owned()));\n\n\n\n for i in 1..=10 {\n\n func1(i);\n\n }\n\n\n\n collector\n\n }\n\n .collect_with_args(CollectArgs::default().sync(true));\n\n\n\n // Report to Jaeger\n\n let bytes = JReporter::encode(\"synchronous\".to_owned(), rand::random(), 0, 0, &spans).unwrap();\n\n JReporter::report(\"127.0.0.1:6831\".parse().unwrap(), &bytes).ok();\n\n\n\n // Report to Datadog\n\n let bytes = DReporter::encode(\"synchronous\", rand::random(), 0, 0, &spans).unwrap();\n\n DReporter::report_blocking(\"127.0.0.1:8126\".parse().unwrap(), bytes).ok();\n\n}\n", "file_path": "examples/synchronous.rs", "rank": 4, "score": 33320.97075638377 }, { "content": "fn main() {\n\n let collector = {\n\n let (root_span, collector) = Span::root(\"root\");\n\n let _span_guard = root_span.enter();\n\n\n\n let _local_span_guard = LocalSpan::enter(\"child\");\n\n\n\n // do something ...\n\n collector\n\n };\n\n\n\n let spans: Vec<span::Span> = collector.collect();\n\n\n\n let socket = SocketAddr::new(\"127.0.0.1\".parse().unwrap(), 6831);\n\n\n\n const TRACE_ID: u64 = 42;\n\n const SPAN_ID_PREFIX: u32 = 42;\n\n const ROOT_PARENT_SPAN_ID: u64 = 0;\n\n let bytes = Reporter::encode(\n\n String::from(\"service 
name\"),\n\n TRACE_ID,\n\n ROOT_PARENT_SPAN_ID,\n\n SPAN_ID_PREFIX,\n\n &spans,\n\n )\n\n .expect(\"encode error\");\n\n Reporter::report(socket, &bytes).expect(\"report error\");\n\n}\n", "file_path": "examples/get_started.rs", "rank": 5, "score": 31912.443322819818 }, { "content": "fn rustracing_harness() {\n\n fn dummy_rustracing(span: &rustracing::span::Span<()>) {\n\n for _ in 0..99 {\n\n let _child_span = span.child(\"child\", |c| c.start_with_state(()));\n\n }\n\n }\n\n\n\n let (span_tx, span_rx) = crossbeam::channel::bounded(100);\n\n\n\n {\n\n let tracer = rustracing::Tracer::with_sender(rustracing::sampler::AllSampler, span_tx);\n\n let parent_span = tracer.span(\"parent\").start_with_state(());\n\n dummy_rustracing(&parent_span);\n\n }\n\n\n\n let _r = span_rx.iter().collect::<Vec<_>>();\n\n}\n\n\n", "file_path": "benches/compare.rs", "rank": 6, "score": 31912.443322819818 }, { "content": "fn minitrace_harness() {\n\n fn dummy_minitrace() {\n\n for _ in 0..99 {\n\n let _guard = minitrace::LocalSpan::enter(\"child\");\n\n }\n\n }\n\n\n\n {\n\n let (root_span, collector) = minitrace::Span::root(\"parent\");\n\n let _g = root_span.enter();\n\n\n\n dummy_minitrace();\n\n\n\n collector\n\n }\n\n .collect();\n\n}\n\n\n", "file_path": "benches/compare.rs", "rank": 7, "score": 31912.443322819818 }, { "content": "fn opentelemetry_harness() {\n\n fn dummy_opentelementry() {\n\n for _ in 0..99 {\n\n let child = tracing::span!(tracing::Level::TRACE, \"child\");\n\n let _enter = child.enter();\n\n }\n\n }\n\n\n\n let root = tracing::span!(tracing::Level::TRACE, \"parent\");\n\n let _enter = root.enter();\n\n\n\n dummy_opentelementry();\n\n}\n\n\n", "file_path": "benches/compare.rs", "rank": 8, "score": 31912.443322819818 }, { "content": "fn init_opentelemetry() {\n\n use opentelemetry::api::Provider;\n\n use tracing_subscriber::layer::SubscriberExt;\n\n use tracing_subscriber::prelude::*;\n\n use tracing_subscriber::Registry;\n\n\n\n let tracer = 
opentelemetry::sdk::Provider::default().get_tracer(\"component_name\");\n\n let telemetry = tracing_opentelemetry::layer().with_tracer(tracer);\n\n\n\n Registry::default().with(telemetry).init();\n\n}\n\n\n", "file_path": "benches/compare.rs", "rank": 9, "score": 31912.443322819818 }, { "content": "#[trace(\"func2\")]\n\nfn func2(i: u64) {\n\n std::thread::sleep(std::time::Duration::from_millis(i));\n\n}\n\n\n", "file_path": "examples/synchronous.rs", "rank": 10, "score": 30255.481912272684 }, { "content": "fn func1(i: u64) {\n\n let _guard = LocalSpan::enter(\"func1\");\n\n std::thread::sleep(std::time::Duration::from_millis(i));\n\n func2(i);\n\n}\n\n\n", "file_path": "examples/synchronous.rs", "rank": 11, "score": 30255.481912272684 }, { "content": "fn dummy_iter(i: usize) {\n\n #[trace(\"\")]\n\n fn dummy() {}\n\n\n\n for _ in 0..i {\n\n dummy();\n\n }\n\n}\n\n\n", "file_path": "benches/trace.rs", "rank": 12, "score": 28994.520643379998 }, { "content": "#[trace(\"\")]\n\nfn dummy_rec(i: usize) {\n\n if i > 1 {\n\n dummy_rec(i - 1);\n\n }\n\n}\n\n\n", "file_path": "benches/trace.rs", "rank": 13, "score": 28994.520643379998 }, { "content": "fn tracing_comparison(c: &mut Criterion) {\n\n init_opentelemetry();\n\n\n\n let mut bgroup = c.benchmark_group(\"tracing_comparison\");\n\n\n\n bgroup.bench_function(\"Tokio Tracing\", |b| b.iter(opentelemetry_harness));\n\n bgroup.bench_function(\"Rustracing\", |b| b.iter(rustracing_harness));\n\n bgroup.bench_function(\"Minitrace\", |b| b.iter(minitrace_harness));\n\n\n\n bgroup.finish();\n\n}\n\n\n\ncriterion_group!(benches, tracing_comparison);\n\ncriterion_main!(benches);\n", "file_path": "benches/compare.rs", "rank": 14, "score": 27268.13759652237 }, { "content": "fn next_id_prefix() -> u16 {\n\n NEXT_ID_PREFIX.fetch_add(1, Ordering::AcqRel)\n\n}\n\n\n\nthread_local! 
{\n\n static LOCAL_ID_GENERATOR: Cell<(u16, u16)> = Cell::new((next_id_prefix(), 0))\n\n}\n\n\n\nimpl DefaultIdGenerator {\n\n #[inline]\n\n /// Create a non-zero `SpanId`\n\n pub fn next_id() -> SpanId {\n\n LOCAL_ID_GENERATOR.with(|g| {\n\n let (mut prefix, mut suffix) = g.get();\n\n\n\n if suffix == std::u16::MAX {\n\n suffix = 0;\n\n prefix = next_id_prefix();\n\n }\n\n // `suffix` can not be `0`, so `SpanId` won't be `0`.\n", "file_path": "src/span/span_id.rs", "rank": 15, "score": 26627.223256636855 }, { "content": "fn trace_wide_bench(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"trace_wide\");\n\n\n\n for len in &[1, 10, 100, 1000, 10000] {\n\n group.bench_function(len.to_string(), |b| {\n\n b.iter(|| {\n\n {\n\n let (root_span, collector) = Span::root(\"root\");\n\n let _sg = root_span.enter();\n\n dummy_iter(*len - 1);\n\n collector\n\n }\n\n .collect()\n\n })\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n", "file_path": "benches/trace.rs", "rank": 16, "score": 26240.38107045513 }, { "content": "fn trace_future_bench(c: &mut Criterion) {\n\n async fn f(i: u32) {\n\n for _ in 0..i - 1 {\n\n async {}.in_local_span(black_box(\"\")).await\n\n }\n\n }\n\n\n\n let mut group = c.benchmark_group(\"trace_future\");\n\n\n\n for len in &[1, 10, 100, 1000, 10000] {\n\n group.bench_function(len.to_string(), |b| {\n\n b.iter(|| {\n\n {\n\n let (root_span, collector) = Span::root(\"root\");\n\n let _ = futures::executor::block_on(f(*len).in_span(root_span));\n\n collector\n\n }\n\n .collect()\n\n })\n\n });\n", "file_path": "benches/trace.rs", "rank": 17, "score": 26240.38107045513 }, { "content": "fn trace_deep_bench(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"trace_deep\");\n\n\n\n for len in &[1, 10, 100, 1000] {\n\n group.bench_function(len.to_string(), |b| {\n\n b.iter(|| {\n\n {\n\n let (root_span, collector) = Span::root(\"root\");\n\n let _sg = root_span.enter();\n\n dummy_rec(*len - 1);\n\n collector\n\n }\n\n .collect()\n\n 
})\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n", "file_path": "benches/trace.rs", "rank": 18, "score": 26240.38107045513 }, { "content": "fn trace_deep_raw_bench(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"trace_deep_raw\");\n\n\n\n for len in &[1, 10, 100, 1000] {\n\n group.bench_function(len.to_string(), |b| {\n\n b.iter(|| {\n\n let local_collector = LocalCollector::start();\n\n dummy_rec(*len);\n\n local_collector.collect()\n\n })\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n", "file_path": "benches/trace.rs", "rank": 26, "score": 25305.675810520785 }, { "content": "fn trace_wide_raw_bench(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"trace_wide_raw\");\n\n\n\n for len in &[1, 10, 100, 1000, 10000] {\n\n group.bench_function(len.to_string(), |b| {\n\n b.iter(|| {\n\n let local_collector = LocalCollector::start();\n\n dummy_iter(*len);\n\n local_collector.collect()\n\n })\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n", "file_path": "benches/trace.rs", "rank": 27, "score": 25305.675810520785 }, { "content": "// Copyright 2021 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse crate::local::local_span_line::LOCAL_SPAN_LINE;\n\nuse crate::span::RawSpan;\n\nuse crate::span::{Cycle, DefaultClock};\n\n\n\n#[must_use]\n\n#[derive(Debug, Ord, PartialOrd, Eq, PartialEq)]\n\npub struct LocalCollector {\n\n pub(crate) collected: bool,\n\n pub(crate) local_collector_epoch: usize,\n\n\n\n // Identical to\n\n // ```\n\n // impl !Sync for LocalCollector {}\n\n // impl !Send for LocalCollector {}\n\n // ```\n\n //\n", "file_path": "src/local/local_collector.rs", "rank": 28, "score": 24496.136539319974 }, { "content": " // TODO: Replace it once feature `negative_impls` is stable.\n\n _p: PhantomData<*const ()>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct LocalSpans {\n\n pub spans: Vec<RawSpan>,\n\n pub end_time: Cycle,\n\n}\n\n\n\nimpl LocalCollector {\n\n pub(crate) fn new(local_collector_epoch: usize) -> Self {\n\n Self {\n\n collected: false,\n\n local_collector_epoch,\n\n _p: Default::default(),\n\n }\n\n }\n\n\n\n pub fn start() -> Self {\n", "file_path": "src/local/local_collector.rs", "rank": 29, "score": 24495.578999544337 }, { "content": " Self::try_start().expect(\"Current thread is occupied by another local collector\")\n\n }\n\n\n\n pub fn try_start() -> Option<Self> {\n\n LOCAL_SPAN_LINE.with(|span_line| {\n\n let s = &mut *span_line.borrow_mut();\n\n s.register_local_collector()\n\n })\n\n }\n\n\n\n pub fn collect(mut self) -> LocalSpans {\n\n LOCAL_SPAN_LINE.with(|span_line| {\n\n let s = &mut *span_line.borrow_mut();\n\n self.collected = true;\n\n LocalSpans {\n\n spans: s.unregister_and_collect(self),\n\n end_time: DefaultClock::now(),\n\n }\n\n })\n\n }\n", "file_path": "src/local/local_collector.rs", "rank": 30, "score": 24491.022162927453 }, { "content": "}\n\n\n\nimpl Drop for LocalCollector {\n\n fn drop(&mut self) {\n\n if !self.collected {\n\n self.collected = true;\n\n LOCAL_SPAN_LINE.with(|span_line| {\n\n let s = &mut *span_line.borrow_mut();\n\n 
s.clear();\n\n })\n\n }\n\n }\n\n}\n", "file_path": "src/local/local_collector.rs", "rank": 31, "score": 24488.815223912865 }, { "content": "fn parallel_job() -> Vec<tokio::task::JoinHandle<()>> {\n\n let mut v = Vec::with_capacity(4);\n\n for i in 0..4 {\n\n v.push(tokio::spawn(\n\n iter_job(i).in_span(Span::from_local_parent(\"iter job\")),\n\n ));\n\n }\n\n v\n\n}\n\n\n\nasync fn iter_job(iter: u64) {\n\n std::thread::sleep(std::time::Duration::from_millis(iter * 10));\n\n tokio::task::yield_now().await;\n\n other_job().await;\n\n}\n\n\n\n#[trace_async(\"other job\")]\n\nasync fn other_job() {\n\n for i in 0..20 {\n\n if i == 10 {\n", "file_path": "examples/asynchronous.rs", "rank": 32, "score": 23964.078896618423 }, { "content": " acq.clone(),\n\n ))\n\n }\n\n }\n\n\n\n if to_report.is_empty() {\n\n Self { inner: None }\n\n } else {\n\n Self {\n\n inner: Some(SpanInner { span_id, to_report }),\n\n }\n\n }\n\n }\n\n\n\n pub fn root(event: &'static str) -> (Self, Collector) {\n\n let (tx, rx) = crossbeam::channel::unbounded();\n\n let closed = Arc::new(AtomicBool::new(false));\n\n let acquirer = Acquirer::new(Arc::new(tx), closed.clone());\n\n let span = Self::new(iter::once((SpanId::new(0), &acquirer)), event);\n\n let collector = Collector::new(rx, closed);\n", "file_path": "src/trace/span.rs", "rank": 33, "score": 11.82332444270068 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse std::cell::Cell;\n\nuse std::sync::atomic::{AtomicU16, Ordering};\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]\n\npub struct SpanId(pub u32);\n\n\n\nimpl SpanId {\n\n pub fn new(id: u32) -> Self {\n\n SpanId(id)\n\n }\n\n}\n\n\n\npub struct DefaultIdGenerator;\n\n\n\nstatic NEXT_ID_PREFIX: AtomicU16 = AtomicU16::new(0);\n", "file_path": "src/span/span_id.rs", "rank": 34, "score": 11.458446356448649 }, { "content": " (span, collector)\n\n }\n\n\n\n #[inline]\n\n pub fn empty() -> Self {\n\n Self { inner: None }\n\n }\n\n\n\n #[inline]\n\n pub fn is_empty(&self) -> bool {\n\n self.inner.is_none()\n\n }\n\n\n\n #[inline]\n\n pub fn from_parent(event: &'static str, span: &Span) -> Self {\n\n Self::from_parents(event, iter::once(span))\n\n }\n\n\n\n #[inline]\n\n pub fn from_parents<'a>(\n", "file_path": "src/trace/span.rs", "rank": 35, "score": 11.439102126534626 }, { "content": "pub struct Acquirer {\n\n sender: Arc<Sender<SpanCollection>>,\n\n closed: Arc<AtomicBool>,\n\n}\n\n\n\nimpl Acquirer {\n\n pub fn new(sender: Arc<Sender<SpanCollection>>, closed: Arc<AtomicBool>) -> Self {\n\n Acquirer { sender, closed }\n\n }\n\n\n\n pub fn submit(&self, span_collection: SpanCollection) {\n\n if self.is_shutdown() {\n\n return;\n\n }\n\n\n\n self.sender.send(span_collection).ok();\n\n }\n\n\n\n pub fn is_shutdown(&self) -> bool {\n\n self.closed.load(Ordering::SeqCst)\n\n }\n\n}\n", "file_path": "src/trace/acquirer.rs", "rank": 36, "score": 11.395463420282557 }, { "content": " span_handle: SpanHandle,\n\n local_collector_epoch: usize,\n\n}\n\n\n\nimpl LocalSpanLine {\n\n #[inline]\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n Self {\n\n span_queue: SpanQueue::with_capacity(capacity),\n\n local_collector_existing: false,\n\n current_local_collector_epoch: 0,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn enter_span(&mut self, event: &'static str) -> Option<LocalSpanHandle> {\n\n if !self.local_collector_existing 
{\n\n return None;\n\n }\n\n\n", "file_path": "src/local/local_span_line.rs", "rank": 37, "score": 11.137860581016776 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse std::cell::RefCell;\n\nuse std::marker::PhantomData;\n\nuse std::sync::Arc;\n\n\n\nuse crate::local::local_collector::LocalCollector;\n\nuse crate::span::SpanId;\n\nuse crate::trace::acquirer::{Acquirer, SpanCollection};\n\nuse crate::Span;\n\n\n\nthread_local! {\n\n static ATTACHED_SPAN: RefCell<Option<AttachedSpan>> = RefCell::new(None);\n\n}\n\n\n\npub struct AttachedSpan {\n\n span_id: SpanId,\n\n acquirers: Vec<Acquirer>,\n\n\n\n local_collector: Option<LocalCollector>,\n", "file_path": "src/local/span_guard.rs", "rank": 38, "score": 10.15298346872003 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse std::iter;\n\nuse std::sync::atomic::AtomicBool;\n\nuse std::sync::Arc;\n\n\n\nuse crate::local::local_collector::LocalSpans;\n\nuse crate::span::RawSpan;\n\nuse crate::span::{DefaultClock, DefaultIdGenerator, SpanId};\n\nuse crate::trace::acquirer::{Acquirer, SpanCollection};\n\nuse crate::Collector;\n\n\n\n#[must_use]\n\n#[derive(Debug)]\n\npub struct Span {\n\n pub(crate) inner: Option<SpanInner>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct SpanInner {\n", "file_path": "src/trace/span.rs", "rank": 39, "score": 10.050180916584537 }, { "content": "#### Threads\n\n\n\n```rust\n\nuse minitrace::*;\n\n\n\nlet collector = {\n\n let (root_span, collector) = Span::root(\"task1\");\n\n let _span_guard = root_span.enter();\n\n\n\n let _local_span_guard = LocalSpan::enter(\"span of task1\");\n\n \n\n // To trace a child task\n\n let span = Span::from_local_parent(\"task2\");\n\n std::thread::spawn(move || {\n\n let _span_guard = span.enter();\n\n\n\n let _loal_span_guard = Span::enter(\"span of also task2\");\n\n });\n\n\n\n collector\n\n};\n\n\n\nlet spans: Vec<span::Span> = 
collector.collect();\n\n```\n\n\n\n#### Futures\n\n\n\nWe provide two `Future` adaptors:\n\n\n\n- `in_local_span`: call `LocalSpan::enter` at every poll\n\n- `in_span`: wrap the `Future` with a `Span`, then call `Span::try_enter` at every poll\n\n\n\nThe `in_span` adaptor is commonly used on a `Future` submitting to a runtime.\n\n\n\n```rust\n\nuse minitrace::*;\n\n\n\nlet collector = {\n\n let (root_span, collector) = Span::root(\"root\");\n\n let _span_guard = root_span.enter();\n\n\n\n // To trace another task\n\n runtime::spawn(async {\n\n let _ = async {\n\n // some works\n\n }.in_local_span(\"\");\n\n }.in_span(Span::from_local_parent(\"new task\")));\n\n\n\n collector\n\n};\n\n\n\nlet spans: Vec<span::Span> = collector.collect();\n\n```\n\n\n\n### Macros\n\n\n\nWe provide two macros to help reduce boilerplate code:\n\n\n\n- trace\n\n- trace_async\n\n\n\nFor normal functions, you can change:\n\n```rust\n\nuse minitrace::*;\n\n\n\nfn amazing_func() {\n\n let _span_guard = LocalSpan::enter(\"wow\");\n\n\n\n // some works\n\n}\n\n```\n\nto\n\n```rust\n\nuse minitrace::*;\n\nuse minitrace_macro::trace;\n\n\n\n#[trace(\"wow\")]\n\nfn amazing_func() {\n\n // some works\n\n}\n\n```\n\n\n\nFor async functions, you can change:\n\n```rust\n\nuse minitrace::*;\n\n\n\nasync fn amazing_async_func() {\n\n async {\n\n // some works\n\n }\n\n .in_local_span(\"wow\")\n\n .await\n\n}\n\n```\n\nto\n\n```rust\n\nuse minitrace::*;\n\nuse minitrace_macro::trace_async;\n\n\n\n#[trace_async(\"wow\")]\n\nasync fn amazing_async_func() {\n\n // some works\n\n}\n\n```\n\n\n\nTo access these macros, a dependency should be added as:\n\n\n\n```toml\n\n[dependencies]\n\nminitrace-macro = { git = \"https://github.com/tikv/minitrace-rust.git\" }\n\n```\n\n\n", "file_path": "README.md", "rank": 40, "score": 9.962523978843446 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::Arc;\n\n\n\nuse crossbeam::channel::Sender;\n\n\n\nuse crate::local::local_collector::LocalSpans;\n\nuse crate::span::{RawSpan, SpanId};\n\n\n\n#[derive(Clone, Debug)]\n\npub enum SpanCollection {\n\n LocalSpans {\n\n local_spans: Arc<LocalSpans>,\n\n parent_id_of_root: SpanId,\n\n },\n\n Span(RawSpan),\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "src/trace/acquirer.rs", "rank": 41, "score": 9.940945861023812 }, { "content": " /// Tag list.\n\n pub tags: Vec<Tag>,\n\n}\n\n\n\nimpl From<Process> for Struct {\n\n fn from(f: Process) -> Self {\n\n let tags = List::from(f.tags.into_iter().map(Struct::from).collect::<Vec<_>>());\n\n if tags.is_empty() {\n\n Struct::from((f.service_name,))\n\n } else {\n\n Struct::from((f.service_name, tags))\n\n }\n\n }\n\n}\n\n\n\n/// `Batch` is a collection of spans reported out of process.\n\n#[derive(Debug, Clone)]\n\npub struct Batch {\n\n pub process: Process,\n\n pub spans: Vec<Span>,\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 42, "score": 9.72973105864164 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\n//! Thrift components defined in [jaeger.thrift].\n\n//!\n\n//! 
[jaeger.thrift]: https://github.com/uber/jaeger-idl/blob/master/thrift/jaeger.thrift\n\nuse thrift_codec::data::{Field, List, Struct};\n\nuse thrift_codec::message::Message;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct EmitBatchNotification {\n\n pub batch: Batch,\n\n}\n\n\n\nimpl From<EmitBatchNotification> for Message {\n\n fn from(f: EmitBatchNotification) -> Self {\n\n Message::oneway(\"emitBatch\", 0, Struct::from((Struct::from(f.batch),)))\n\n }\n\n}\n\n\n\n/// `TagKind` denotes the kind of a `Tag`'s value.\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 43, "score": 9.51025220786992 }, { "content": " pub timestamp: i64,\n\n pub fields: Vec<Tag>,\n\n}\n\n\n\nimpl From<Log> for Struct {\n\n fn from(f: Log) -> Self {\n\n Struct::from((\n\n f.timestamp,\n\n List::from(f.fields.into_iter().map(Struct::from).collect::<Vec<_>>()),\n\n ))\n\n }\n\n}\n\n\n\n/// Span reference kind.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum SpanRefKind {\n\n ChildOf = 0,\n\n FollowsFrom = 1,\n\n}\n\n\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 44, "score": 9.504138068998433 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\npub use crate::future::FutureExt;\n\npub use crate::local::local_collector::{LocalCollector, LocalSpans};\n\npub use crate::local::local_span_guard::LocalSpanGuard;\n\npub use crate::local::span_guard::SpanGuard;\n\npub use crate::trace::collector::{CollectArgs, Collector};\n\npub use crate::trace::local_span::LocalSpan;\n\npub use crate::trace::span::Span;\n\n\n\npub mod span;\n\n\n\npub(crate) mod future;\n\npub(crate) mod local;\n\npub(crate) mod trace;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::local::local_collector::LocalCollector;\n", "file_path": "src/lib.rs", "rank": 45, "score": 9.488980259812054 }, { "content": " pub fn is_occupied() -> bool {\n\n ATTACHED_SPAN.with(|attached_span| {\n\n let attached_span = attached_span.borrow();\n\n attached_span.is_some()\n\n })\n\n }\n\n}\n\n\n\n#[must_use]\n\npub struct SpanGuard {\n\n // Identical to\n\n // ```\n\n // impl !Sync for SpanGuard {}\n\n // impl !Send for SpanGuard {}\n\n // ```\n\n //\n\n // TODO: Replace it once feature `negative_impls` is stable.\n\n _p: PhantomData<*const ()>,\n\n}\n\n\n", "file_path": "src/local/span_guard.rs", "rank": 46, "score": 9.381704474141124 }, { "content": "}\n\n\n\nimpl From<Batch> for Struct {\n\n fn from(f: Batch) -> Self {\n\n Struct::from((\n\n Struct::from(f.process),\n\n List::from(f.spans.into_iter().map(Struct::from).collect::<Vec<_>>()),\n\n ))\n\n }\n\n}\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 47, "score": 9.037433266335755 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse crate::span::cycle::DefaultClock;\n\nuse crate::span::span_id::{DefaultIdGenerator, SpanId};\n\nuse crate::span::RawSpan;\n\n\n\npub struct SpanQueue {\n\n span_queue: Vec<RawSpan>,\n\n next_parent_id: SpanId,\n\n}\n\n\n\npub struct SpanHandle {\n\n pub(crate) index: usize,\n\n}\n\n\n\nimpl SpanQueue {\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n Self {\n\n span_queue: Vec::with_capacity(capacity),\n\n next_parent_id: SpanId::new(0),\n", "file_path": "src/span/span_queue.rs", "rank": 48, "score": 8.699639845589934 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse crate::local::local_span_line::{LocalSpanHandle, LocalSpanLine, LOCAL_SPAN_LINE};\n\n\n\n#[must_use]\n\npub struct LocalSpanGuard {\n\n span_handle: Option<LocalSpanHandle>,\n\n\n\n // Identical to\n\n // ```\n\n // impl !Sync for LocalSpanGuard {}\n\n // impl !Send for LocalSpanGuard {}\n\n // ```\n\n //\n\n // TODO: Replace it once feature `negative_impls` is stable.\n\n _p: PhantomData<*const ()>,\n\n}\n\n\n", "file_path": "src/local/local_span_guard.rs", "rank": 49, "score": 8.340653867049168 }, { "content": " /// `1` signifies a SAMPLED span, `2` signifies a DEBUG span.\n\n pub flags: i32,\n\n\n\n /// Start time of this span.\n\n pub start_time: i64,\n\n\n\n /// Duration of this span.\n\n pub duration: i64,\n\n\n\n /// Tag list.\n\n pub tags: Vec<Tag>,\n\n\n\n /// Log list.\n\n pub logs: Vec<Log>,\n\n}\n\n\n\nimpl From<Span> for Struct {\n\n fn from(f: Span) -> Self {\n\n let mut fields = Vec::with_capacity(11);\n\n fields.push(Field::new(1, f.trace_id_low));\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 50, "score": 8.122977738090896 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse minitrace::span::Span;\n\nuse rmp_serde::Serializer;\n\nuse serde::Serialize;\n\nuse std::collections::HashMap;\n\nuse std::error::Error;\n\nuse std::net::SocketAddr;\n\n\n\npub struct Reporter;\n\n\n\nimpl Reporter {\n\n pub fn encode(\n\n service_name: &str,\n\n trace_id: u64,\n\n root_parent_span_id: u64,\n\n span_id_prefix: u32,\n\n spans: &[Span],\n\n ) -> Result<Vec<u8>, Box<dyn Error + Send + Sync + 'static>> {\n\n let spans = spans.iter().map(|s| MPSpan {\n", "file_path": "crates/minitrace-datadog/src/lib.rs", "rank": 51, "score": 7.901017846205681 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse std::cell::RefCell;\n\n\n\nuse crate::local::local_collector::LocalCollector;\n\nuse crate::span::span_queue::{SpanHandle, SpanQueue};\n\nuse crate::span::RawSpan;\n\n\n\nthread_local! {\n\n pub(super) static LOCAL_SPAN_LINE: RefCell<LocalSpanLine> = RefCell::new(LocalSpanLine::with_capacity(1024));\n\n}\n\n\n\npub struct LocalSpanLine {\n\n span_queue: SpanQueue,\n\n\n\n local_collector_existing: bool,\n\n current_local_collector_epoch: usize,\n\n}\n\n\n\npub struct LocalSpanHandle {\n", "file_path": "src/local/local_span_line.rs", "rank": 52, "score": 7.788218363805655 }, { "content": "/// `SpanRef` describes causal relationship of the current span to another span (e.g. 
'child-of')\n\n#[derive(Debug, Clone)]\n\npub struct SpanRef {\n\n pub kind: SpanRefKind,\n\n pub trace_id_low: i64,\n\n pub trace_id_high: i64,\n\n pub span_id: i64,\n\n}\n\n\n\nimpl From<SpanRef> for Struct {\n\n fn from(f: SpanRef) -> Self {\n\n Struct::from((f.kind as i32, f.trace_id_low, f.trace_id_high, f.span_id))\n\n }\n\n}\n\n\n\n/// `Span` represents a named unit of work performed by a service.\n\n#[derive(Debug, Clone)]\n\npub struct Span {\n\n /// The least significant 64 bits of a traceID.\n\n pub trace_id_low: i64,\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 53, "score": 7.782503397427094 }, { "content": "impl LocalSpanGuard {\n\n #[inline]\n\n pub(crate) fn new(event: &'static str) -> Self {\n\n LOCAL_SPAN_LINE.with(|span_line| {\n\n let mut span_line = span_line.borrow_mut();\n\n let span_handle = span_line.enter_span(event);\n\n Self {\n\n span_handle,\n\n _p: Default::default(),\n\n }\n\n })\n\n }\n\n\n\n #[inline]\n\n pub fn with_properties<I: IntoIterator<Item = (&'static str, String)>, F: FnOnce() -> I>(\n\n self,\n\n properties: F,\n\n ) -> Self {\n\n self.with_span_line(move |span_handle, span_line| {\n\n span_line.add_properties(span_handle, properties)\n", "file_path": "src/local/local_span_guard.rs", "rank": 54, "score": 7.675216765652083 }, { "content": "\n\nimpl SpanGuard {\n\n #[inline]\n\n pub(crate) fn new_with_local_collector(\n\n span: &Span,\n\n local_collector: Option<LocalCollector>,\n\n ) -> Self {\n\n ATTACHED_SPAN.with(|attached_span| {\n\n let mut attached_span = attached_span.borrow_mut();\n\n\n\n if attached_span.is_some() {\n\n panic!(\"Attach too much spans: > 1\")\n\n }\n\n\n\n if let Some(inner) = &span.inner {\n\n *attached_span = Some(AttachedSpan {\n\n span_id: inner.span_id,\n\n acquirers: inner.to_report.iter().map(|(_, acq)| acq.clone()).collect(),\n\n local_collector,\n\n })\n", "file_path": "src/local/span_guard.rs", "rank": 55, "score": 7.446825135115082 }, { "content": " } else {\n\n 
Some(SpanGuard::new_with_local_collector(\n\n self,\n\n LocalCollector::try_start(),\n\n ))\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn from_local_parent(event: &'static str) -> Self {\n\n AttachedSpan::new_child_span(event)\n\n }\n\n}\n", "file_path": "src/local/span_guard.rs", "rank": 56, "score": 7.4159717510524175 }, { "content": " });\n\n self\n\n }\n\n\n\n #[inline]\n\n pub fn with_property<F: FnOnce() -> (&'static str, String)>(self, property: F) -> Self {\n\n self.with_span_line(move |span_handle, span_line| {\n\n span_line.add_property(span_handle, property);\n\n });\n\n self\n\n }\n\n}\n\n\n\nimpl LocalSpanGuard {\n\n #[inline]\n\n fn with_span_line(&self, f: impl FnOnce(&LocalSpanHandle, &mut LocalSpanLine)) {\n\n if let Some(local_span_handle) = &self.span_handle {\n\n LOCAL_SPAN_LINE.with(|span_line| {\n\n let span_line = &mut *span_line.borrow_mut();\n\n f(local_span_handle, span_line);\n", "file_path": "src/local/local_span_guard.rs", "rank": 57, "score": 7.3310231062272475 }, { "content": "### Local Collector\n\n\n\n `LocalCollector` is provided to retrieve `LocalSpan`s from thread local. Such collected `LocalSpan`s can be mounted to a\n\n normal `Span`.\n\n \n\n At most time, we should use the thread-local tracing mechanism (i.e `LocalSpan` and `LocalCollector`) to achieve the high\n\n performance goal. 
In other situations where execution is crossing thread bound, and a `Span` has to be passed from one\n\n thread to another thread, we just need to simply mount `LocalSpan`s, which is collected by different `LocalCollector`s,\n\n to the `Span`.\n\n\n\n\n\n## Usage\n\n\n\n```toml\n\n[dependencies]\n\nminitrace = { git = \"https://github.com/tikv/minitrace-rust.git\" }\n\n```\n\n\n\n### Record a Span\n\n\n\nTo record a common span:\n\n```rust\n\nuse minitrace::*;\n\n\n\nlet _span_guard = LocalSpan::enter(\"my event\");\n\n```\n\n\n\nTo add properties:\n\n\n\n```rust\n\nuse minitrace::*;\n\n\n\n// add a property for a span\n\nlet _span_guard = LocalSpan::enter(\"my event\").with_property(|| (\"key\", String::from(\"value\")));\n\n\n\n// or add multiple properties for a span\n\nlet _span_guard = LocalSpan::enter(\"my event\").with_properties(|| {\n\n vec![\n\n (\"key1\", String::from(\"value1\")),\n\n (\"key2\", String::from(\"value2\")),\n\n ]\n\n});\n\n```\n\n\n\n### Synchronous Example\n\n\n\nA common pattern to trace synchronous code:\n\n\n\n- Create a root `Span` and a `Collector` via `Span::root()`, then attach the `Span` to the current thread.\n\n- Add `LocalSpan::enter()`s somewhere, e.g. 
at the beginning of a code scope, at the beginning of a function, to record spans.\n\n- Make sure the root `Span` and all guards are dropped, then call `Collector`'s `collect` to get all `Span`s.\n\n\n\n\n\n```rust\n\nuse minitrace::*;\n\n\n\nlet collector = {\n\n let (root_span, collector) = Span::root(\"root\");\n\n let _span_guard = root_span.enter();\n\n\n\n let _local_span_guard = LocalSpan::enter(\"child\");\n\n\n\n // do something ...\n\n\n\n collector\n\n};\n\n\n\nlet spans: Vec<span::Span> = collector.collect();\n\n```\n\n\n\n### Asynchronous Example\n\n\n\nTo trace asynchronous code, we usually transmit `Span` from one thread to another thread.\n\n\n", "file_path": "README.md", "rank": 58, "score": 7.228182875322167 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\npub use minstant::Anchor;\n\npub use minstant::Cycle;\n\n\n\npub struct DefaultClock;\n\n\n\nimpl DefaultClock {\n\n #[inline]\n\n pub fn now() -> Cycle {\n\n Cycle::now()\n\n }\n\n\n\n #[inline]\n\n pub fn cycle_to_unix_time_ns(cycle: Cycle, anchor: Anchor) -> u64 {\n\n cycle.into_unix_time_ns(anchor)\n\n }\n\n\n\n #[inline]\n\n pub fn anchor() -> Anchor {\n\n Anchor::new()\n\n }\n\n}\n", "file_path": "src/span/cycle.rs", "rank": 59, "score": 7.20975722862978 }, { "content": " use crate::trace::collector::CollectArgs;\n\n use minitrace_macro::trace;\n\n use std::sync::Arc;\n\n\n\n fn four_spans() {\n\n {\n\n // wide\n\n for _ in 0..2 {\n\n let _g = LocalSpan::enter(\"iter span\")\n\n .with_property(|| (\"tmp_property\", \"tmp_value\".into()));\n\n }\n\n }\n\n\n\n {\n\n #[trace(\"rec span\")]\n\n fn rec(mut i: u32) {\n\n i -= 1;\n\n\n\n if i > 0 {\n\n rec(i);\n", "file_path": "src/lib.rs", "rank": 60, "score": 7.097997225228828 }, { "content": " }\n\n });\n\n\n\n SpanGuard {\n\n _p: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl Span {\n\n #[inline]\n\n pub fn enter(&self) -> SpanGuard {\n\n self.try_enter()\n\n .expect(\"Current thread is occupied by 
another span\")\n\n }\n\n\n\n #[inline]\n\n pub fn try_enter(&self) -> Option<SpanGuard> {\n\n if AttachedSpan::is_occupied() {\n\n None\n", "file_path": "src/local/span_guard.rs", "rank": 61, "score": 6.970125999184898 }, { "content": " Some(LocalSpanHandle {\n\n span_handle: self.span_queue.start_span(event),\n\n local_collector_epoch: self.current_local_collector_epoch,\n\n })\n\n }\n\n\n\n #[inline]\n\n pub fn exit_span(&mut self, local_span_handle: LocalSpanHandle) {\n\n if self.is_valid(&local_span_handle) {\n\n self.span_queue.finish_span(local_span_handle.span_handle);\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn register_local_collector(&mut self) -> Option<LocalCollector> {\n\n // Only allow one local collector per thread\n\n if self.local_collector_existing {\n\n return None;\n\n }\n\n\n", "file_path": "src/local/local_span_line.rs", "rank": 62, "score": 6.8943262419814175 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nmod thrift;\n\n\n\nuse minitrace::span::Span;\n\nuse std::error::Error;\n\nuse std::net::{SocketAddr, UdpSocket};\n\nuse thrift_codec::message::Message;\n\nuse thrift_codec::CompactEncode;\n\n\n\nuse crate::thrift::{\n\n Batch, EmitBatchNotification, Process, Span as JaegerSpan, SpanRef, SpanRefKind, Tag,\n\n};\n\n\n\npub struct Reporter;\n\n\n\nimpl Reporter {\n\n pub fn encode(\n\n service_name: String,\n\n trace_id: u64,\n", "file_path": "crates/minitrace-jaeger/src/lib.rs", "rank": 63, "score": 6.730605573160831 }, { "content": "// Copyright 2021 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse crate::LocalSpanGuard;\n\n\n\npub struct LocalSpan;\n\n\n\nimpl LocalSpan {\n\n pub fn enter(event: &'static str) -> LocalSpanGuard {\n\n LocalSpanGuard::new(event)\n\n }\n\n}\n", "file_path": "src/trace/local_span.rs", "rank": 64, "score": 6.619676757577342 }, { "content": " }\n\n}\n\n\n\nimpl From<Tag> for Struct {\n\n fn from(f: Tag) -> Self {\n\n let mut fields = vec![Field::new(1, f.key()), Field::new(2, f.kind() as i32)];\n\n match f {\n\n Tag::String { value, .. } => fields.push(Field::new(3, value)),\n\n Tag::Double { value, .. } => fields.push(Field::new(4, value)),\n\n Tag::Bool { value, .. } => fields.push(Field::new(5, value)),\n\n Tag::Long { value, .. } => fields.push(Field::new(6, value)),\n\n Tag::Binary { value, .. } => fields.push(Field::new(7, value)),\n\n };\n\n Struct::new(fields)\n\n }\n\n}\n\n\n\n/// `Log` is a timed even with an arbitrary set of tags.\n\n#[derive(Debug, Clone)]\n\npub struct Log {\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 65, "score": 6.227879513296459 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nmod cycle;\n\nmod span_id;\n\n\n\npub(crate) mod span_queue;\n\npub(crate) use self::span_id::DefaultIdGenerator;\n\n\n\npub use self::cycle::{Anchor, Cycle, DefaultClock};\n\npub use self::span_id::SpanId;\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct Span {\n\n pub id: u32,\n\n pub parent_id: u32,\n\n pub begin_unix_time_ns: u64,\n\n pub duration_ns: u64,\n\n pub event: &'static str,\n\n pub properties: Vec<(&'static str, String)>,\n\n}\n", "file_path": "src/span/mod.rs", "rank": 66, "score": 5.84335933041543 }, { "content": " event: &'static str,\n\n spans: impl IntoIterator<Item = &'a Span>,\n\n ) -> Self {\n\n Self::new(\n\n spans\n\n .into_iter()\n\n .filter_map(|span| span.inner.as_ref())\n\n .flat_map(|inner| {\n\n inner\n\n .to_report\n\n .iter()\n\n .map(move |(_, acq)| (inner.span_id, acq))\n\n }),\n\n event,\n\n )\n\n }\n\n\n\n #[inline]\n\n pub fn mount_local_spans(&self, local_spans: Arc<LocalSpans>) {\n\n if let Some(inner) = &self.inner {\n", "file_path": "src/trace/span.rs", "rank": 67, "score": 5.832594548927036 }, { "content": " pub(crate) span_id: SpanId,\n\n\n\n // Report `RawSpan` to `Acquirer` when `SpanInner` is dropping\n\n pub(crate) to_report: Vec<(RawSpan, Acquirer)>,\n\n}\n\n\n\nimpl Span {\n\n #[inline]\n\n pub(crate) fn new<'a>(\n\n acquirers: impl Iterator<Item = (SpanId, &'a Acquirer)>,\n\n event: &'static str,\n\n ) -> Self {\n\n let span_id = DefaultIdGenerator::next_id();\n\n let now = DefaultClock::now();\n\n\n\n let mut to_report = Vec::new();\n\n for (parent_span_id, acq) in acquirers {\n\n if !acq.is_shutdown() {\n\n to_report.push((\n\n RawSpan::begin_with(span_id, parent_span_id, now, event),\n", "file_path": "src/trace/span.rs", "rank": 68, "score": 5.729702733545555 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse criterion::{black_box, criterion_group, criterion_main, Criterion};\n\nuse minitrace::LocalCollector;\n\nuse minitrace::*;\n\nuse minitrace_macro::trace;\n\n\n", "file_path": "benches/trace.rs", "rank": 69, "score": 5.653718546934531 }, { "content": " self.local_collector_existing = true;\n\n self.current_local_collector_epoch = self.current_local_collector_epoch.wrapping_add(1);\n\n\n\n Some(LocalCollector::new(self.current_local_collector_epoch))\n\n }\n\n\n\n pub fn unregister_and_collect(&mut self, local_collector: LocalCollector) -> Vec<RawSpan> {\n\n debug_assert!(self.local_collector_existing);\n\n debug_assert_eq!(\n\n local_collector.local_collector_epoch,\n\n self.current_local_collector_epoch\n\n );\n\n\n\n self.local_collector_existing = false;\n\n self.span_queue.take_queue()\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.local_collector_existing = false;\n\n self.span_queue.clear();\n", "file_path": "src/local/local_span_line.rs", "rank": 70, "score": 5.636534664781316 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\npub mod acquirer;\n\npub mod collector;\n\npub mod local_span;\n\npub mod span;\n", "file_path": "src/trace/mod.rs", "rank": 71, "score": 5.625273978848033 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n#[pin_project::pin_project]\n\npub struct InLocalSpan<T> {\n\n #[pin]\n\n inner: T,\n\n event: &'static str,\n\n}\n\n\n\nimpl<T: std::future::Future> std::future::Future for InLocalSpan<T> {\n\n type Output = T::Output;\n\n\n\n fn poll(self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {\n\n let this = self.project();\n\n let _guard = LocalSpan::enter(this.event);\n\n this.inner.poll(cx)\n\n }\n\n}\n", "file_path": "src/future.rs", "rank": 72, "score": 5.524288327925273 }, { "content": " 10,\n\n List::from(f.tags.into_iter().map(Struct::from).collect::<Vec<_>>()),\n\n ));\n\n }\n\n if !f.logs.is_empty() {\n\n fields.push(Field::new(\n\n 11,\n\n List::from(f.logs.into_iter().map(Struct::from).collect::<Vec<_>>()),\n\n ));\n\n }\n\n Struct::new(fields)\n\n }\n\n}\n\n\n\n/// `Process` describes the traced process/service that emits spans.\n\n#[derive(Debug, Clone)]\n\npub struct Process {\n\n /// The name of this service.\n\n pub service_name: String,\n\n\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 73, "score": 5.494782573011564 }, { "content": "pub struct InSpan<T> {\n\n #[pin]\n\n inner: T,\n\n span: Option<Span>,\n\n}\n\n\n\nimpl<T: std::future::Future> std::future::Future for InSpan<T> {\n\n type Output = T::Output;\n\n\n\n fn poll(self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {\n\n let this = self.project();\n\n\n\n let _guard = this.span.as_ref().map(|s| s.try_enter());\n\n let res = this.inner.poll(cx);\n\n\n\n match res {\n\n r @ Poll::Pending => r,\n\n other => {\n\n this.span.take();\n\n other\n", "file_path": "src/future.rs", "rank": 74, "score": 5.4761182660748355 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\npub mod local_collector;\n\npub mod local_span_guard;\n\npub mod local_span_line;\n\npub mod span_guard;\n", "file_path": "src/local/mod.rs", "rank": 75, "score": 5.379694760338717 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse std::task::Poll;\n\n\n\nuse crate::{LocalSpan, Span};\n\n\n\nimpl<T: std::future::Future> FutureExt for T {}\n\n\n", "file_path": "src/future.rs", "rank": 76, "score": 5.185763690571395 }, { "content": "\n\n /// The most significant 64 bits of a traceID; 0 when only 64bit IDs are used.\n\n pub trace_id_high: i64,\n\n\n\n /// Unique span id (only unique within a given trace).\n\n pub span_id: i64,\n\n\n\n /// Since nearly all spans will have parents spans, `ChildOf` refs do not have to be explicit.\n\n ///\n\n /// Should be `0` if the current span is a root span.\n\n pub parent_span_id: i64,\n\n\n\n /// The name of operation.\n\n pub operation_name: String,\n\n\n\n /// Causal references to other spans.\n\n pub references: Vec<SpanRef>,\n\n\n\n /// A bit field used to propagate sampling decisions.\n\n ///\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 77, "score": 5.101742385041855 }, { "content": " }\n\n }\n\n\n\n // deep\n\n rec(2);\n\n }\n\n }\n\n\n\n #[test]\n\n fn single_thread_single_span() {\n\n let spans = {\n\n let (root_span, collector) = Span::root(\"root\");\n\n let _g = root_span.enter();\n\n\n\n four_spans();\n\n\n\n collector\n\n }\n\n .collect_with_args(CollectArgs::default().sync(true));\n\n\n", "file_path": "src/lib.rs", "rank": 78, "score": 5.058203473426604 }, { "content": "\n\n#[derive(Clone, Debug)]\n\npub struct RawSpan {\n\n pub id: SpanId,\n\n pub parent_id: SpanId,\n\n pub begin_cycle: Cycle,\n\n pub event: &'static str,\n\n pub properties: Vec<(&'static str, String)>,\n\n\n\n // Will write this field at post processing\n\n pub end_cycle: Cycle,\n\n}\n\n\n\nimpl RawSpan {\n\n #[inline]\n\n pub(crate) fn begin_with(\n\n 
id: SpanId,\n\n parent_id: SpanId,\n\n begin_cycles: Cycle,\n\n event: &'static str,\n", "file_path": "src/span/mod.rs", "rank": 79, "score": 4.759507485432627 }, { "content": " if self.is_valid(local_span_handle) {\n\n self.span_queue\n\n .add_property(&local_span_handle.span_handle, property());\n\n }\n\n }\n\n}\n\n\n\nimpl LocalSpanLine {\n\n #[inline]\n\n fn is_valid(&self, local_span_handle: &LocalSpanHandle) -> bool {\n\n self.local_collector_existing\n\n && local_span_handle.local_collector_epoch == self.current_local_collector_epoch\n\n }\n\n}\n", "file_path": "src/local/local_span_line.rs", "rank": 80, "score": 4.755064074326721 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn add_properties<I: IntoIterator<Item = (&'static str, String)>, F: FnOnce() -> I>(\n\n &mut self,\n\n local_span_handle: &LocalSpanHandle,\n\n properties: F,\n\n ) {\n\n if self.is_valid(local_span_handle) {\n\n self.span_queue\n\n .add_properties(&local_span_handle.span_handle, properties());\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn add_property<F: FnOnce() -> (&'static str, String)>(\n\n &mut self,\n\n local_span_handle: &LocalSpanHandle,\n\n property: F,\n\n ) {\n", "file_path": "src/local/local_span_line.rs", "rank": 81, "score": 4.700625739354869 }, { "content": "\n\n (collector1, collector2, collector3)\n\n };\n\n\n\n (\n\n c1.collect_with_args(CollectArgs::default().sync(true)),\n\n c2.collect_with_args(CollectArgs::default().sync(true)),\n\n c3.collect_with_args(CollectArgs::default().sync(true)),\n\n )\n\n };\n\n\n\n assert_eq!(spans1.len(), 5);\n\n assert_eq!(spans2.len(), 5);\n\n assert_eq!(spans3.len(), 5);\n\n }\n\n\n\n #[test]\n\n fn multiple_threads_single_span() {\n\n let spans = {\n\n let (span, collector) = Span::root(\"root\");\n", "file_path": "src/lib.rs", "rank": 82, "score": 4.533715239890624 }, { "content": " .collect::<Vec<_>>()\n\n .serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n\n\n Ok(buf)\n\n }\n\n\n\n pub fn report_blocking(\n\n agent: 
SocketAddr,\n\n bytes: Vec<u8>,\n\n ) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {\n\n let client = reqwest::blocking::Client::new();\n\n let rep = client\n\n .post(&format!(\"http://{}/v0.4/traces\", agent))\n\n .header(\"Datadog-Meta-Tracer-Version\", \"v1.27.0\")\n\n .header(\"Content-Type\", \"application/msgpack\")\n\n .body(bytes)\n\n .send()?;\n\n\n\n if rep.status().as_u16() >= 400 {\n\n let status = rep.status();\n", "file_path": "crates/minitrace-datadog/src/lib.rs", "rank": 83, "score": 4.475888539758318 }, { "content": " let (spans1, spans2, spans3) = {\n\n let (c1, c2, c3) = {\n\n let (root_span1, collector1) = Span::root(\"root1\");\n\n let (root_span2, collector2) = Span::root(\"root2\");\n\n let (root_span3, collector3) = Span::root(\"root3\");\n\n\n\n let local_collector = LocalCollector::start();\n\n\n\n let local_spans = Arc::new(local_collector.collect());\n\n root_span1.mount_local_spans(local_spans.clone());\n\n root_span2.mount_local_spans(local_spans.clone());\n\n root_span3.mount_local_spans(local_spans);\n\n\n\n (collector1, collector2, collector3)\n\n };\n\n\n\n (\n\n c1.collect_with_args(CollectArgs::default().sync(true)),\n\n c2.collect_with_args(CollectArgs::default().sync(true)),\n\n c3.collect_with_args(CollectArgs::default().sync(true)),\n\n )\n\n };\n\n\n\n assert_eq!(spans1.len(), 1);\n\n assert_eq!(spans2.len(), 1);\n\n assert_eq!(spans3.len(), 1);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 84, "score": 4.4537440631574 }, { "content": "impl Drop for SpanGuard {\n\n fn drop(&mut self) {\n\n ATTACHED_SPAN.with(|attached_span| {\n\n if let Some(AttachedSpan {\n\n span_id,\n\n acquirers,\n\n local_collector: Some(local_collector),\n\n }) = attached_span.borrow_mut().take()\n\n {\n\n let raw_spans = Arc::new(local_collector.collect());\n\n for acq in acquirers {\n\n acq.submit(SpanCollection::LocalSpans {\n\n local_spans: raw_spans.clone(),\n\n parent_id_of_root: span_id,\n\n })\n\n }\n\n }\n\n })\n\n }\n\n}\n", 
"file_path": "src/local/span_guard.rs", "rank": 85, "score": 4.4361083692245105 }, { "content": " for (_, acq) in &inner.to_report {\n\n acq.submit(SpanCollection::LocalSpans {\n\n local_spans: local_spans.clone(),\n\n parent_id_of_root: inner.span_id,\n\n })\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for SpanInner {\n\n fn drop(&mut self) {\n\n let now = DefaultClock::now();\n\n for (mut span, collector) in self.to_report.drain(..) {\n\n span.end_with(now);\n\n collector.submit(SpanCollection::Span(span))\n\n }\n\n }\n\n}\n", "file_path": "src/trace/span.rs", "rank": 86, "score": 4.240127155068622 }, { "content": " let _g = span.enter();\n\n\n\n for _ in 0..4 {\n\n let child_span = Span::from_local_parent(\"cross-thread\");\n\n std::thread::spawn(move || {\n\n let _g = child_span.enter();\n\n four_spans();\n\n });\n\n }\n\n\n\n four_spans();\n\n\n\n collector\n\n }\n\n .collect_with_args(CollectArgs::default().sync(true));\n\n\n\n assert_eq!(spans.len(), 25);\n\n }\n\n\n\n #[test]\n", "file_path": "src/lib.rs", "rank": 87, "score": 4.026011829710564 }, { "content": " four_spans();\n\n\n\n let local_spans = Arc::new(local_collector.collect());\n\n root_span1.mount_local_spans(local_spans.clone());\n\n root_span2.mount_local_spans(local_spans);\n\n (collector1, collector2)\n\n };\n\n\n\n (\n\n c1.collect_with_args(CollectArgs::default().sync(true)),\n\n c2.collect_with_args(CollectArgs::default().sync(true)),\n\n )\n\n };\n\n\n\n assert_eq!(spans1.len(), 25);\n\n assert_eq!(spans2.len(), 25);\n\n }\n\n\n\n #[test]\n\n fn multiple_spans_without_local_spans() {\n", "file_path": "src/lib.rs", "rank": 88, "score": 3.9028774005938884 }, { "content": " /// let (span, _collector) = Span::root(\"Task\");\n\n ///\n\n /// let fut = async {\n\n /// 9527\n\n /// }.in_local_span(\"Future\");\n\n ///\n\n /// let task = async {\n\n /// fut.await\n\n /// };\n\n ///\n\n /// tokio::spawn(task.in_span(span));\n\n /// # }\n\n /// ```\n\n #[inline]\n\n fn in_local_span(self, event: 
&'static str) -> InLocalSpan<Self> {\n\n InLocalSpan { inner: self, event }\n\n }\n\n}\n\n\n\n#[pin_project::pin_project]\n", "file_path": "src/future.rs", "rank": 89, "score": 3.8526691801491184 }, { "content": " ) -> Self {\n\n RawSpan {\n\n id,\n\n parent_id,\n\n begin_cycle: begin_cycles,\n\n event,\n\n properties: vec![],\n\n end_cycle: Cycle::default(),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn end_with(&mut self, end_cycle: Cycle) {\n\n self.end_cycle = end_cycle;\n\n }\n\n\n\n #[inline]\n\n pub fn into_span(self, anchor: Anchor) -> Span {\n\n let begin_unix_time_ns = DefaultClock::cycle_to_unix_time_ns(self.begin_cycle, anchor);\n\n let end_unix_time_ns = DefaultClock::cycle_to_unix_time_ns(self.end_cycle, anchor);\n", "file_path": "src/span/mod.rs", "rank": 90, "score": 3.742093342301662 }, { "content": "## User Interface\n\n\n\nWe support visualization provided by an amazing tracing platform [Jaeger](https://www.jaegertracing.io/).\n\n\n\nTo experience, a dependency should be added as:\n\n \n\n```toml\n\n[dependencies]\n\nminitrace-jaeger = { git = \"https://github.com/tikv/minitrace-rust.git\" }\n\n```\n\n\n\n### Report to Jaeger\n\n\n\n```rust\n\nuse minitrace_jaeger::Reporter;\n\n\n\nlet spans = /* collect from a collector */;\n\n\n\nlet socket = SocketAddr::new(\"127.0.0.1\".parse().unwrap(), 6831);\n\n\n\nconst TRACE_ID: u64 = 42;\n\nconst SPAN_ID_PREFIX: u32 = 42;\n\nconst ROOT_PARENT_SPAN_ID: u64 = 0;\n\nlet bytes = Reporter::encode(\n\n String::from(\"service name\"),\n\n TRACE_ID,\n\n ROOT_PARENT_SPAN_ID,\n\n SPAN_ID_PREFIX,\n\n &spans,\n\n)\n\n.expect(\"encode error\");\n\nReporter::report(socket, &bytes).expect(\"report error\");\n\n```\n\n\n\n### Setup Jaeger\n\n```sh\n\ndocker run --rm -d -p6831:6831/udp -p16686:16686 --name jaeger jaegertracing/all-in-one:latest\n\n```\n\n\n\n### Run Synchronous Example\n\n\n\n```sh\n\ncargo run --example synchronous\n\n```\n\n\n\nOpen http://localhost:16686 to see the results.\n\n\n\n![Jaeger 
Synchronous](img/jaeger-synchronous.png)\n\n\n\n### Run Asynchronous Example\n\n\n\n```sh\n\ncargo run --example asynchronous\n\n```\n\n\n\nOpen http://localhost:16686 to see the results.\n\n\n\n![Jaeger Asynchronous](img/jaeger-asynchronous.png)\n", "file_path": "README.md", "rank": 91, "score": 3.730270560588586 }, { "content": "use minitrace::*;\n\nuse minitrace_jaeger::Reporter;\n\nuse std::net::SocketAddr;\n\n\n", "file_path": "examples/get_started.rs", "rank": 92, "score": 3.6499988065043736 }, { "content": " value: p.1.to_owned(),\n\n })\n\n .collect(),\n\n logs: vec![],\n\n })\n\n .collect(),\n\n },\n\n };\n\n\n\n let mut bytes = Vec::new();\n\n let msg = Message::from(bn);\n\n msg.compact_encode(&mut bytes)?;\n\n Ok(bytes)\n\n }\n\n\n\n pub fn report(\n\n agent: SocketAddr,\n\n bytes: &[u8],\n\n ) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {\n\n let local_addr: SocketAddr = if agent.is_ipv4() {\n", "file_path": "crates/minitrace-jaeger/src/lib.rs", "rank": 93, "score": 3.5797394226746517 }, { "content": "// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.\n\n\n\nuse minitrace::CollectArgs;\n\nuse minitrace::{LocalSpan, Span};\n\nuse minitrace_datadog::Reporter as DReporter;\n\nuse minitrace_jaeger::Reporter as JReporter;\n\nuse minitrace_macro::trace;\n\n\n", "file_path": "examples/synchronous.rs", "rank": 94, "score": 3.5537304216962706 }, { "content": "// Copyright 2020 TiKV Project Authors. 
Licensed under Apache-2.0.\n\n\n\nuse minitrace::*;\n\nuse minitrace_datadog::Reporter as DReporter;\n\nuse minitrace_jaeger::Reporter as JReporter;\n\nuse minitrace_macro::trace_async;\n\n\n", "file_path": "examples/asynchronous.rs", "rank": 95, "score": 3.5034411361185187 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum TagKind {\n\n String = 0,\n\n Double = 1,\n\n Bool = 2,\n\n Long = 3,\n\n Binary = 4,\n\n}\n\n\n\n/// `Tag` is a basic strongly typed key/value pair.\n\n#[derive(Debug, Clone, PartialEq, PartialOrd)]\n\npub enum Tag {\n\n String { key: String, value: String },\n\n Double { key: String, value: f64 },\n\n Bool { key: String, value: bool },\n\n Long { key: String, value: i64 },\n\n Binary { key: String, value: Vec<u8> },\n\n}\n\n\n\nimpl Tag {\n", "file_path": "crates/minitrace-jaeger/src/thrift.rs", "rank": 96, "score": 3.467216948901774 }, { "content": "}\n\n\n\nimpl AttachedSpan {\n\n pub fn new_child_span(event: &'static str) -> Span {\n\n ATTACHED_SPAN.with(|attached_span| {\n\n let attached_span = attached_span.borrow();\n\n if let Some(AttachedSpan {\n\n span_id: parent_span_id,\n\n acquirers,\n\n ..\n\n }) = attached_span.as_ref()\n\n {\n\n Span::new(acquirers.iter().map(|acq| (*parent_span_id, acq)), event)\n\n } else {\n\n Span::empty()\n\n }\n\n })\n\n }\n\n\n\n #[inline]\n", "file_path": "src/local/span_guard.rs", "rank": 97, "score": 3.3089085255502098 }, { "content": " jh.await.unwrap();\n\n }\n\n }\n\n .in_span(span);\n\n\n\n tokio::spawn(f).await.unwrap();\n\n\n\n let spans = collector.collect_with_args(CollectArgs::default().sync(true));\n\n\n\n // Report to Jaeger\n\n let bytes = JReporter::encode(\"asynchronous\".to_owned(), rand::random(), 0, 0, &spans).unwrap();\n\n JReporter::report(\"127.0.0.1:6831\".parse().unwrap(), &bytes).ok();\n\n\n\n // Report to Datadog\n\n let bytes = DReporter::encode(\"asynchronous\", rand::random(), 0, 0, &spans).unwrap();\n\n 
DReporter::report(\"127.0.0.1:8126\".parse().unwrap(), bytes)\n\n .await\n\n .ok();\n\n}\n", "file_path": "examples/asynchronous.rs", "rank": 98, "score": 3.1446153086495925 }, { "content": " fn multiple_threads_multiple_spans() {\n\n let (spans1, spans2) = {\n\n let (c1, c2) = {\n\n let (root_span1, collector1) = Span::root(\"root1\");\n\n let (root_span2, collector2) = Span::root(\"root2\");\n\n let local_collector = LocalCollector::start();\n\n\n\n for _ in 0..4 {\n\n let merged =\n\n Span::from_parents(\"merged\", vec![&root_span1, &root_span2].into_iter());\n\n std::thread::spawn(move || {\n\n let local_collector = LocalCollector::start();\n\n\n\n four_spans();\n\n\n\n let local_spans = Arc::new(local_collector.collect());\n\n merged.mount_local_spans(local_spans);\n\n });\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 99, "score": 3.115670776679411 } ]
Rust
src/bin/main.rs
yumcyaWiz/rusmallpt
aa7ce93096a6b5735a711cf6585dd969c4456389
use std::f32::consts::{FRAC_PI_2, FRAC_PI_4}; use std::sync::{Arc, Mutex}; use rusmallpt::camera::{Camera, PinholeCamera}; use rusmallpt::core::IntersectableLocal; use rusmallpt::image::Image; use rusmallpt::integrator::{Integrator, NormalIntegrator, PathTracingIntegrator}; use rusmallpt::sampler::Sampler; use rusmallpt::scene::{Material, Scene}; use rusmallpt::shape::{Plane, Sphere}; use rusmallpt::types::Real; use rusmallpt::vec2::Vec2; use rusmallpt::vec3::Vec3; fn simple_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(0.0, 0.0, 6.0), Vec3::new(0.0, 0.0, -1.0), FRAC_PI_2, ); let sphere1 = Box::new(Sphere::new(Vec3::new(0.0, 0.0, 0.0), 1.0)); let sphere2 = Box::new(Sphere::new(Vec3::new(-1.5, 0.0, -1.5), 1.0)); let sphere3 = Box::new(Sphere::new(Vec3::new(1.5, 0.0, 1.5), 1.0)); let floor = Box::new(Plane::new( Vec3::new(-3.0, -1.0, 3.0), Vec3::new(6.0, 0.0, 0.0), Vec3::new(0.0, 0.0, -6.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![sphere1, sphere2, sphere3, floor]; let materials: Vec<Material> = vec![ Material::new( Vec3::new(0.8, 0.2, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.2, 0.8, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.2, 0.2, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), ]; (camera, Scene::new(primitives, materials)) } fn cornellbox_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(278.0, 273.0, -900.0), Vec3::new(0.0, 0.0, 1.0), FRAC_PI_4, ); let floor = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), Vec3::new(556.0, 0.0, 0.0), )); let right_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 548.8, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let left_wall = Box::new(Plane::new( Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 
559.2), Vec3::new(0.0, 548.8, 0.0), )); let ceil = Box::new(Plane::new( Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let back_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 559.2), Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), )); let short_box1 = Box::new(Plane::new( Vec3::new(130.0, 165.0, 65.0), Vec3::new(-48.0, 0.0, 160.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box2 = Box::new(Plane::new( Vec3::new(290.0, 0.0, 114.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-50.0, 0.0, 158.0), )); let short_box3 = Box::new(Plane::new( Vec3::new(130.0, 0.0, 65.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box4 = Box::new(Plane::new( Vec3::new(82.0, 0.0, 225.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(48.0, 0.0, -160.0), )); let short_box5 = Box::new(Plane::new( Vec3::new(240.0, 0.0, 272.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-158.0, 0.0, -47.0), )); let tall_box1 = Box::new(Plane::new( Vec3::new(423.0, 330.0, 247.0), Vec3::new(-158.0, 0.0, 49.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box2 = Box::new(Plane::new( Vec3::new(423.0, 0.0, 247.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box3 = Box::new(Plane::new( Vec3::new(472.0, 0.0, 406.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-158.0, 0.0, 50.0), )); let tall_box4 = Box::new(Plane::new( Vec3::new(314.0, 0.0, 456.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-49.0, 0.0, -160.0), )); let tall_box5 = Box::new(Plane::new( Vec3::new(265.0, 0.0, 296.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(158.0, 0.0, -49.0), )); let light = Box::new(Plane::new( Vec3::new(343.0, 548.6, 227.0), Vec3::new(-130.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 105.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![ floor, right_wall, left_wall, ceil, back_wall, short_box1, short_box2, short_box3, short_box4, short_box5, tall_box1, tall_box2, tall_box3, tall_box4, tall_box5, light, ]; let white = Material::new( Vec3::new(0.8, 0.8, 
0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let red = Material::new( Vec3::new(0.8, 0.05, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let green = Material::new( Vec3::new(0.05, 0.8, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let light_material = Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(34.0, 19.0, 10.0), ); let materials = vec![ white.clone(), red, green, white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white, light_material, ]; (camera, Scene::new(primitives, materials)) } fn main() { let width = 512; let height = 512; let n_samples = 100; let max_depth = 100; let image = Arc::new(Mutex::new(Image::new(width, height))); let (camera, scene) = cornellbox_scene(); let camera = Arc::new(camera); let scene = Arc::new(scene); let integrator = Arc::new(PathTracingIntegrator::new(max_depth)); let pool = rayon::ThreadPoolBuilder::new() .num_threads(16) .build() .unwrap(); pool.scope(|s| { for i in 0..height { for j in 0..width { let (image, camera, scene, integrator) = ( image.clone(), camera.clone(), scene.clone(), integrator.clone(), ); s.spawn(move |_| { let seed = j + width * i; let mut sampler = Sampler::new(seed as u64); for _k in 0..n_samples { sampler.next_1d(); } let width = width as Real; let height = height as Real; let mut radiance = Vec3::new(0.0, 0.0, 0.0); for _k in 0..n_samples { let uv = Vec2::new( (2.0 * (j as Real + sampler.next_1d()) - width) / height, (2.0 * (i as Real + sampler.next_1d()) - height) / height, ); let ray = camera.sample_ray(uv, &mut sampler); radiance += integrator.integrate(&scene, &mut sampler, &ray); } radiance /= n_samples as Real; image.lock().unwrap().set_pixel(i, j, radiance); }); } } }); image.lock().unwrap().gamma_correction(); image.lock().unwrap().write_ppm(); }
use std::f32::consts::{FRAC_PI_2, FRAC_PI_4}; use std::sync::{Arc, Mutex}; use rusmallpt::camera::{Camera, PinholeCamera}; use rusmallpt::core::IntersectableLocal; use rusmallpt::image::Image; use rusmallpt::integrator::{Integrator, NormalIntegrator, PathTracingIntegrator}; use rusmallpt::sampler::Sampler; use rusmallpt::scene::{Material, Scene}; use rusmallpt::shape::{Plane, Sphere}; use rusmallpt::types::Real; use rusmallpt::vec2::Vec2; use rusmallpt::vec3::Vec3; fn simple_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(0.0, 0.0, 6.0), Vec3::new(0.0, 0.0, -1.0), FRAC_PI_2, ); let sphere1 = Box::new(Sphere::new(Vec3::new(0.0, 0.0, 0.0), 1.0)); let sphere2 = Box::new(Sphere::new(Vec3::new(-1.5, 0.0, -1.5), 1.0)); let sphere3 = Box::new(Sphere::new(Vec3::new(1.5, 0.0, 1.5), 1.0)); let floor = Box::new(Plane::new( Vec3::new(-3.0, -1.0, 3.0), Vec3::new(6.0, 0.0, 0.0), Vec3::new(0.0, 0.0, -6.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![sphere1, sphere2, sphere3, floor]; let materials: Vec<Material> = vec![ Material::new( Vec3::new(0.8, 0.2, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec
fn cornellbox_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(278.0, 273.0, -900.0), Vec3::new(0.0, 0.0, 1.0), FRAC_PI_4, ); let floor = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), Vec3::new(556.0, 0.0, 0.0), )); let right_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 548.8, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let left_wall = Box::new(Plane::new( Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), Vec3::new(0.0, 548.8, 0.0), )); let ceil = Box::new(Plane::new( Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let back_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 559.2), Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), )); let short_box1 = Box::new(Plane::new( Vec3::new(130.0, 165.0, 65.0), Vec3::new(-48.0, 0.0, 160.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box2 = Box::new(Plane::new( Vec3::new(290.0, 0.0, 114.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-50.0, 0.0, 158.0), )); let short_box3 = Box::new(Plane::new( Vec3::new(130.0, 0.0, 65.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box4 = Box::new(Plane::new( Vec3::new(82.0, 0.0, 225.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(48.0, 0.0, -160.0), )); let short_box5 = Box::new(Plane::new( Vec3::new(240.0, 0.0, 272.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-158.0, 0.0, -47.0), )); let tall_box1 = Box::new(Plane::new( Vec3::new(423.0, 330.0, 247.0), Vec3::new(-158.0, 0.0, 49.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box2 = Box::new(Plane::new( Vec3::new(423.0, 0.0, 247.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box3 = Box::new(Plane::new( Vec3::new(472.0, 0.0, 406.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-158.0, 0.0, 50.0), )); let tall_box4 = Box::new(Plane::new( Vec3::new(314.0, 0.0, 456.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-49.0, 0.0, -160.0), )); let tall_box5 = Box::new(Plane::new( Vec3::new(265.0, 0.0, 
296.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(158.0, 0.0, -49.0), )); let light = Box::new(Plane::new( Vec3::new(343.0, 548.6, 227.0), Vec3::new(-130.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 105.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![ floor, right_wall, left_wall, ceil, back_wall, short_box1, short_box2, short_box3, short_box4, short_box5, tall_box1, tall_box2, tall_box3, tall_box4, tall_box5, light, ]; let white = Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let red = Material::new( Vec3::new(0.8, 0.05, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let green = Material::new( Vec3::new(0.05, 0.8, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let light_material = Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(34.0, 19.0, 10.0), ); let materials = vec![ white.clone(), red, green, white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white, light_material, ]; (camera, Scene::new(primitives, materials)) } fn main() { let width = 512; let height = 512; let n_samples = 100; let max_depth = 100; let image = Arc::new(Mutex::new(Image::new(width, height))); let (camera, scene) = cornellbox_scene(); let camera = Arc::new(camera); let scene = Arc::new(scene); let integrator = Arc::new(PathTracingIntegrator::new(max_depth)); let pool = rayon::ThreadPoolBuilder::new() .num_threads(16) .build() .unwrap(); pool.scope(|s| { for i in 0..height { for j in 0..width { let (image, camera, scene, integrator) = ( image.clone(), camera.clone(), scene.clone(), integrator.clone(), ); s.spawn(move |_| { let seed = j + width * i; let mut sampler = Sampler::new(seed as u64); for _k in 0..n_samples { sampler.next_1d(); } let width = width as Real; let height = height as Real; let mut radiance = Vec3::new(0.0, 0.0, 0.0); for _k in 0..n_samples 
{ let uv = Vec2::new( (2.0 * (j as Real + sampler.next_1d()) - width) / height, (2.0 * (i as Real + sampler.next_1d()) - height) / height, ); let ray = camera.sample_ray(uv, &mut sampler); radiance += integrator.integrate(&scene, &mut sampler, &ray); } radiance /= n_samples as Real; image.lock().unwrap().set_pixel(i, j, radiance); }); } } }); image.lock().unwrap().gamma_correction(); image.lock().unwrap().write_ppm(); }
3::new(0.2, 0.8, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.2, 0.2, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), ]; (camera, Scene::new(primitives, materials)) }
function_block-function_prefixed
[ { "content": "pub trait Camera {\n\n fn sample_ray(&self, uv: Vec2, sampler: &mut Sampler) -> Ray;\n\n}\n\n\n\npub struct PinholeCamera {\n\n position: Vec3, // camera position\n\n forward: Vec3, // camera forward direction\n\n right: Vec3, // camera right direction\n\n up: Vec3, // camera up direction\n\n f: Real, // focal length\n\n}\n\n\n\nimpl PinholeCamera {\n\n pub fn new(position: Vec3, forward: Vec3, fov: Real) -> Self {\n\n let right = forward.cross(Vec3::new(0.0, 1.0, 0.0)).normalize();\n\n let up = right.cross(forward).normalize();\n\n let f = 1.0 / (0.5 * fov).tan();\n\n PinholeCamera {\n\n position,\n\n forward,\n", "file_path": "src/camera.rs", "rank": 0, "score": 40686.63603400417 }, { "content": "fn abs_cos_theta(v: Vec3) -> Real {\n\n v.y().abs()\n\n}\n\n\n\npub struct BxDFSample {\n\n pub f: Vec3, // BxDF value\n\n pub wi: Vec3, // sampled direction\n\n pub pdf: Real, // pdf\n\n}\n\n\n", "file_path": "src/bxdf.rs", "rank": 1, "score": 25595.966826573327 }, { "content": "fn reflect(v: Vec3, n: Vec3) -> Vec3 {\n\n -v + 2.0 * v.dot(n) * n\n\n}\n\n\n", "file_path": "src/bxdf.rs", "rank": 2, "score": 25327.273948219 }, { "content": " }\n\n }\n\n}\n\n\n\n// TODO: make intersector selectable\n\npub struct Scene {\n\n _primitives: Arc<Vec<Box<dyn IntersectableLocal + Send + Sync>>>,\n\n materials: Vec<Material>,\n\n intersector: Intersector,\n\n}\n\n\n\nimpl Scene {\n\n pub fn new(\n\n primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>>,\n\n materials: Vec<Material>,\n\n ) -> Self {\n\n if primitives.len() != materials.len() {\n\n panic!(\"number of primitives does not equal to the number of materials.\");\n\n }\n\n\n", "file_path": "src/scene.rs", "rank": 3, "score": 23668.78792208093 }, { "content": " let primitives = Arc::new(primitives);\n\n Scene {\n\n _primitives: primitives.clone(),\n\n materials,\n\n intersector: Intersector::new(primitives),\n\n }\n\n }\n\n\n\n pub fn has_emission(&self, prim_idx: u32) -> bool {\n\n let emission = 
self.materials[prim_idx as usize].emission;\n\n emission.x() > 0.0 && emission.y() > 0.0 && emission.z() > 0.0\n\n }\n\n\n\n pub fn get_emission(&self, prim_idx: u32) -> Vec3 {\n\n let material = &self.materials[prim_idx as usize];\n\n material.emission\n\n }\n\n\n\n pub fn get_shading_info(&self, wo_global: Vec3, info: &IntersectInfoGlobal) -> ShadingInfo {\n\n let (t, n, b) = build_orthonormal_basis(info.normal);\n", "file_path": "src/scene.rs", "rank": 4, "score": 23660.817793223905 }, { "content": "use crate::bxdf::{BxDF, Lambert};\n\nuse crate::core::{IntersectInfoGlobal, IntersectableGlobal, IntersectableLocal, Ray, ShadingInfo};\n\nuse crate::intersector::Intersector;\n\nuse crate::vec3::{build_orthonormal_basis, Vec3};\n\n\n\nuse std::sync::Arc;\n\n\n\n#[derive(Clone)]\n\npub struct Material {\n\n pub diffuse: Vec3,\n\n pub specular: Vec3,\n\n pub emission: Vec3,\n\n}\n\n\n\nimpl Material {\n\n pub fn new(diffuse: Vec3, specular: Vec3, emission: Vec3) -> Self {\n\n Material {\n\n diffuse,\n\n specular,\n\n emission,\n", "file_path": "src/scene.rs", "rank": 5, "score": 23660.135812602824 }, { "content": " ShadingInfo {\n\n x: info.pos,\n\n n,\n\n wo: wo_global.world_to_local(t, n, b),\n\n t,\n\n b,\n\n }\n\n }\n\n\n\n pub fn get_bxdf(&self, prim_idx: u32) -> Box<dyn BxDF> {\n\n let material = &self.materials[prim_idx as usize];\n\n Box::new(Lambert::new(material.diffuse))\n\n }\n\n}\n\n\n\nimpl IntersectableGlobal for Scene {\n\n fn intersect(&self, ray: &Ray) -> Option<IntersectInfoGlobal> {\n\n self.intersector.intersect(ray)\n\n }\n\n}\n", "file_path": "src/scene.rs", "rank": 6, "score": 23658.121837079496 }, { "content": " right,\n\n up,\n\n f,\n\n }\n\n }\n\n}\n\n\n\nimpl Camera for PinholeCamera {\n\n fn sample_ray(&self, uv: Vec2, _sampler: &mut Sampler) -> Ray {\n\n let sensor_pos = self.position + uv.x() * self.right + uv.y() * self.up;\n\n let pinhole_pos = self.position + self.f * self.forward;\n\n Ray::new(sensor_pos, (pinhole_pos - 
sensor_pos).normalize())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::f32::consts::FRAC_PI_2;\n\n\n\n use crate::camera::*;\n", "file_path": "src/camera.rs", "rank": 7, "score": 23591.677094306535 }, { "content": "use crate::core::Ray;\n\nuse crate::sampler::Sampler;\n\nuse crate::types::Real;\n\nuse crate::vec2::*;\n\nuse crate::vec3::*;\n\n\n", "file_path": "src/camera.rs", "rank": 8, "score": 23590.69649795006 }, { "content": "\n\n #[test]\n\n fn init_camera() {\n\n let camera = PinholeCamera::new(\n\n Vec3::new(0.0, 0.0, 0.0),\n\n Vec3::new(0.0, 0.0, -1.0),\n\n FRAC_PI_2,\n\n );\n\n assert_eq!(camera.position, Vec3::new(0.0, 0.0, 0.0));\n\n assert_eq!(camera.forward, Vec3::new(0.0, 0.0, -1.0));\n\n assert_eq!(camera.right, Vec3::new(1.0, 0.0, 0.0));\n\n assert_eq!(camera.up, Vec3::new(0.0, 1.0, 0.0));\n\n }\n\n\n\n #[test]\n\n fn sample_ray() {\n\n let camera = PinholeCamera::new(\n\n Vec3::new(0.0, 0.0, 0.0),\n\n Vec3::new(0.0, 0.0, -1.0),\n\n FRAC_PI_2,\n", "file_path": "src/camera.rs", "rank": 9, "score": 23590.638583434236 }, { "content": " );\n\n let mut sampler = Sampler::new(0);\n\n let sensor_pos = Vec3::new(1.0, 1.0, 0.0);\n\n let pinhole_pos = Vec3::new(0.0, 0.0, -1.0);\n\n\n\n assert_eq!(\n\n camera.sample_ray(Vec2::new(1.0, 1.0), &mut sampler),\n\n Ray::new(sensor_pos, (pinhole_pos - sensor_pos).normalize())\n\n );\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 10, "score": 23588.897842111866 }, { "content": "pub fn cosine_weighted_hemisphere(uv: Vec2) -> (Vec3, Real) {\n\n let theta = 0.5 * (1.0 - 2.0 * uv.x()).clamp(-1.0, 1.0).acos();\n\n let phi = 2.0 * PI * uv.y();\n\n\n\n let cos_theta = theta.cos();\n\n let pdf = FRAC_1_PI * cos_theta;\n\n\n\n (spherical_to_cartesian(theta, phi), pdf)\n\n}\n", "file_path": "src/sampler.rs", "rank": 11, "score": 21914.599962894674 }, { "content": "pub fn build_orthonormal_basis(v: Vec3) -> (Vec3, Vec3, Vec3) {\n\n #[allow(unused_assignments)]\n\n let mut lx = Vec3::new(1.0, 0.0, 0.0);\n\n 
let ly = v;\n\n #[allow(unused_assignments)]\n\n let mut lz = Vec3::new(0.0, 0.0, 1.0);\n\n\n\n if ly.y().abs() < 0.9 {\n\n lx = -ly.cross(Vec3::new(0.0, 1.0, 0.0)).normalize();\n\n } else {\n\n lx = -ly.cross(Vec3::new(0.0, 0.0, -1.0)).normalize();\n\n }\n\n lz = lx.cross(ly).normalize();\n\n\n\n (lx, ly, lz)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::vec3::*;\n", "file_path": "src/vec3.rs", "rank": 12, "score": 21414.63392376292 }, { "content": "pub fn spherical_to_cartesian(theta: Real, phi: Real) -> Vec3 {\n\n let sin_phi = phi.sin();\n\n let cos_phi = phi.cos();\n\n let sin_theta = theta.sin();\n\n let cos_theta = theta.cos();\n\n Vec3::new(cos_phi * sin_theta, cos_theta, sin_phi * sin_theta)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::core::*;\n\n use std::f32::consts::FRAC_PI_4;\n\n\n\n #[test]\n\n fn ray_position() {\n\n let ray = Ray::new(Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 1.0));\n\n assert_eq!(ray.position(1.0), Vec3::new(0.0, 0.0, 1.0));\n\n }\n\n\n\n #[test]\n", "file_path": "src/core.rs", "rank": 13, "score": 21414.63392376292 }, { "content": "use crate::core::{IntersectInfoGlobal, IntersectableGlobal, IntersectableLocal, Ray};\n\n\n\nuse std::sync::Arc;\n\n\n\npub struct Intersector {\n\n intersectables: Arc<Vec<Box<dyn IntersectableLocal + Send + Sync>>>,\n\n}\n\n\n\nimpl Intersector {\n\n pub fn new(intersectables: Arc<Vec<Box<dyn IntersectableLocal + Send + Sync>>>) -> Self {\n\n Intersector { intersectables }\n\n }\n\n}\n\n\n\nimpl IntersectableGlobal for Intersector {\n\n fn intersect(&self, ray: &Ray) -> Option<IntersectInfoGlobal> {\n\n let mut t = ray.tmax;\n\n let mut info: Option<IntersectInfoGlobal> = None;\n\n for (idx, intersectable) in self.intersectables.iter().enumerate() {\n\n if let Some(surf_info) = intersectable.intersect(ray) {\n", "file_path": "src/intersector.rs", "rank": 14, "score": 6.588009246496378 }, { "content": "use crate::core::{IntersectableGlobal, Ray};\n\nuse 
crate::sampler::Sampler;\n\nuse crate::scene::Scene;\n\nuse crate::vec3::Vec3;\n\n\n", "file_path": "src/integrator.rs", "rank": 15, "score": 6.015164618227382 }, { "content": "use crate::core::{IntersectInfoLocal, IntersectableLocal, Ray};\n\nuse crate::types::Real;\n\nuse crate::vec3::Vec3;\n\n\n\npub struct Sphere {\n\n center: Vec3,\n\n radius: Real,\n\n}\n\n\n\nimpl Sphere {\n\n pub fn new(center: Vec3, radius: Real) -> Self {\n\n Sphere { center, radius }\n\n }\n\n}\n\n\n\nimpl IntersectableLocal for Sphere {\n\n fn intersect(&self, ray: &Ray) -> Option<IntersectInfoLocal> {\n\n let b = (ray.origin - self.center).dot(ray.direction);\n\n let c = (ray.origin - self.center).length2() - self.radius * self.radius;\n\n let d = b * b - c;\n", "file_path": "src/shape.rs", "rank": 16, "score": 4.712044391308302 }, { "content": " t,\n\n pos,\n\n normal: self.normal,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::shape::*;\n\n\n\n #[test]\n\n fn sphere_intersect() {\n\n let sphere = Sphere::new(Vec3::new(0.0, 0.0, 0.0), 1.0);\n\n let ray = Ray::new(Vec3::new(0.0, 0.0, -2.0), Vec3::new(0.0, 0.0, 1.0));\n\n assert_eq!(\n\n sphere.intersect(&ray),\n\n Some(IntersectInfoLocal {\n\n t: 1.0,\n\n pos: Vec3::new(0.0, 0.0, -1.0),\n", "file_path": "src/shape.rs", "rank": 17, "score": 4.37495340312768 }, { "content": "use std::fs;\n\n\n\nuse crate::vec3::Vec3;\n\n\n\npub struct Image {\n\n width: usize,\n\n height: usize,\n\n pixels: Vec<f32>,\n\n}\n\n\n\nimpl Image {\n\n pub fn new(width: usize, height: usize) -> Self {\n\n Image {\n\n width,\n\n height,\n\n pixels: vec![0.0; 3 * width * height],\n\n }\n\n }\n\n\n\n pub fn get_width(&self) -> usize {\n", "file_path": "src/image.rs", "rank": 18, "score": 4.3226299211006625 }, { "content": "use std::f32::consts::FRAC_1_PI;\n\n\n\nuse crate::core::ShadingInfo;\n\nuse crate::sampler::{cosine_weighted_hemisphere, Sampler};\n\nuse crate::types::Real;\n\nuse crate::vec3::Vec3;\n\n\n\n// NOTE: assuming vectors in 
tangent space\n\n\n", "file_path": "src/bxdf.rs", "rank": 19, "score": 3.1574782735700033 }, { "content": "use std::f32::consts::{FRAC_1_PI, PI};\n\n\n\nuse rand::{Rng, SeedableRng};\n\nuse rand_pcg::Pcg32;\n\n\n\nuse crate::core::spherical_to_cartesian;\n\nuse crate::types::Real;\n\nuse crate::vec2::Vec2;\n\nuse crate::vec3::Vec3;\n\n\n\npub struct Sampler {\n\n rng: Pcg32,\n\n}\n\n\n\nimpl Sampler {\n\n pub fn new(seed: u64) -> Self {\n\n Sampler {\n\n rng: Pcg32::seed_from_u64(seed),\n\n }\n\n }\n", "file_path": "src/sampler.rs", "rank": 20, "score": 3.0877260649574563 }, { "content": "pub mod bxdf;\n\npub mod camera;\n\npub mod core;\n\npub mod image;\n\npub mod integrator;\n\npub mod intersector;\n\npub mod sampler;\n\npub mod scene;\n\npub mod shape;\n\npub mod types;\n\npub mod vec2;\n\npub mod vec3;\n", "file_path": "src/lib.rs", "rank": 21, "score": 3.003762305240246 }, { "content": "// NOTE: global means it contains hit primitive index\n\npub trait IntersectableGlobal {\n\n fn intersect(&self, ray: &Ray) -> Option<IntersectInfoGlobal>;\n\n}\n\n\n", "file_path": "src/core.rs", "rank": 22, "score": 2.6560481736699506 }, { "content": " for _depth in 0..self.max_depth {\n\n if let Some(info) = scene.intersect(&ray) {\n\n // russian roulette\n\n let russian_roulette_prob = throughput.max().min(1.0);\n\n if sampler.next_1d() >= russian_roulette_prob {\n\n break;\n\n }\n\n throughput /= russian_roulette_prob;\n\n\n\n // terminate if ray hits light\n\n if scene.has_emission(info.prim_idx) {\n\n radiance += throughput * scene.get_emission(info.prim_idx);\n\n break;\n\n }\n\n\n\n // sample direction by BxDF\n\n let shading_info = scene.get_shading_info(-ray.direction, &info);\n\n\n\n // sample direction\n\n let bxdf = scene.get_bxdf(info.prim_idx);\n", "file_path": "src/integrator.rs", "rank": 23, "score": 2.6049127154055087 }, { "content": "// NOTE: local means it doesn't contain hit primitive index\n\npub trait IntersectableLocal {\n\n fn intersect(&self, ray: 
&Ray) -> Option<IntersectInfoLocal>;\n\n}\n\n\n", "file_path": "src/core.rs", "rank": 24, "score": 2.5848434779502885 }, { "content": " let v2 = 3.0;\n\n assert_eq!(v1 * v2, Vec3::new(3.0, 6.0, 9.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_ref_mul_vec_scalar() {\n\n let v1 = &Vec3::new(1.0, 2.0, 3.0);\n\n let v2 = 3.0;\n\n assert_eq!(v1 * v2, Vec3::new(3.0, 6.0, 9.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_mul_scalar_vec() {\n\n let v1 = 3.0;\n\n let v2 = Vec3::new(1.0, 2.0, 3.0);\n\n assert_eq!(v1 * v2, Vec3::new(3.0, 6.0, 9.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_ref_mul_scalar_vec() {\n", "file_path": "src/vec3.rs", "rank": 25, "score": 2.435373343984507 }, { "content": " #[test]\n\n fn vec3_ref_div_vec_scalar() {\n\n let v1 = &Vec3::new(1.0, 2.0, 4.0);\n\n let v2 = 2.0;\n\n assert_eq!(v1 / v2, Vec3::new(0.5, 1.0, 2.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_div_scalar_vec() {\n\n let v1 = 2.0;\n\n let v2 = Vec3::new(1.0, 2.0, 4.0);\n\n assert_eq!(v1 / v2, Vec3::new(2.0, 1.0, 0.5));\n\n }\n\n\n\n #[test]\n\n fn vec3_ref_div_scalar_vec() {\n\n let v1 = 2.0;\n\n let v2 = &Vec3::new(1.0, 2.0, 4.0);\n\n assert_eq!(v1 / v2, Vec3::new(2.0, 1.0, 0.5));\n\n }\n", "file_path": "src/vec3.rs", "rank": 26, "score": 2.3751816920931677 }, { "content": "use std::ops::{Add, Div, Mul, Sub};\n\n\n\nuse crate::types::Real;\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub struct Vec2 {\n\n elements: [Real; 2],\n\n}\n\n\n\nimpl Vec2 {\n\n pub fn new(x: Real, y: Real) -> Self {\n\n Vec2 { elements: [x, y] }\n\n }\n\n\n\n pub fn x(&self) -> Real {\n\n self.elements[0]\n\n }\n\n\n\n pub fn y(&self) -> Real {\n\n self.elements[1]\n", "file_path": "src/vec2.rs", "rank": 27, "score": 2.1591587857942054 }, { "content": "use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};\n\n\n\nuse crate::types::Real;\n\n\n\n// NOTE: y-up, (+x, +y, -z)\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub struct Vec3 {\n\n elements: [Real; 3],\n\n}\n\n\n\nimpl Vec3 {\n\n pub fn 
new(x: Real, y: Real, z: Real) -> Self {\n\n Vec3 {\n\n elements: [x, y, z],\n\n }\n\n }\n\n\n\n pub fn x(&self) -> Real {\n\n self.elements[0]\n", "file_path": "src/vec3.rs", "rank": 28, "score": 1.9770305802644517 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct PathTracingIntegrator {\n\n max_depth: u32,\n\n}\n\n\n\nimpl PathTracingIntegrator {\n\n pub fn new(max_depth: u32) -> Self {\n\n PathTracingIntegrator { max_depth }\n\n }\n\n}\n\n\n\nimpl Integrator for PathTracingIntegrator {\n\n fn integrate(&self, scene: &Scene, sampler: &mut Sampler, ray_in: &Ray) -> Vec3 {\n\n let mut radiance = Vec3::new(0.0, 0.0, 0.0);\n\n let mut ray = ray_in.clone();\n\n let mut throughput = Vec3::new(1.0, 1.0, 1.0);\n\n\n", "file_path": "src/integrator.rs", "rank": 29, "score": 1.925067768885686 }, { "content": "use crate::types::Real;\n\nuse crate::vec3::Vec3;\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Ray {\n\n pub origin: Vec3, // ray origin\n\n pub direction: Vec3, // ray direction\n\n pub tmin: Real, // minimum hittable distance\n\n pub tmax: Real, // maximum hittable distance\n\n}\n\n\n\nimpl Ray {\n\n pub fn new(origin: Vec3, direction: Vec3) -> Self {\n\n Ray {\n\n origin,\n\n direction,\n\n tmin: 1E-3,\n\n tmax: 1E9,\n\n }\n\n }\n", "file_path": "src/core.rs", "rank": 30, "score": 1.8056798711525675 }, { "content": "\n\n#[derive(Debug, PartialEq)]\n\npub struct ShadingInfo {\n\n pub x: Vec3, // position\n\n pub n: Vec3, // shading normal\n\n pub wo: Vec3, // outgoing direction in tangent space\n\n pub t: Vec3, // tangent vector\n\n pub b: Vec3, // bitangent vector\n\n}\n\n\n\n// NOTE: local means it doesn't contain hit primitive index\n", "file_path": "src/core.rs", "rank": 31, "score": 1.4542451934841218 }, { "content": " assert_eq!(v1 - v2, Vec3::new(-3.0, -3.0, -3.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_val_ref_sub() {\n\n let v1 = Vec3::new(1.0, 2.0, 3.0);\n\n let v2 = &Vec3::new(4.0, 5.0, 6.0);\n\n assert_eq!(v1 - v2, Vec3::new(-3.0, -3.0, -3.0));\n\n 
}\n\n\n\n #[test]\n\n fn vec3_ref_ref_sub() {\n\n let v1 = &Vec3::new(1.0, 2.0, 3.0);\n\n let v2 = &Vec3::new(4.0, 5.0, 6.0);\n\n assert_eq!(v1 - v2, Vec3::new(-3.0, -3.0, -3.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_mul_vec_scalar() {\n\n let v1 = Vec3::new(1.0, 2.0, 3.0);\n", "file_path": "src/vec3.rs", "rank": 32, "score": 1.2431905817547193 }, { "content": " fn vec3_val_ref_mul() {\n\n let v1 = Vec3::new(1.0, 2.0, 3.0);\n\n let v2 = &Vec3::new(4.0, 5.0, 6.0);\n\n assert_eq!(v1 * v2, Vec3::new(4.0, 10.0, 18.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_ref_ref_mul() {\n\n let v1 = &Vec3::new(1.0, 2.0, 3.0);\n\n let v2 = &Vec3::new(4.0, 5.0, 6.0);\n\n assert_eq!(v1 * v2, Vec3::new(4.0, 10.0, 18.0));\n\n }\n\n\n\n #[test]\n\n fn vec3_div_vec_scalar() {\n\n let v1 = Vec3::new(1.0, 2.0, 4.0);\n\n let v2 = 2.0;\n\n assert_eq!(v1 / v2, Vec3::new(0.5, 1.0, 2.0));\n\n }\n\n\n", "file_path": "src/vec3.rs", "rank": 33, "score": 1.2431905817547193 }, { "content": "\n\n pub fn position(&self, t: Real) -> Vec3 {\n\n self.origin + t * self.direction\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct IntersectInfoLocal {\n\n pub t: Real, // distance to hit point\n\n pub pos: Vec3, // hit position\n\n pub normal: Vec3, // hit normal\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct IntersectInfoGlobal {\n\n pub t: Real, // distance to hit point\n\n pub pos: Vec3, // hit position\n\n pub normal: Vec3, // hit normal\n\n pub prim_idx: u32, // index of hit primitive\n\n}\n", "file_path": "src/core.rs", "rank": 34, "score": 1.0919256108544273 }, { "content": "impl_vec2_scalar_operator!(Div, div, &Vec2, /, Real);\n\nimpl_scalar_vec2_operator!(Div, div, Real, /, Vec2);\n\nimpl_scalar_vec2_operator!(Div, div, Real, /, &Vec2);\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::vec2::*;\n\n\n\n #[test]\n\n fn vec2_val_val_add() {\n\n let v1 = Vec2::new(1.0, 2.0);\n\n let v2 = Vec2::new(3.0, 4.0);\n\n assert_eq!(v1 + v2, Vec2::new(4.0, 6.0));\n\n }\n\n\n\n #[test]\n\n fn 
vec2_val_ref_add() {\n\n let v1 = Vec2::new(1.0, 2.0);\n\n let v2 = &Vec2::new(3.0, 4.0);\n\n assert_eq!(v1 + v2, Vec2::new(4.0, 6.0));\n", "file_path": "src/vec2.rs", "rank": 35, "score": 0.9941725060663273 } ]
Rust
src/config.rs
foeb/bootimage
e159e4095ca03826a5b366b1cf528804a06bf6bc
use failure::{Error, ResultExt}; use std::path::PathBuf; use toml::Value; #[derive(Debug, Clone)] pub struct Config { pub manifest_path: PathBuf, pub default_target: Option<String>, pub output: Option<PathBuf>, pub bootloader: BootloaderConfig, pub minimum_image_size: Option<u64>, pub run_command: Vec<String>, pub package_filepath: Option<PathBuf>, } #[derive(Debug, Clone)] pub struct BootloaderConfig { pub name: Option<String>, pub target: PathBuf, pub default_features: bool, pub features: Vec<String>, } pub(crate) fn read_config(manifest_path: PathBuf) -> Result<Config, Error> { use std::{fs::File, io::Read}; let cargo_toml: Value = { let mut content = String::new(); File::open(&manifest_path) .with_context(|e| format!("Failed to open Cargo.toml: {}", e))? .read_to_string(&mut content) .with_context(|e| format!("Failed to read Cargo.toml: {}", e))?; content .parse::<Value>() .with_context(|e| format!("Failed to parse Cargo.toml: {}", e))? }; let metadata = cargo_toml .get("package") .and_then(|table| table.get("metadata")) .and_then(|table| table.get("bootimage")); let metadata = match metadata { None => { return Ok(ConfigBuilder { manifest_path: Some(manifest_path), ..Default::default() } .into()); } Some(metadata) => metadata.as_table().ok_or(format_err!( "Bootimage configuration invalid: {:?}", metadata ))?, }; /* * The user shouldn't specify any features if they're using a precompiled bootloader, as we * don't actually compile it. */ if cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader_precompiled")) .and_then(|table| { table .get("features") .or_else(|| table.get("default-features")) }) .is_some() { return Err(format_err!( "Can't change features of precompiled bootloader!" 
)); } let bootloader_dependency = cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")); let bootloader_default_features = match bootloader_dependency.and_then(|table| table.get("default-features")) { None => None, Some(Value::Boolean(default_features)) => Some(*default_features), Some(_) => { return Err(format_err!( "Bootloader 'default-features' field should be a bool!" )); } }; let bootloader_features = match cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")) .and_then(|table| table.get("features")) { None => None, Some(Value::Array(array)) => { let mut features = Vec::new(); for feature_string in array { match feature_string { Value::String(feature) => features.push(feature.clone()), _ => return Err(format_err!("Bootloader features are malformed!")), } } Some(features) } Some(_) => return Err(format_err!("Bootloader features are malformed!")), }; let mut config = ConfigBuilder { manifest_path: Some(manifest_path), bootloader: BootloaderConfigBuilder { features: bootloader_features, default_features: bootloader_default_features, ..Default::default() }, ..Default::default() }; for (key, value) in metadata { match (key.as_str(), value.clone()) { ("default-target", Value::String(s)) => config.default_target = From::from(s), ("output", Value::String(s)) => config.output = Some(PathBuf::from(s)), ("bootloader", Value::Table(t)) => { for (key, value) in t { match (key.as_str(), value) { ("name", Value::String(s)) => config.bootloader.name = From::from(s), ("target", Value::String(s)) => { config.bootloader.target = Some(PathBuf::from(s)) } (k @ "precompiled", _) | (k @ "version", _) | (k @ "git", _) | (k @ "branch", _) | (k @ "path", _) => Err(format_err!( "the \ `package.metadata.bootimage.bootloader` key `{}` was deprecated\n\n\ In case you just updated bootimage from an earlier version, \ check out the migration guide at \ https://github.com/rust-osdev/bootimage/pull/16.", k ))?, (key, value) => Err(format_err!( 
"unexpected \ `package.metadata.bootimage.bootloader` key `{}` with value `{}`", key, value ))?, } } } ("minimum-image-size", Value::Integer(x)) => { if x >= 0 { config.minimum_image_size = Some((x * 1024 * 1024) as u64); } else { Err(format_err!( "unexpected `package.metadata.bootimage` \ key `minimum-image-size` with negative value `{}`", value ))? } } ("run-command", Value::Array(array)) => { let mut command = Vec::new(); for value in array { match value { Value::String(s) => command.push(s), _ => Err(format_err!("run-command must be a list of strings"))?, } } config.run_command = Some(command); } ("package-file", Value::String(path)) => { config.package_filepath = Some(PathBuf::from(path)); } (key, value) => Err(format_err!( "unexpected `package.metadata.bootimage` \ key `{}` with value `{}`", key, value ))?, } } Ok(config.into()) } #[derive(Default)] struct ConfigBuilder { manifest_path: Option<PathBuf>, default_target: Option<String>, output: Option<PathBuf>, bootloader: BootloaderConfigBuilder, minimum_image_size: Option<u64>, run_command: Option<Vec<String>>, package_filepath: Option<PathBuf>, } #[derive(Default)] struct BootloaderConfigBuilder { name: Option<String>, target: Option<PathBuf>, features: Option<Vec<String>>, default_features: Option<bool>, } impl Into<Config> for ConfigBuilder { fn into(self) -> Config { Config { manifest_path: self.manifest_path.expect("manifest path must be set"), default_target: self.default_target, output: self.output, bootloader: self.bootloader.into(), minimum_image_size: self.minimum_image_size, run_command: self.run_command.unwrap_or(vec![ "qemu-system-x86_64".into(), "-drive".into(), "format=raw,file={}".into(), ]), package_filepath: self.package_filepath, } } } impl Into<BootloaderConfig> for BootloaderConfigBuilder { fn into(self) -> BootloaderConfig { BootloaderConfig { name: self.name, target: self .target .unwrap_or(PathBuf::from("x86_64-bootloader.json")), features: 
self.features.unwrap_or(Vec::with_capacity(0)), default_features: self.default_features.unwrap_or(true), } } }
use failure::{Error, ResultExt}; use std::path::PathBuf; use toml::Value; #[derive(Debug, Clone)] pub struct Config { pub manifest_path: PathBuf, pub default_target: Option<String>, pub output: Option<PathBuf>, pub bootloader: BootloaderConfig, pub minimum_image_size: Option<u64>, pub run_command: Vec<String>, pub package_filepath: Option<PathBuf>, } #[derive(Debug, Clone)] pub struct BootloaderConfig { pub name: Option<String>, pub target: PathBuf, pub default_features: bool, pub features: Vec<String>, } pub(crate) fn read_config(manifest_path: PathBuf) -> Result<Config, Error> { use std::{fs::File, io::Read}; let cargo_toml: Value = { let mut content = String::new(); File::open(&manifest_path) .with_context(|e| format!("Failed to open Cargo.toml: {}", e))? .read_to_string(&mut content) .with_context(|e| format!("Failed to read Cargo.toml: {}", e))?; content .parse::<Value>() .with_context(|e| format!("Failed to parse Cargo.toml: {}", e))? }; let metadata = cargo_toml .get("package") .and_then(|table| table.get("metadata")) .and_then(|table| table.get("bootimage")); let metadata = match metadata { None => { return Ok(ConfigBuilder { manifest_path: Some(manifest_path), ..Default::default() } .into()); } Some(metadata) => metadata.as_table().ok_or(format_err!( "Bootimage configuration invalid: {:?}", metadata ))?, }; /* * The user shouldn't specify any features if they're using a precompiled bootloader, as we * don't actually compile it. */ if cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader_precompiled")) .and_then(|table| { table .get("features") .or_else(|| table.get("default-features")) }) .is_some() { return Err(format_err!( "Can't change features of precompiled bootloader!" )); } let bootloader_dependency = cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")); let bootloader_default_features =
; let bootloader_features = match cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")) .and_then(|table| table.get("features")) { None => None, Some(Value::Array(array)) => { let mut features = Vec::new(); for feature_string in array { match feature_string { Value::String(feature) => features.push(feature.clone()), _ => return Err(format_err!("Bootloader features are malformed!")), } } Some(features) } Some(_) => return Err(format_err!("Bootloader features are malformed!")), }; let mut config = ConfigBuilder { manifest_path: Some(manifest_path), bootloader: BootloaderConfigBuilder { features: bootloader_features, default_features: bootloader_default_features, ..Default::default() }, ..Default::default() }; for (key, value) in metadata { match (key.as_str(), value.clone()) { ("default-target", Value::String(s)) => config.default_target = From::from(s), ("output", Value::String(s)) => config.output = Some(PathBuf::from(s)), ("bootloader", Value::Table(t)) => { for (key, value) in t { match (key.as_str(), value) { ("name", Value::String(s)) => config.bootloader.name = From::from(s), ("target", Value::String(s)) => { config.bootloader.target = Some(PathBuf::from(s)) } (k @ "precompiled", _) | (k @ "version", _) | (k @ "git", _) | (k @ "branch", _) | (k @ "path", _) => Err(format_err!( "the \ `package.metadata.bootimage.bootloader` key `{}` was deprecated\n\n\ In case you just updated bootimage from an earlier version, \ check out the migration guide at \ https://github.com/rust-osdev/bootimage/pull/16.", k ))?, (key, value) => Err(format_err!( "unexpected \ `package.metadata.bootimage.bootloader` key `{}` with value `{}`", key, value ))?, } } } ("minimum-image-size", Value::Integer(x)) => { if x >= 0 { config.minimum_image_size = Some((x * 1024 * 1024) as u64); } else { Err(format_err!( "unexpected `package.metadata.bootimage` \ key `minimum-image-size` with negative value `{}`", value ))? 
} } ("run-command", Value::Array(array)) => { let mut command = Vec::new(); for value in array { match value { Value::String(s) => command.push(s), _ => Err(format_err!("run-command must be a list of strings"))?, } } config.run_command = Some(command); } ("package-file", Value::String(path)) => { config.package_filepath = Some(PathBuf::from(path)); } (key, value) => Err(format_err!( "unexpected `package.metadata.bootimage` \ key `{}` with value `{}`", key, value ))?, } } Ok(config.into()) } #[derive(Default)] struct ConfigBuilder { manifest_path: Option<PathBuf>, default_target: Option<String>, output: Option<PathBuf>, bootloader: BootloaderConfigBuilder, minimum_image_size: Option<u64>, run_command: Option<Vec<String>>, package_filepath: Option<PathBuf>, } #[derive(Default)] struct BootloaderConfigBuilder { name: Option<String>, target: Option<PathBuf>, features: Option<Vec<String>>, default_features: Option<bool>, } impl Into<Config> for ConfigBuilder { fn into(self) -> Config { Config { manifest_path: self.manifest_path.expect("manifest path must be set"), default_target: self.default_target, output: self.output, bootloader: self.bootloader.into(), minimum_image_size: self.minimum_image_size, run_command: self.run_command.unwrap_or(vec![ "qemu-system-x86_64".into(), "-drive".into(), "format=raw,file={}".into(), ]), package_filepath: self.package_filepath, } } } impl Into<BootloaderConfig> for BootloaderConfigBuilder { fn into(self) -> BootloaderConfig { BootloaderConfig { name: self.name, target: self .target .unwrap_or(PathBuf::from("x86_64-bootloader.json")), features: self.features.unwrap_or(Vec::with_capacity(0)), default_features: self.default_features.unwrap_or(true), } } }
match bootloader_dependency.and_then(|table| table.get("default-features")) { None => None, Some(Value::Boolean(default_features)) => Some(*default_features), Some(_) => { return Err(format_err!( "Bootloader 'default-features' field should be a bool!" )); } }
if_condition
[ { "content": "fn build_bootloader(metadata: &CargoMetadata, config: &Config) -> Result<Box<[u8]>, Error> {\n\n use std::io::Read;\n\n\n\n let bootloader_metadata = metadata.packages.iter().find(|p| {\n\n if let Some(name) = config.bootloader.name.as_ref() {\n\n p.name == name.as_str()\n\n } else {\n\n p.name == \"bootloader\" || p.name == \"bootloader_precompiled\"\n\n }\n\n });\n\n let bootloader_metadata =\n\n match bootloader_metadata {\n\n Some(package_metadata) => package_metadata.clone(),\n\n None => Err(format_err!(\"Bootloader dependency not found\\n\\n\\\n\n You need to add a dependency on the `bootloader` or `bootloader_precompiled` crates \\\n\n in your Cargo.toml.\\n\\nIn case you just updated bootimage from an earlier version, \\\n\n check out the migration guide at https://github.com/rust-osdev/bootimage/pull/16. \\\n\n Alternatively, you can downgrade to bootimage 0.4 again by executing \\\n\n `cargo install bootimage --version {} --force`.\", r#\"\"^0.4\"\"#\n\n ))?,\n", "file_path": "src/build.rs", "rank": 0, "score": 105201.8346038142 }, { "content": "fn run_impl(args: &Args, config: &Config, output_path: &Path) -> Result<(), Error> {\n\n let command = &config.run_command[0];\n\n let mut command = process::Command::new(command);\n\n for arg in &config.run_command[1..] 
{\n\n command.arg(arg.replace(\n\n \"{}\",\n\n output_path.to_str().expect(\"output must be valid unicode\"),\n\n ));\n\n }\n\n command.args(&args.run_args);\n\n command\n\n .status()\n\n .with_context(|e| format!(\"Failed to execute run `{:?}`: {}\", command, e))?;\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CargoMetadataError {\n\n error: String,\n\n}\n\n\n\nimpl fmt::Display for CargoMetadataError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.error)\n\n }\n\n}\n\n\n\nimpl failure::Fail for CargoMetadataError {}\n\n\n", "file_path": "src/build.rs", "rank": 1, "score": 79212.0964758108 }, { "content": "fn read_cargo_metadata(args: &Args) -> Result<CargoMetadata, Error> {\n\n run_cargo_fetch(args);\n\n let metadata =\n\n cargo_metadata::metadata_deps(args.manifest_path().as_ref().map(PathBuf::as_path), true)\n\n .map_err(|e| CargoMetadataError {\n\n error: format!(\"{}\", e),\n\n })?;\n\n Ok(metadata)\n\n}\n\n\n", "file_path": "src/build.rs", "rank": 3, "score": 76952.37274551937 }, { "content": "pub fn main() {\n\n use std::io::Write;\n\n if let Err(err) = run() {\n\n writeln!(io::stderr(), \"Error: {}\", err).unwrap();\n\n process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 65693.91120059557 }, { "content": "fn run() -> Result<(), failure::Error> {\n\n let command = args::parse_args();\n\n match command {\n\n Command::NoSubcommand => help::no_subcommand(),\n\n Command::Build(args) => build::build(args),\n\n Command::Run(args) => build::run(args),\n\n Command::Test(args) => test::test(args),\n\n Command::Help => Ok(help::help()),\n\n Command::BuildHelp => Ok(help::build_help()),\n\n Command::RunHelp => Ok(help::run_help()),\n\n Command::TestHelp => Ok(help::test_help()),\n\n Command::Version => Ok(println!(\"bootimage {}\", env!(\"CARGO_PKG_VERSION\"))),\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 6, "score": 54390.39543285387 }, { "content": "fn build_kernel(\n\n out_dir: 
&Path,\n\n bin_name: &str,\n\n args: &args::Args,\n\n verbose: bool,\n\n) -> Result<File, Error> {\n\n // compile kernel\n\n if verbose {\n\n println!(\"Building kernel\");\n\n }\n\n let exit_status = run_xbuild(&args.cargo_args)\n\n .with_context(|e| format!(\"Failed to run `cargo xbuild`: {}\", e))?;\n\n if !exit_status.success() {\n\n process::exit(1)\n\n }\n\n\n\n let mut kernel_path = out_dir.to_owned();\n\n kernel_path.push(bin_name);\n\n let kernel = File::open(kernel_path)\n\n .with_context(|e| format!(\"Failed to open kernel output file: {}\", e))?;\n\n Ok(kernel)\n\n}\n\n\n", "file_path": "src/build.rs", "rank": 7, "score": 33949.72968800587 }, { "content": "fn create_disk_image(\n\n root_dir: &Path,\n\n out_dir: &Path,\n\n bin_name: &str,\n\n config: &Config,\n\n mut kernel: File,\n\n mut maybe_package: Option<File>,\n\n kernel_info_block: KernelInfoBlock,\n\n bootloader_data: &[u8],\n\n verbose: bool,\n\n) -> Result<PathBuf, Error> {\n\n use std::io::{Read, Write};\n\n\n\n let mut output_path = PathBuf::from(out_dir);\n\n let file_name = format!(\"bootimage-{}.bin\", bin_name);\n\n output_path.push(file_name);\n\n\n\n if let Some(ref output) = config.output {\n\n output_path = output.clone();\n\n }\n", "file_path": "src/build.rs", "rank": 8, "score": 32824.83668814144 }, { "content": "fn run_cargo_fetch(args: &Args) {\n\n let mut command = process::Command::new(\"cargo\");\n\n command.arg(\"fetch\");\n\n if let Some(manifest_path) = args.manifest_path() {\n\n command.arg(\"--manifest-path\");\n\n command.arg(manifest_path);\n\n }\n\n if !command.status().map(|s| s.success()).unwrap_or(false) {\n\n process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "src/build.rs", "rank": 9, "score": 26901.13580507946 }, { "content": "fn parse_build_args<A>(args: A) -> Command\n\nwhere\n\n A: Iterator<Item = String>,\n\n{\n\n let mut manifest_path: Option<PathBuf> = None;\n\n let mut bin_name: Option<String> = None;\n\n let mut target: Option<String> = None;\n\n let mut 
release: Option<bool> = None;\n\n let mut cargo_args = Vec::new();\n\n let mut run_args = Vec::new();\n\n let mut run_args_started = false;\n\n {\n\n fn set<T>(arg: &mut Option<T>, value: Option<T>) {\n\n let previous = mem::replace(arg, value);\n\n assert!(\n\n previous.is_none(),\n\n \"multiple arguments of same type provided\"\n\n )\n\n };\n\n\n", "file_path": "src/args.rs", "rank": 10, "score": 24561.364230252533 }, { "content": "fn run_xbuild(args: &[String]) -> io::Result<process::ExitStatus> {\n\n let mut command = process::Command::new(\"cargo\");\n\n command.arg(\"xbuild\");\n\n command.args(args);\n\n let exit_status = command.status()?;\n\n\n\n if !exit_status.success() {\n\n let mut help_command = process::Command::new(\"cargo\");\n\n help_command.arg(\"xbuild\").arg(\"--help\");\n\n help_command.stdout(process::Stdio::null());\n\n help_command.stderr(process::Stdio::null());\n\n if let Ok(help_exit_status) = help_command.status() {\n\n if !help_exit_status.success() {\n\n let mut stderr = io::stderr();\n\n writeln!(\n\n stderr,\n\n \"Failed to run `cargo xbuild`. 
Perhaps it is not installed?\"\n\n )?;\n\n writeln!(stderr, \"Run `cargo install cargo-xbuild` to install it.\")?;\n\n }\n\n }\n\n }\n\n\n\n Ok(exit_status)\n\n}\n\n\n", "file_path": "src/build.rs", "rank": 21, "score": 20255.35110222908 }, { "content": "fn create_kernel_info_block(kernel_size: u64, maybe_package_size: Option<u64>) -> KernelInfoBlock {\n\n let kernel_size = if kernel_size <= u64::from(u32::max_value()) {\n\n kernel_size as u32\n\n } else {\n\n panic!(\"Kernel can't be loaded by BIOS bootloader because is too big\")\n\n };\n\n\n\n let package_size = if let Some(size) = maybe_package_size {\n\n if size <= u64::from(u32::max_value()) {\n\n size as u32\n\n } else {\n\n panic!(\"Package can't be loaded by BIOS bootloader because is too big\")\n\n }\n\n } else {\n\n 0\n\n };\n\n\n\n let mut kernel_info_block = [0u8; BLOCK_SIZE];\n\n LittleEndian::write_u32(&mut kernel_info_block[0..4], kernel_size);\n\n LittleEndian::write_u32(&mut kernel_info_block[8..12], package_size);\n\n\n\n kernel_info_block\n\n}\n\n\n", "file_path": "src/build.rs", "rank": 22, "score": 17540.094927630344 }, { "content": "# bootimage\n\n\n\nCreates a bootable disk image from a Rust OS kernel.\n\n\n\n## Installation\n\n\n\n```\n\n> cargo install bootimage\n\n```\n\n\n\n## Usage\n\n\n\nFirst you need to add a dependency on the `bootloader` crate:\n\n\n\n```toml\n\n# in your Cargo.toml\n\n\n\n[dependencies]\n\nbootloader = \"0.2.0-alpha\"\n\n```\n\n\n\nNow you can build the kernel project and create a bootable disk image from it by running:\n\n\n\n```\n\n> bootimage build --target your_custom_target.json [other_args]\n\n```\n\n\n\nThe command will invoke [`cargo xbuild`](https://github.com/rust-osdev/cargo-xbuild), forwarding all passed options. Then it will download and build a bootloader, by default the [rust-osdev/bootloader](https://github.com/rust-osdev/bootloader). 
Finally, it combines the kernel and the bootloader into a bootable disk image.\n\n\n\n## Configuration\n\n\n\nConfiguration is done through a through a `[package.metadata.bootimage]` table in the `Cargo.toml`. The following options are available:\n\n\n\n```toml\n\n [package.metadata.bootimage]\n\n default-target = \"\" # This target is used if no `--target` is passed\n\n output = \"bootimage.bin\" # The output file name\n\n minimum-image-size = 0 # The minimum output file size (in MiB)\n\n # The command invoked on `bootimage run`\n\n # (the \"{}\" will be replaced with the path to the bootable disk image)\n\n run-command = [\"qemu-system-x86_64\", \"-drive\", \"format=raw,file={}\"]\n\n\n\n [package.metadata.bootimage.bootloader]\n\n name = \"bootloader\" # The bootloader crate name\n\n target = \"x86_64-bootloader.json\" # Target triple for compiling the bootloader\n\n```\n\n\n\n## License\n\nDual-licensed under MIT or the Apache License (Version 2.0).\n", "file_path": "Readme.md", "rank": 23, "score": 12961.006067952954 }, { "content": "# 0.6.4\n\n\n\n- Canonicalize paths before comparing them when invoking `bootimage test`\n\n - This caused an error on Windows where the path in the cargo metadata is not fully canonicalized\n\n- Improve CI infrastructure\n\n\n\n# 0.6.3\n\n\n\n- Canonicalize paths before comparing them when invoking `bootimage build`\n\n - This caused an error on Windows where the path in the cargo metadata is not fully canonicalized\n\n\n\n# 0.6.2\n\n\n\n- Fix build on Windows (don't use the `.` directory)\n\n\n\n# 0.6.1\n\n\n\n- Fix: bootimage should now work correctly with `--manifest-path`\n\n\n\n# 0.6.0\n\n\n\n(Yanked from crates.io because of a bug fixed in 0.6.1.)\n\n\n\n**Breaking**:\n\n\n\n- When no `--manifest-path` argument is passed, `bootimage` defaults to the `Cargo.toml` in the current directory instead of the workspace root.\n\n - This fixes compilation of projects that are part of a workspace\n", "file_path": "Changelog.md", 
"rank": 24, "score": 12954.636134190097 }, { "content": " };\n\n\n\n let bootloader_manifest_path = Path::new(&bootloader_metadata.manifest_path);\n\n let bootloader_dir = bootloader_manifest_path.parent().unwrap();\n\n\n\n let mut bootloader_target_path = PathBuf::from(bootloader_dir);\n\n bootloader_target_path.push(&config.bootloader.target);\n\n\n\n let bootloader_elf_path = if bootloader_metadata.name == \"bootloader_precompiled\" {\n\n let mut bootloader_elf_path = bootloader_dir.to_path_buf();\n\n bootloader_elf_path.push(\"bootloader\");\n\n bootloader_elf_path\n\n } else {\n\n let mut args = vec![\n\n String::from(\"--manifest-path\"),\n\n bootloader_metadata.manifest_path.clone(),\n\n String::from(\"--target\"),\n\n bootloader_target_path.display().to_string(),\n\n String::from(\"--release\"),\n\n String::from(\"--features\"),\n", "file_path": "src/build.rs", "rank": 25, "score": 15.25431921685143 }, { "content": "use args::Args;\n\nuse build;\n\nuse failure::{Error, ResultExt};\n\nuse rayon::prelude::*;\n\nuse std::io::Write;\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::Duration;\n\nuse std::{fs, io, process};\n\nuse wait_timeout::ChildExt;\n\n\n\npub(crate) fn test(args: Args) -> Result<(), Error> {\n\n let (args, config, metadata, root_dir, out_dir) = build::common_setup(args)?;\n\n\n\n let test_args = args.clone();\n\n\n\n let test_config = {\n\n let mut test_config = config.clone();\n\n test_config.output = None;\n\n test_config\n\n };\n", "file_path": "src/test.rs", "rank": 26, "score": 14.902455246619141 }, { "content": " }\n\n\n\n pub fn set_target(&mut self, target: String) {\n\n assert!(self.target.is_none());\n\n self.target = Some(target.clone());\n\n self.cargo_args.push(\"--target\".into());\n\n self.cargo_args.push(target);\n\n }\n\n\n\n pub fn set_bin_name(&mut self, bin_name: String) {\n\n assert!(self.bin_name.is_none());\n\n self.bin_name = Some(bin_name.clone());\n\n self.cargo_args.push(\"--bin\".into());\n\n 
self.cargo_args.push(bin_name);\n\n }\n\n}\n", "file_path": "src/args.rs", "rank": 27, "score": 14.421618079620576 }, { "content": " .iter()\n\n .find(|p| {\n\n Path::new(&p.manifest_path)\n\n .canonicalize()\n\n .map(|path| path == config.manifest_path)\n\n .unwrap_or(false)\n\n })\n\n .expect(\"Could not read crate name from cargo metadata\");\n\n let bin_name: String = args.bin_name().as_ref().unwrap_or(&crate_.name).clone();\n\n\n\n let kernel = build_kernel(&out_dir, &bin_name, &args, verbose)?;\n\n\n\n let maybe_package = if let Some(ref path) = config.package_filepath {\n\n Some(\n\n File::open(path)\n\n .with_context(|e| format!(\"Unable to open specified package file: {}\", e))?,\n\n )\n\n } else {\n\n None\n\n };\n", "file_path": "src/build.rs", "rank": 28, "score": 13.244117661996148 }, { "content": "\n\n let test_targets = metadata\n\n .packages\n\n .iter()\n\n .find(|p| {\n\n Path::new(&p.manifest_path)\n\n .canonicalize()\n\n .map(|path| path == config.manifest_path)\n\n .unwrap_or(false)\n\n })\n\n .expect(\"Could not read crate name from cargo metadata\")\n\n .targets\n\n .iter()\n\n .filter(|t| t.kind == [\"bin\"] && t.name.starts_with(\"test-\"))\n\n .map(|target| {\n\n println!(\"BUILD: {}\", target.name);\n\n\n\n let mut target_args = test_args.clone();\n\n target_args.set_bin_name(target.name.clone());\n\n let test_path = build::build_impl(\n", "file_path": "src/test.rs", "rank": 29, "score": 13.235153605850144 }, { "content": " let config = config::read_config(manifest_path)?;\n\n\n\n if args.target().is_none() {\n\n if let Some(ref target) = config.default_target {\n\n let mut canonicalized_target = crate_root.clone();\n\n canonicalized_target.push(target);\n\n args.set_target(canonicalized_target.to_string_lossy().into_owned());\n\n }\n\n }\n\n\n\n if let &Some(ref target) = args.target() {\n\n if !target.ends_with(\".json\") {\n\n use std::io::{self, Write};\n\n use std::process;\n\n\n\n writeln!(\n\n io::stderr(),\n\n \"Please pass a path 
to `--target` (with `.json` extension`): `--target {}.json`\",\n\n target\n\n )\n", "file_path": "src/build.rs", "rank": 30, "score": 12.93175276485624 }, { "content": " error: format!(\"{}\", e),\n\n })?;\n\n let mut bootloader_elf_path = PathBuf::from(bootloader_metadata.target_directory);\n\n bootloader_elf_path.push(config.bootloader.target.file_stem().unwrap());\n\n bootloader_elf_path.push(\"release\");\n\n bootloader_elf_path.push(\"bootloader\");\n\n bootloader_elf_path\n\n };\n\n\n\n let mut bootloader_elf_bytes = Vec::new();\n\n let mut bootloader = File::open(&bootloader_elf_path)\n\n .with_context(|e| format!(\"Could not open bootloader: {}\", e))?;\n\n bootloader\n\n .read_to_end(&mut bootloader_elf_bytes)\n\n .with_context(|e| format!(\"Could not read bootloader: {}\", e))?;\n\n\n\n // copy bootloader section of ELF file to bootloader_path\n\n let elf_file = xmas_elf::ElfFile::new(&bootloader_elf_bytes).unwrap();\n\n xmas_elf::header::sanity_check(&elf_file).unwrap();\n\n let bootloader_section = elf_file\n\n .find_section_by_name(\".bootloader\")\n\n .expect(\"bootloader must have a .bootloader section\");\n\n\n\n Ok(Vec::from(bootloader_section.raw_data(&elf_file)).into_boxed_slice())\n\n}\n\n\n", "file_path": "src/build.rs", "rank": 31, "score": 12.791050423337074 }, { "content": "\n\n let maybe_package_size = if let Some(ref file) = maybe_package {\n\n Some(\n\n file.metadata()\n\n .with_context(|e| format!(\"Failed to read specified package file: {}\", e))?\n\n .len(),\n\n )\n\n } else {\n\n None\n\n };\n\n\n\n let kernel_size = kernel\n\n .metadata()\n\n .with_context(|e| format!(\"Failed to read kernel output file: {}\", e))?\n\n .len();\n\n let kernel_info_block = create_kernel_info_block(kernel_size, maybe_package_size);\n\n\n\n let bootloader = build_bootloader(&metadata, &config)\n\n .with_context(|e| format!(\"Failed to build bootloader: {}\", e))?;\n\n\n", "file_path": "src/build.rs", "rank": 32, "score": 12.321094315080046 }, { 
"content": " config\n\n .bootloader\n\n .features\n\n .iter()\n\n .fold(String::new(), |i, j| i + \" \" + j),\n\n ];\n\n\n\n if !config.bootloader.default_features {\n\n args.push(String::from(\"--no-default-features\"));\n\n }\n\n\n\n println!(\"Building bootloader v{}\", bootloader_metadata.version);\n\n let exit_status =\n\n run_xbuild(&args).with_context(|e| format!(\"Failed to run `cargo xbuild`: {}\", e))?;\n\n if !exit_status.success() {\n\n process::exit(1)\n\n }\n\n\n\n let bootloader_metadata = cargo_metadata::metadata(bootloader_manifest_path.into())\n\n .map_err(|e| CargoMetadataError {\n", "file_path": "src/build.rs", "rank": 33, "score": 11.392377472858982 }, { "content": " for line in output[7..].lines() {\n\n writeln!(io::stderr(), \" {}\", line)?;\n\n }\n\n } else {\n\n test_result = TestResult::Invalid;\n\n writeln!(io::stderr(), \"FAIL: Invalid Output:\")?;\n\n for line in output.lines() {\n\n writeln!(io::stderr(), \" {}\", line)?;\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok((target.name.clone(), test_result))\n\n })\n\n .collect::<Result<Vec<(String, TestResult)>, Error>>()?;\n\n\n\n println!(\"\");\n\n if tests.iter().all(|t| t.1 == TestResult::Ok) {\n\n println!(\"All tests succeeded.\");\n", "file_path": "src/test.rs", "rank": 34, "score": 10.42342649260204 }, { "content": " &target_args,\n\n &test_config,\n\n &metadata,\n\n &root_dir,\n\n &out_dir,\n\n false,\n\n )\n\n .expect(&format!(\"Failed to build test: {}\", target.name));\n\n println!(\"\");\n\n\n\n (target, test_path)\n\n })\n\n .collect::<Vec<(&cargo_metadata::Target, PathBuf)>>();\n\n\n\n let tests = test_targets\n\n .par_iter()\n\n .map(|(target, test_path)| {\n\n println!(\"RUN: {}\", target.name);\n\n\n\n let test_result;\n", "file_path": "src/test.rs", "rank": 35, "score": 10.360748227174621 }, { "content": " .unwrap();\n\n process::exit(1);\n\n }\n\n }\n\n\n\n let out_dir = out_dir(&args, &metadata);\n\n\n\n Ok((args, config, metadata, crate_root, out_dir))\n\n}\n\n\n\npub(crate) fn 
build_impl(\n\n args: &Args,\n\n config: &Config,\n\n metadata: &CargoMetadata,\n\n root_dir: &Path,\n\n out_dir: &Path,\n\n verbose: bool,\n\n) -> Result<PathBuf, Error> {\n\n let crate_ = metadata\n\n .packages\n", "file_path": "src/build.rs", "rank": 36, "score": 10.230166404554256 }, { "content": "\n\n if verbose {\n\n println!(\n\n \"Creating disk image at {}\",\n\n output_path\n\n .strip_prefix(root_dir)\n\n .unwrap_or(output_path.as_path())\n\n .display()\n\n );\n\n }\n\n let mut output = File::create(&output_path)\n\n .with_context(|e| format!(\"Could not create output bootimage file: {}\", e))?;\n\n output\n\n .write_all(&bootloader_data)\n\n .with_context(|e| format!(\"Could not write output bootimage file: {}\", e))?;\n\n output\n\n .write_all(&kernel_info_block)\n\n .with_context(|e| format!(\"Could not write output bootimage file: {}\", e))?;\n\n\n\n fn write_file_to_file(output: &mut File, datafile: &mut File) -> Result<usize, Error> {\n", "file_path": "src/build.rs", "rank": 37, "score": 10.208521108434969 }, { "content": " target: Option<String>,\n\n /// The release flag (also present in `cargo_args`).\n\n release: bool,\n\n}\n\n\n\nimpl Args {\n\n pub fn manifest_path(&self) -> &Option<PathBuf> {\n\n &self.manifest_path\n\n }\n\n\n\n pub fn bin_name(&self) -> &Option<String> {\n\n &self.bin_name\n\n }\n\n\n\n pub fn target(&self) -> &Option<String> {\n\n &self.target\n\n }\n\n\n\n pub fn release(&self) -> bool {\n\n self.release\n", "file_path": "src/args.rs", "rank": 38, "score": 10.102412416922618 }, { "content": " }\n\n _ if arg.starts_with(\"--bin=\") => {\n\n set(\n\n &mut bin_name,\n\n Some(String::from(arg.trim_left_matches(\"--bin=\"))),\n\n );\n\n cargo_args.push(arg);\n\n }\n\n \"--target\" => {\n\n let next = arg_iter.next();\n\n set(&mut target, next.clone());\n\n cargo_args.push(arg);\n\n if let Some(next) = next {\n\n cargo_args.push(next);\n\n }\n\n }\n\n _ if arg.starts_with(\"--target=\") => {\n\n set(\n\n &mut target,\n\n 
Some(String::from(arg.trim_left_matches(\"--target=\"))),\n", "file_path": "src/args.rs", "rank": 39, "score": 10.019421372876778 }, { "content": " let data_size = datafile.metadata()?.len();\n\n let mut buffer = [0u8; 1024];\n\n let mut acc = 0;\n\n loop {\n\n let (n, interrupted) = match datafile.read(&mut buffer) {\n\n Ok(0) => break,\n\n Ok(n) => (n, false),\n\n Err(ref e) if e.kind() == io::ErrorKind::Interrupted => (0, true),\n\n Err(e) => Err(e)?,\n\n };\n\n if !interrupted {\n\n acc += n;\n\n output.write_all(&buffer[..n])?\n\n }\n\n }\n\n\n\n assert!(data_size == acc as u64);\n\n\n\n Ok(acc)\n\n }\n", "file_path": "src/build.rs", "rank": 40, "score": 9.979282642728734 }, { "content": "use std::path::{Path, PathBuf};\n\nuse std::{env, mem};\n\nuse Command;\n\n\n\npub(crate) fn parse_args() -> Command {\n\n let mut args = env::args().skip(1);\n\n let first = args.next();\n\n match first.as_ref().map(|s| s.as_str()) {\n\n Some(\"build\") => parse_build_args(args),\n\n Some(\"run\") => match parse_build_args(args) {\n\n Command::Build(args) => Command::Run(args),\n\n Command::BuildHelp => Command::RunHelp,\n\n cmd => cmd,\n\n },\n\n Some(\"test\") => match parse_build_args(args) {\n\n Command::Build(args) => {\n\n assert_eq!(\n\n args.bin_name, None,\n\n \"No `--bin` argument allowed for `bootimage test`\"\n\n );\n", "file_path": "src/args.rs", "rank": 41, "score": 9.79438579645309 }, { "content": " let target_dir = PathBuf::from(&metadata.target_directory);\n\n let mut out_dir = target_dir;\n\n if let &Some(ref target) = args.target() {\n\n out_dir.push(Path::new(target).file_stem().unwrap().to_str().unwrap());\n\n }\n\n if args.release() {\n\n out_dir.push(\"release\");\n\n } else {\n\n out_dir.push(\"debug\");\n\n }\n\n out_dir\n\n }\n\n\n\n let metadata = read_cargo_metadata(&args)?;\n\n let manifest_path = args\n\n .manifest_path()\n\n .as_ref()\n\n .map(Clone::clone)\n\n .unwrap_or(Path::new(\"Cargo.toml\").canonicalize().unwrap());\n\n let crate_root = 
manifest_path.parent().unwrap().to_path_buf();\n", "file_path": "src/build.rs", "rank": 42, "score": 9.218490716180824 }, { "content": "use args::{self, Args};\n\nuse byteorder::{ByteOrder, LittleEndian};\n\nuse cargo_metadata::{self, Metadata as CargoMetadata};\n\nuse config::{self, Config};\n\nuse failure::{self, Error, ResultExt};\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse std::path::{Path, PathBuf};\n\nuse std::{fmt, io, process};\n\nuse xmas_elf;\n\n\n\nconst BLOCK_SIZE: usize = 512;\n", "file_path": "src/build.rs", "rank": 43, "score": 9.050007807318998 }, { "content": " None => {\n\n child\n\n .kill()\n\n .with_context(|e| format!(\"Failed to kill QEMU: {}\", e))?;\n\n child\n\n .wait()\n\n .with_context(|e| format!(\"Failed to wait for QEMU process: {}\", e))?;\n\n test_result = TestResult::TimedOut;\n\n writeln!(io::stderr(), \"Timed Out\")?;\n\n }\n\n Some(_) => {\n\n let output = fs::read_to_string(&output_file).with_context(|e| {\n\n format_err!(\"Failed to read test output file {}: {}\", output_file, e)\n\n })?;\n\n if output.starts_with(\"ok\\n\") {\n\n test_result = TestResult::Ok;\n\n println!(\"OK: {}\", target.name);\n\n } else if output.starts_with(\"failed\\n\") {\n\n test_result = TestResult::Failed;\n\n writeln!(io::stderr(), \"FAIL:\")?;\n", "file_path": "src/test.rs", "rank": 44, "score": 8.315559985212754 }, { "content": "\n\n fn pad_file(output: &mut File, written_size: usize, padding: &[u8]) -> Result<(), Error> {\n\n let padding_size = (padding.len() - (written_size % padding.len())) % padding.len();\n\n output\n\n .write_all(&padding[..padding_size])\n\n .with_context(|e| format!(\"Could not write to output file: {}\", e))?;\n\n Ok(())\n\n }\n\n\n\n // write out kernel elf file\n\n\n\n let kernel_size = write_file_to_file(&mut output, &mut kernel)?;\n\n\n\n pad_file(&mut output, kernel_size, &[0; 512])?;\n\n\n\n if let Some(ref mut package) = maybe_package {\n\n println!(\"Writing specified package to output\");\n\n let 
package_size = write_file_to_file(&mut output, package)?;\n\n pad_file(&mut output, package_size, &[0; 512])?;\n\n }\n", "file_path": "src/build.rs", "rank": 45, "score": 8.09811689323057 }, { "content": " cargo_args,\n\n run_args,\n\n bin_name,\n\n target,\n\n manifest_path,\n\n release: release.unwrap_or(false),\n\n })\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Args {\n\n /// All arguments that are passed to cargo.\n\n pub cargo_args: Vec<String>,\n\n /// All arguments that are passed to the runner.\n\n pub run_args: Vec<String>,\n\n /// The manifest path (also present in `cargo_args`).\n\n manifest_path: Option<PathBuf>,\n\n /// The name of the binary (passed `--bin` argument) (also present in `cargo_args`).\n\n bin_name: Option<String>,\n\n /// The target triple (also present in `cargo_args`).\n", "file_path": "src/args.rs", "rank": 46, "score": 7.494246385036485 }, { "content": "\n\n if let Some(min_size) = config.minimum_image_size {\n\n // we already wrote to output successfully,\n\n // both metadata and set_len should succeed.\n\n if output.metadata()?.len() < min_size {\n\n output.set_len(min_size)?;\n\n }\n\n }\n\n\n\n Ok(output_path)\n\n}\n", "file_path": "src/build.rs", "rank": 47, "score": 7.135376454680477 }, { "content": " let mut arg_iter = args.into_iter();\n\n while let Some(arg) = arg_iter.next() {\n\n if run_args_started {\n\n run_args.push(arg);\n\n continue;\n\n }\n\n match arg.as_ref() {\n\n \"--help\" | \"-h\" => {\n\n return Command::BuildHelp;\n\n }\n\n \"--version\" => {\n\n return Command::Version;\n\n }\n\n \"--bin\" => {\n\n let next = arg_iter.next();\n\n set(&mut bin_name, next.clone());\n\n cargo_args.push(arg);\n\n if let Some(next) = next {\n\n cargo_args.push(next);\n\n }\n", "file_path": "src/args.rs", "rank": 48, "score": 6.2580085696232075 }, { "content": " create_disk_image(\n\n root_dir,\n\n out_dir,\n\n &bin_name,\n\n &config,\n\n kernel,\n\n maybe_package,\n\n kernel_info_block,\n\n &bootloader,\n\n verbose,\n\n 
)\n\n}\n\n\n", "file_path": "src/build.rs", "rank": 49, "score": 6.224751880653065 }, { "content": " let output_file = format!(\"{}-output.txt\", test_path.display());\n\n\n\n let mut command = process::Command::new(\"qemu-system-x86_64\");\n\n command.arg(\"-drive\");\n\n command.arg(format!(\"format=raw,file={}\", test_path.display()));\n\n command.arg(\"-device\");\n\n command.arg(\"isa-debug-exit,iobase=0xf4,iosize=0x04\");\n\n command.arg(\"-display\");\n\n command.arg(\"none\");\n\n command.arg(\"-serial\");\n\n command.arg(format!(\"file:{}\", output_file));\n\n command.stderr(process::Stdio::null());\n\n let mut child = command\n\n .spawn()\n\n .with_context(|e| format_err!(\"Failed to launch QEMU: {:?}\\n{}\", command, e))?;\n\n let timeout = Duration::from_secs(60);\n\n match child\n\n .wait_timeout(timeout)\n\n .with_context(|e| format!(\"Failed to wait with timeout: {}\", e))?\n\n {\n", "file_path": "src/test.rs", "rank": 50, "score": 5.678323524922913 }, { "content": "extern crate byteorder;\n\nextern crate cargo_metadata;\n\nextern crate rayon;\n\nextern crate toml;\n\nextern crate wait_timeout;\n\nextern crate xmas_elf;\n\n#[macro_use]\n\nextern crate failure;\n\n\n\nuse args::Args;\n\nuse std::{io, process};\n\n\n\nmod args;\n\nmod build;\n\nmod config;\n\nmod help;\n\nmod test;\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 5.425104422914432 }, { "content": " print!(\"{}\", TEST_HELP);\n\n}\n\n\n\npub(crate) fn no_subcommand() -> ! {\n\n println!(\"Please invoke `bootimage` with a subcommand (e.g. 
`bootimage build`).\");\n\n println!();\n\n println!(\"See `bootimage --help` for more information.\");\n\n process::exit(1);\n\n}\n", "file_path": "src/help/mod.rs", "rank": 52, "score": 5.045109437356137 }, { "content": " );\n\n cargo_args.push(arg);\n\n }\n\n \"--manifest-path\" => {\n\n let next = arg_iter.next();\n\n set(\n\n &mut manifest_path,\n\n next.as_ref().map(|p| {\n\n Path::new(&p)\n\n .canonicalize()\n\n .expect(\"--manifest-path invalid\")\n\n }),\n\n );\n\n cargo_args.push(arg);\n\n if let Some(next) = next {\n\n cargo_args.push(next);\n\n }\n\n }\n\n _ if arg.starts_with(\"--manifest-path=\") => {\n\n let path = Path::new(arg.trim_left_matches(\"--manifest-path=\"))\n", "file_path": "src/args.rs", "rank": 53, "score": 4.364270711228342 }, { "content": " .canonicalize()\n\n .expect(\"--manifest-path invalid\");\n\n set(&mut manifest_path, Some(path));\n\n cargo_args.push(arg);\n\n }\n\n \"--release\" => {\n\n set(&mut release, Some(true));\n\n cargo_args.push(arg);\n\n }\n\n \"--\" => {\n\n run_args_started = true;\n\n }\n\n _ => {\n\n cargo_args.push(arg);\n\n }\n\n };\n\n }\n\n }\n\n\n\n Command::Build(Args {\n", "file_path": "src/args.rs", "rank": 54, "score": 4.0075668648312455 }, { "content": "use std::process;\n\n\n\nconst HELP: &str = include_str!(\"help.txt\");\n\nconst BUILD_HELP: &str = include_str!(\"build_help.txt\");\n\nconst RUN_HELP: &str = include_str!(\"run_help.txt\");\n\nconst TEST_HELP: &str = include_str!(\"test_help.txt\");\n\n\n\npub(crate) fn help() {\n\n print!(\"{}\", HELP);\n\n}\n\n\n\npub(crate) fn build_help() {\n\n print!(\"{}\", BUILD_HELP);\n\n}\n\n\n\npub(crate) fn run_help() {\n\n print!(\"{}\", RUN_HELP);\n\n}\n\n\n\npub(crate) fn test_help() {\n", "file_path": "src/help/mod.rs", "rank": 55, "score": 3.9767724392479966 } ]
Rust
der/src/asn1/bit_string.rs
xiaoyuxlu/utils
06276215ebdf9e5258c6daadcace9f9c64f57065
use crate::{ Any, ByteSlice, Encodable, Encoder, Error, ErrorKind, Header, Length, Result, Tag, Tagged, }; use core::convert::TryFrom; #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct BitString<'a> { inner: ByteSlice<'a>, } impl<'a> BitString<'a> { pub fn new(slice: &'a [u8]) -> Result<Self> { ByteSlice::new(slice) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()) } pub fn as_bytes(&self) -> &'a [u8] { self.inner.as_bytes() } pub fn len(&self) -> Length { self.inner.len() } pub fn is_empty(&self) -> bool { self.inner.is_empty() } fn header(self) -> Result<Header> { Ok(Header { tag: Tag::BitString, length: (self.inner.len() + 1u16)?, }) } } impl AsRef<[u8]> for BitString<'_> { fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl<'a> From<&BitString<'a>> for BitString<'a> { fn from(value: &BitString<'a>) -> BitString<'a> { *value } } impl<'a> TryFrom<Any<'a>> for BitString<'a> { type Error = Error; fn try_from(any: Any<'a>) -> Result<BitString<'a>> { any.tag().assert_eq(Tag::BitString)?; if let Some(bs) = any.as_bytes().get(1..) { if any.as_bytes()[0] == 0 { return ByteSlice::new(bs) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()); } } Err(ErrorKind::Length { tag: Self::TAG }.into()) } } impl<'a> From<BitString<'a>> for Any<'a> { fn from(bit_string: BitString<'a>) -> Any<'a> { Any { tag: Tag::BitString, value: bit_string.inner, } } } impl<'a> From<BitString<'a>> for &'a [u8] { fn from(bit_string: BitString<'a>) -> &'a [u8] { bit_string.as_bytes() } } impl<'a> Encodable for BitString<'a> { fn encoded_len(&self) -> Result<Length> { self.header()?.encoded_len()? 
+ 1u16 + self.inner.len() } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { self.header()?.encode(encoder)?; encoder.byte(0)?; encoder.bytes(self.as_bytes()) } } impl<'a> Tagged for BitString<'a> { const TAG: Tag = Tag::BitString; } #[cfg(test)] mod tests { use super::{Any, BitString, ErrorKind, Result, Tag}; use core::convert::TryInto; fn parse_bitstring_from_any(bytes: &[u8]) -> Result<BitString<'_>> { Any::new(Tag::BitString, bytes)?.try_into() } #[test] fn reject_non_prefixed_bitstring() { let err = parse_bitstring_from_any(&[]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn reject_non_zero_prefix() { let err = parse_bitstring_from_any(&[1, 1, 2, 3]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn decode_empty_bitstring() { let bs = parse_bitstring_from_any(&[0]).unwrap(); assert_eq!(bs.as_ref(), &[]); } #[test] fn decode_non_empty_bitstring() { let bs = parse_bitstring_from_any(&[0, 1, 2, 3]).unwrap(); assert_eq!(bs.as_ref(), &[1, 2, 3]); } }
use crate::{ Any, ByteSlice, Encodable, Encoder, Error, ErrorKind, Header, Length, Result, Tag, Tagged, }; use core::convert::TryFrom; #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct BitString<'a> { inner: ByteSlice<'a>, } impl<'a> BitString<'a> { pub fn new(slice: &'a [u8]) -> Result<Self> { ByteSlice::new(slice) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()) } pub fn as_bytes(&self) -> &'a [u8] { self.inner.as_bytes() } pub fn len(&self) -> Length { self.inner.len() } pub fn is_empty(&self) -> bool { self.inner.is_empty() } fn header(self) -> Result<Header> { Ok(Header { tag: Tag::BitString, length: (self.inner.len() + 1u16)?, }) } } impl AsRef<[u8]> for BitString<'_> { fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl<'a> From<&BitString<'a>> for BitString<'a> { fn from(value: &BitString<'a>) -> BitString<'a> { *value } } impl<'a> TryFrom<Any<'a>> for BitString<'a> { type Error = Error;
} impl<'a> From<BitString<'a>> for Any<'a> { fn from(bit_string: BitString<'a>) -> Any<'a> { Any { tag: Tag::BitString, value: bit_string.inner, } } } impl<'a> From<BitString<'a>> for &'a [u8] { fn from(bit_string: BitString<'a>) -> &'a [u8] { bit_string.as_bytes() } } impl<'a> Encodable for BitString<'a> { fn encoded_len(&self) -> Result<Length> { self.header()?.encoded_len()? + 1u16 + self.inner.len() } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { self.header()?.encode(encoder)?; encoder.byte(0)?; encoder.bytes(self.as_bytes()) } } impl<'a> Tagged for BitString<'a> { const TAG: Tag = Tag::BitString; } #[cfg(test)] mod tests { use super::{Any, BitString, ErrorKind, Result, Tag}; use core::convert::TryInto; fn parse_bitstring_from_any(bytes: &[u8]) -> Result<BitString<'_>> { Any::new(Tag::BitString, bytes)?.try_into() } #[test] fn reject_non_prefixed_bitstring() { let err = parse_bitstring_from_any(&[]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn reject_non_zero_prefix() { let err = parse_bitstring_from_any(&[1, 1, 2, 3]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn decode_empty_bitstring() { let bs = parse_bitstring_from_any(&[0]).unwrap(); assert_eq!(bs.as_ref(), &[]); } #[test] fn decode_non_empty_bitstring() { let bs = parse_bitstring_from_any(&[0, 1, 2, 3]).unwrap(); assert_eq!(bs.as_ref(), &[1, 2, 3]); } }
fn try_from(any: Any<'a>) -> Result<BitString<'a>> { any.tag().assert_eq(Tag::BitString)?; if let Some(bs) = any.as_bytes().get(1..) { if any.as_bytes()[0] == 0 { return ByteSlice::new(bs) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()); } } Err(ErrorKind::Length { tag: Self::TAG }.into()) }
function_block-full_function
[ { "content": "/// Obtain the length of an ASN.1 `SEQUENCE` of [`Encodable`] values when\n\n/// serialized as ASN.1 DER, including the `SEQUENCE` tag and length prefix.\n\npub fn encoded_len(encodables: &[&dyn Encodable]) -> Result<Length> {\n\n let inner_len = encoded_len_inner(encodables)?;\n\n Header::new(Tag::Sequence, inner_len)?.encoded_len() + inner_len\n\n}\n\n\n\n/// Obtain the inner length of an ASN.1 `SEQUENCE` of [`Encodable`] values\n\n/// excluding the tag and length.\n\npub(crate) fn encoded_len_inner(encodables: &[&dyn Encodable]) -> Result<Length> {\n\n encodables\n\n .iter()\n\n .fold(Ok(Length::zero()), |sum, encodable| {\n\n sum + encodable.encoded_len()?\n\n })\n\n}\n\n\n\n/// ASN.1 `SEQUENCE` type.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub struct Sequence<'a> {\n\n /// Inner value\n\n inner: ByteSlice<'a>,\n", "file_path": "der/src/asn1/sequence.rs", "rank": 0, "score": 236482.11450597644 }, { "content": "#[inline(always)]\n\nfn decode_padding(input: &[u8]) -> Result<(usize, i16), InvalidEncodingError> {\n\n if input.len() % 4 != 0 {\n\n return Err(InvalidEncodingError);\n\n }\n\n\n\n let unpadded_len = match *input {\n\n [.., b0, b1] => {\n\n let pad_len = is_pad_ct(b0) + is_pad_ct(b1);\n\n input.len() - pad_len as usize\n\n }\n\n _ => input.len(),\n\n };\n\n\n\n let padding_len = input.len() - unpadded_len;\n\n\n\n let err = match *input {\n\n [.., b0] if padding_len == 1 => is_pad_ct(b0) ^ 1,\n\n [.., b0, b1] if padding_len == 2 => (is_pad_ct(b0) & is_pad_ct(b1)) ^ 1,\n\n _ => {\n\n if padding_len == 0 {\n", "file_path": "base64ct/src/encoding.rs", "rank": 1, "score": 209175.55017611803 }, { "content": "/// Read a git-flavoured VLQ value from `&data[*pos..]`.\n\n/// Increments `pos` to a number of read bytes.\n\n///\n\n/// This function returns `None` if buffer does not contain enough bytes\n\n/// or if VLQ is bigger than 4 bytes.\n\n///\n\n/// See the test submodule for example values.\n\nfn read_vlq(data: &[u8], pos: &mut 
usize) -> Result<usize, Error> {\n\n let b = data.get(*pos).ok_or(Error::UnexpectedEnd)?;\n\n *pos += 1;\n\n let mut next = b & NEXT_MASK;\n\n let mut val = (b & VAL_MASK) as usize;\n\n\n\n macro_rules! step {\n\n () => {\n\n if next == 0 {\n\n return Ok(val);\n\n }\n\n let b = data.get(*pos).ok_or(Error::UnexpectedEnd)?;\n\n *pos += 1;\n\n next = b & NEXT_MASK;\n\n let t = (b & VAL_MASK) as usize;\n\n val = ((val + 1) << 7) + t;\n\n };\n\n }\n\n\n\n step!();\n", "file_path": "blobby/src/lib.rs", "rank": 2, "score": 179216.26809708722 }, { "content": "fn encode(reader: impl BufRead, mut writer: impl Write) -> io::Result<usize> {\n\n let mut blobs = Vec::new();\n\n for line in reader.lines() {\n\n let blob = hex::decode(line?.as_str())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;\n\n blobs.push(blob);\n\n }\n\n\n\n let mut idx_map = HashMap::new();\n\n for blob in blobs.iter().filter(|b| b.len() != 0) {\n\n let v = idx_map.entry(blob.as_slice()).or_insert(0);\n\n *v += 1;\n\n }\n\n\n\n let mut idx: Vec<&[u8]> = idx_map\n\n .iter()\n\n .filter(|(_, &v)| v > 1)\n\n .map(|(&k, _)| k)\n\n .collect();\n\n idx.sort_by_key(|e| {\n", "file_path": "blobby/examples/convert.rs", "rank": 3, "score": 171581.51828941348 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let args: Vec<String> = env::args().skip(1).collect();\n\n let is_encode = match args[0].as_str() {\n\n \"encode\" => true,\n\n \"decode\" => false,\n\n _ => Err(\"unknown mode\")?,\n\n };\n\n let in_path = args[1].as_str();\n\n let out_path = args[2].as_str();\n\n let in_file = BufReader::new(File::open(in_path)?);\n\n let out_file = BufWriter::new(File::create(out_path)?);\n\n\n\n let n = if is_encode {\n\n encode(in_file, out_file)?\n\n } else {\n\n decode(in_file, out_file)?\n\n };\n\n\n\n println!(\"Processed {} record(s)\", n);\n\n\n\n Ok(())\n\n}\n", "file_path": "blobby/examples/convert.rs", "rank": 4, "score": 159931.7152869883 }, { "content": "#[inline(always)]\n\nfn 
is_pad_ct(input: u8) -> i16 {\n\n ((((PAD as i16 - 1) - input as i16) & (input as i16 - (PAD as i16 + 1))) >> 8) & 1\n\n}\n\n\n\n#[inline(always)]\n\nconst fn encoded_len_inner(n: usize, padded: bool) -> Option<usize> {\n\n // TODO: replace with `checked_mul` and `map` on stabilization\n\n if n > usize::MAX / 4 {\n\n return None;\n\n }\n\n\n\n let q = 4 * n;\n\n\n\n if padded {\n\n Some(((q / 3) + 3) & !3)\n\n } else {\n\n Some((q / 3) + (q % 3 != 0) as usize)\n\n }\n\n}\n", "file_path": "base64ct/src/encoding.rs", "rank": 5, "score": 141370.56560205447 }, { "content": "fn encode_vlq(mut val: usize, buf: &mut [u8; 4]) -> &[u8] {\n\n macro_rules! step {\n\n ($n:expr) => {\n\n buf[$n] = if $n == 3 {\n\n (val & (VAL_MASK as usize)) as u8\n\n } else {\n\n val -= 1;\n\n NEXT_MASK | (val & (VAL_MASK as usize)) as u8\n\n };\n\n val >>= 7;\n\n if val == 0 {\n\n return &buf[$n..];\n\n }\n\n };\n\n }\n\n\n\n step!(3);\n\n step!(2);\n\n step!(1);\n\n step!(0);\n\n panic!(\"integer is too big\")\n\n}\n\n\n", "file_path": "blobby/examples/convert.rs", "rank": 6, "score": 138155.14214132994 }, { "content": "/// Trait for padding messages divided into blocks\n\npub trait Padding<BlockSize: ArrayLength<u8>> {\n\n /// Pads `block` filled with data up to `pos` (i.e length of a message\n\n /// stored in the block is equal to `pos`).\n\n ///\n\n /// # Panics\n\n /// If `pos` is bigger than `BlockSize`. 
Most paddin algorithms also\n\n /// panic if they are equal.\n\n fn pad(block: &mut Block<BlockSize>, pos: usize);\n\n\n\n /// Unpad data in the `block`.\n\n ///\n\n /// Returns `Err(UnpadError)` if the block containts malformed padding.\n\n fn unpad(block: &Block<BlockSize>) -> Result<&[u8], UnpadError>;\n\n}\n\n\n\n/// Pad block with zeros.\n\n///\n\n/// ```\n\n/// use block_padding::{ZeroPadding, Padding};\n\n/// use generic_array::{GenericArray, typenum::U8};\n", "file_path": "block-padding/src/lib.rs", "rank": 7, "score": 135218.5463924687 }, { "content": "/// Obtain the length of a collection.\n\npub trait Length {\n\n /// Get the length of this collection.\n\n fn len(&self) -> usize;\n\n\n\n /// Is the collection empty?\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 8, "score": 128129.13484423718 }, { "content": "/// Trait which generalizes digest functionality of buffers.\n\npub trait DigestBuffer<BlockSize: ArrayLength<u8>>: Default {\n\n /// Digest data in `input` in blocks of size `BlockSize` using\n\n /// the `compress` function, which accepts slice of blocks.\n\n fn digest_blocks(&mut self, input: &[u8], compress: impl FnMut(&[Block<BlockSize>]));\n\n\n\n /// Reset buffer by setting cursor position to zero.\n\n fn reset(&mut self);\n\n}\n", "file_path": "block-buffer/src/lib.rs", "rank": 9, "score": 127380.79880551509 }, { "content": "#[inline(always)]\n\nfn set(a: &mut [u8], b: &[u8]) {\n\n a.copy_from_slice(b);\n\n}\n\n\n", "file_path": "block-buffer/src/buffer.rs", "rank": 10, "score": 120702.39186313757 }, { "content": "#[inline(always)]\n\nfn xor(a: &mut [u8], b: &[u8]) {\n\n debug_assert_eq!(a.len(), b.len());\n\n a.iter_mut().zip(b.iter()).for_each(|(a, &b)| *a ^= b);\n\n}\n\n\n", "file_path": "block-buffer/src/buffer.rs", "rank": 11, "score": 120702.39186313757 }, { "content": "/// Types with an associated ASN.1 [`Tag`].\n\npub trait Tagged {\n\n /// ASN.1 tag\n\n const 
TAG: Tag;\n\n}\n\n\n\n/// ASN.1 tags.\n\n///\n\n/// Tags are the leading byte of the Tag-Length-Value encoding used by ASN.1\n\n/// DER and identify the type of the subsequent value.\n\n#[derive(Copy, Clone, Eq, PartialEq)]\n\n#[allow(clippy::identity_op)]\n\n#[non_exhaustive]\n\n#[repr(u8)]\n\npub enum Tag {\n\n /// `BOOLEAN` tag.\n\n Boolean = 0x01,\n\n\n\n /// `INTEGER` tag.\n\n Integer = 0x02,\n\n\n", "file_path": "der/src/tag.rs", "rank": 12, "score": 111696.46895135997 }, { "content": "pub fn encrypt_in_place<'b>(\n\n params: &Parameters<'_>,\n\n password: impl AsRef<[u8]>,\n\n buffer: &'b mut [u8],\n\n pos: usize,\n\n) -> Result<&'b [u8], CryptoError> {\n\n let pbkdf2_params = params.kdf.pbkdf2().ok_or(CryptoError)?;\n\n\n\n let key = EncryptionKey::derive_with_pbkdf2::<Sha256>(\n\n password.as_ref(),\n\n &pbkdf2_params,\n\n params.encryption.key_size(),\n\n )?;\n\n\n\n match params.encryption {\n\n EncryptionScheme::Aes128Cbc { iv } => {\n\n let cipher = Aes128Cbc::new_var(key.as_slice(), iv).map_err(|_| CryptoError)?;\n\n cipher.encrypt(buffer, pos).map_err(|_| CryptoError)\n\n }\n\n EncryptionScheme::Aes256Cbc { iv } => {\n\n let cipher = Aes256Cbc::new_var(key.as_slice(), iv).map_err(|_| CryptoError)?;\n\n cipher.encrypt(buffer, pos).map_err(|_| CryptoError)\n\n }\n\n }\n\n}\n\n\n", "file_path": "pkcs5/src/pbes2/encryption.rs", "rank": 13, "score": 111635.77011383022 }, { "content": "/// Decrypt a message encrypted with PBES2-based key derivation\n\npub fn decrypt_in_place<'a>(\n\n params: &Parameters<'_>,\n\n password: impl AsRef<[u8]>,\n\n buffer: &'a mut [u8],\n\n) -> Result<&'a [u8], CryptoError> {\n\n let pbkdf2_params = params.kdf.pbkdf2().ok_or(CryptoError)?;\n\n\n\n let key = EncryptionKey::derive_with_pbkdf2::<Sha256>(\n\n password.as_ref(),\n\n &pbkdf2_params,\n\n params.encryption.key_size(),\n\n )?;\n\n\n\n match params.encryption {\n\n EncryptionScheme::Aes128Cbc { iv } => {\n\n let cipher = Aes128Cbc::new_var(key.as_slice(), iv).map_err(|_| 
CryptoError)?;\n\n cipher.decrypt(buffer).map_err(|_| CryptoError)\n\n }\n\n EncryptionScheme::Aes256Cbc { iv } => {\n\n let cipher = Aes256Cbc::new_var(key.as_slice(), iv).map_err(|_| CryptoError)?;\n\n cipher.decrypt(buffer).map_err(|_| CryptoError)\n\n }\n\n }\n\n}\n\n\n", "file_path": "pkcs5/src/pbes2/encryption.rs", "rank": 14, "score": 111635.77011383022 }, { "content": "/// Base64 encoding trait.\n\n///\n\n/// This trait must be imported to make use of any Base64 variant defined\n\n/// in this crate.\n\npub trait Encoding {\n\n /// Decode a Base64 string into the provided destination buffer.\n\n fn decode(src: impl AsRef<[u8]>, dst: &mut [u8]) -> Result<&[u8], Error>;\n\n\n\n /// Decode a Base64 string in-place.\n\n fn decode_in_place(buf: &mut [u8]) -> Result<&[u8], InvalidEncodingError>;\n\n\n\n /// Decode a Base64 string into a byte vector.\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn decode_vec(input: &str) -> Result<Vec<u8>, Error>;\n\n\n\n /// Encode the input byte slice as Base64.\n\n ///\n\n /// Writes the result into the provided destination slice, returning an\n\n /// ASCII-encoded Base64 string value.\n\n fn encode<'a>(src: &[u8], dst: &'a mut [u8]) -> Result<&'a str, InvalidLengthError>;\n\n\n\n /// Encode input byte slice into a [`String`] containing Base64.\n\n ///\n", "file_path": "base64ct/src/encoding.rs", "rank": 15, "score": 111121.41718253624 }, { "content": "/// Encoding trait.\n\npub trait Encodable {\n\n /// Compute the length of this value in bytes when encoded as ASN.1 DER.\n\n fn encoded_len(&self) -> Result<Length>;\n\n\n\n /// Encode this value as ASN.1 DER using the provided [`Encoder`].\n\n fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()>;\n\n\n\n /// Encode this value to the provided byte slice, returning a sub-slice\n\n /// containing the encoded message.\n\n fn encode_to_slice<'a>(&self, buf: &'a mut [u8]) -> Result<&'a [u8]> {\n\n let mut encoder = 
Encoder::new(buf);\n\n self.encode(&mut encoder)?;\n\n Ok(encoder.finish()?)\n\n }\n\n\n\n /// Encode this message as ASN.1 DER, appending it to the provided\n\n /// byte vector.\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn encode_to_vec(&self, buf: &mut Vec<u8>) -> Result<Length> {\n", "file_path": "der/src/encodable.rs", "rank": 16, "score": 111113.49945430254 }, { "content": "#[inline(never)]\n\nfn get_raw_data() -> Vec<u8> {\n\n (0..RAW_LEN).map(|i| i as u8).collect()\n\n}\n\n\n", "file_path": "base64ct/benches/mod.rs", "rank": 17, "score": 110356.71325541001 }, { "content": "fn unreachable<S, B: ArrayLength<u8>>(_: &mut S) -> ParBlock<B, U1> {\n\n unreachable!();\n\n}\n", "file_path": "block-buffer/src/buffer.rs", "rank": 18, "score": 107539.12469279162 }, { "content": "struct Foo {\n\n secret: u64,\n\n}\n\n\n\nopaque_debug::implement!(Foo);\n\n\n", "file_path": "opaque-debug/tests/mod.rs", "rank": 19, "score": 105702.87520804361 }, { "content": "#[test]\n\nfn debug_formatting() {\n\n let s = format!(\"{:?}\", Foo { secret: 42 });\n\n assert_eq!(s, \"Foo { ... 
}\");\n\n}\n", "file_path": "opaque-debug/tests/mod.rs", "rank": 20, "score": 104902.83599785343 }, { "content": "#[proc_macro]\n\npub fn hex(input: TokenStream) -> TokenStream {\n\n let ts = TokenStream::from_iter(TokenTreeIter::new(input));\n\n TokenStream::from(TokenTree::Group(Group::new(Delimiter::Bracket, ts)))\n\n}\n", "file_path": "hex-literal/src/lib.rs", "rank": 21, "score": 97666.0303463254 }, { "content": "#[test]\n\nfn encode_enum_variants() {\n\n let mut buf = [0u8; 128];\n\n\n\n let utc_time = Time::from_bytes(UTC_TIMESTAMP).unwrap();\n\n let mut encoder = Encoder::new(&mut buf);\n\n utc_time.encode(&mut encoder).unwrap();\n\n assert_eq!(UTC_TIMESTAMP, encoder.finish().unwrap());\n\n\n\n let general_time = Time::from_bytes(GENERAL_TIMESTAMP).unwrap();\n\n let mut encoder = Encoder::new(&mut buf);\n\n general_time.encode(&mut encoder).unwrap();\n\n assert_eq!(GENERAL_TIMESTAMP, encoder.finish().unwrap());\n\n}\n", "file_path": "der/tests/derive.rs", "rank": 22, "score": 94111.56488027994 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn encode_rsa_2048_pem() {\n\n let pk = SubjectPublicKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n let pk_encoded = PublicKeyDocument::from(pk).to_pem();\n\n assert_eq!(RSA_2048_PEM_EXAMPLE.trim_end(), pk_encoded);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 23, "score": 91760.42189876703 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn encode_rsa_2048_der() {\n\n let pk = PrivateKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n assert_eq!(RSA_2048_DER_EXAMPLE, pk.to_der().as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 24, "score": 91760.42189876703 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn encode_ed25519_pem() {\n\n let pk = SubjectPublicKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n let pk_encoded = PublicKeyDocument::from(pk).to_pem();\n\n assert_eq!(ED25519_PEM_EXAMPLE.trim_end(), pk_encoded);\n\n}\n\n\n", 
"file_path": "pkcs8/tests/public_key.rs", "rank": 25, "score": 91760.42189876703 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn encode_ed25519_der() {\n\n let pk = SubjectPublicKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_vec().unwrap();\n\n assert_eq!(ED25519_DER_EXAMPLE, pk_encoded.as_slice());\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 26, "score": 91760.42189876703 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn encode_rsa_2048_pem() {\n\n let pk = PrivateKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n assert_eq!(RSA_2048_PEM_EXAMPLE.trim_end(), &*pk.to_pem());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 27, "score": 91760.42189876703 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn encode_ed25519_der() {\n\n let pk = PrivateKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n assert_eq!(ED25519_DER_EXAMPLE, pk.to_der().as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 28, "score": 91760.42189876703 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn encode_ed25519_pem() {\n\n let pk = PrivateKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n assert_eq!(ED25519_PEM_EXAMPLE.trim_end(), &*pk.to_pem());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 29, "score": 91760.42189876703 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn encode_rsa_2048_der() {\n\n let pk = SubjectPublicKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_vec().unwrap();\n\n assert_eq!(RSA_2048_DER_EXAMPLE, pk_encoded.as_slice());\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 30, "score": 91760.42189876703 }, { "content": "#[test]\n\nfn encode_pbes2_pbkdf2_sha1_aes128cbc() {\n\n let mut buffer = [0u8; 1024];\n\n\n\n let scheme = pkcs5::EncryptionScheme::try_from(PBES2_PBKDF2_SHA1_AES128CBC_ALG_ID).unwrap();\n\n let mut encoder = der::Encoder::new(&mut buffer);\n\n 
scheme.encode(&mut encoder).unwrap();\n\n\n\n let encoded_der = encoder.finish().unwrap();\n\n assert_eq!(encoded_der, PBES2_PBKDF2_SHA1_AES128CBC_ALG_ID);\n\n}\n\n\n\n/// Encoding tests\n", "file_path": "pkcs5/tests/pbes2.rs", "rank": 31, "score": 89556.3287641802 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn encode_ec_p256_pem() {\n\n let pk = PrivateKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n assert_eq!(EC_P256_PEM_EXAMPLE.trim_end(), &*pk.to_pem());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 32, "score": 89556.3287641802 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn encode_ec_p256_pem() {\n\n let pk = SubjectPublicKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n let pk_encoded = PublicKeyDocument::from(pk).to_pem();\n\n assert_eq!(EC_P256_PEM_EXAMPLE.trim_end(), pk_encoded);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 33, "score": 89556.3287641802 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn encode_ec_p256_der() {\n\n let pk = SubjectPublicKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_vec().unwrap();\n\n assert_eq!(EC_P256_DER_EXAMPLE, pk_encoded.as_slice());\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 34, "score": 89556.3287641802 }, { "content": "#[test]\n\nfn encode_pbes2_pbkdf2_sha256_aes256cbc() {\n\n let mut buffer = [0u8; 1024];\n\n\n\n let scheme = pkcs5::EncryptionScheme::try_from(PBES2_PBKDF2_SHA256_AES256CBC_ALG_ID).unwrap();\n\n let mut encoder = der::Encoder::new(&mut buffer);\n\n scheme.encode(&mut encoder).unwrap();\n\n\n\n let encoded_der = encoder.finish().unwrap();\n\n assert_eq!(encoded_der, PBES2_PBKDF2_SHA256_AES256CBC_ALG_ID);\n\n}\n", "file_path": "pkcs5/tests/pbes2.rs", "rank": 35, "score": 89556.3287641802 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn encode_ec_p256_der() {\n\n let pk = PrivateKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n let pk_encoded = 
pk.to_der();\n\n assert_eq!(EC_P256_DER_EXAMPLE, pk_encoded.as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 36, "score": 89556.3287641802 }, { "content": "/// ASN.1 `CHOICE` denotes a union of one or more possible alternatives.\n\n///\n\n/// The types MUST have distinct tags.\n\n///\n\n/// This crate models choice as a trait, with a blanket impl for all types\n\n/// which impl `Decodable + Encodable + Tagged` (i.e. they are modeled as\n\n/// a `CHOICE` with only one possible variant)\n\n// TODO(tarcieri): refactor enum custom derive to use `Choice`\n\npub trait Choice<'a>: Decodable<'a> + Encodable {\n\n /// Is the provided [`Tag`] decodable as a variant of this `CHOICE`?\n\n fn can_decode(tag: Tag) -> bool;\n\n}\n\n\n\nimpl<'a, T> Choice<'a> for T\n\nwhere\n\n T: Decodable<'a> + Encodable + Tagged,\n\n{\n\n fn can_decode(tag: Tag) -> bool {\n\n T::TAG == tag\n\n }\n\n}\n", "file_path": "der/src/asn1/choice.rs", "rank": 37, "score": 87288.33830484212 }, { "content": "#[bench]\n\nfn encode_bench(b: &mut Bencher) {\n\n let mut buf = get_b64_data().into_bytes();\n\n let raw_data = get_raw_data();\n\n b.iter(|| {\n\n let out = base64ct::encode(&raw_data, &mut buf).unwrap();\n\n test::black_box(out);\n\n });\n\n b.bytes = RAW_LEN as u64;\n\n}\n", "file_path": "base64ct/benches/mod.rs", "rank": 38, "score": 86814.7678267032 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn encode_ed25519_encpriv_aes256_sha256_pem() {\n\n let pk = EncryptedPrivateKeyInfo::try_from(ED25519_DER_AES256_SHA256_EXAMPLE).unwrap();\n\n assert_eq!(ED25519_PEM_AES256_SHA256_EXAMPLE.trim_end(), &*pk.to_pem());\n\n}\n\n\n", "file_path": "pkcs8/tests/encrypted_private_key.rs", "rank": 39, "score": 83700.22462377147 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn encode_ed25519_encpriv_aes256_sha256_der() {\n\n let pk = EncryptedPrivateKeyInfo::try_from(ED25519_DER_AES256_SHA256_EXAMPLE).unwrap();\n\n assert_eq!(ED25519_DER_AES256_SHA256_EXAMPLE, 
pk.to_der().as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/encrypted_private_key.rs", "rank": 40, "score": 83700.22462377147 }, { "content": "#[inline(always)]\n\nfn decoded_len(input_len: usize) -> usize {\n\n // overflow-proof computation of `(3*n)/4`\n\n let k = input_len / 4;\n\n let l = input_len - 4 * k;\n\n 3 * k + (3 * l) / 4\n\n}\n\n\n\n/// Validate padding is well-formed and compute unpadded length.\n\n///\n\n/// Returns length-related errors eagerly as a [`Result`], and data-dependent\n\n/// errors (i.e. malformed padding bytes) as `i16` to be combined with other\n\n/// encoding-related errors prior to branching.\n", "file_path": "base64ct/src/encoding.rs", "rank": 41, "score": 82540.25411424821 }, { "content": "fn decode<R: BufRead, W: Write>(mut reader: R, mut writer: W) -> io::Result<usize> {\n\n let mut data = Vec::new();\n\n reader.read_to_end(&mut data)?;\n\n let res: Vec<_> = BlobIterator::new(&data)\n\n .map_err(|e| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"invalid blobby data: {:?}\", e),\n\n )\n\n })?\n\n .collect();\n\n for blob in res.iter() {\n\n let blob = blob.map_err(|e| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"invalid blobby data: {:?}\", e),\n\n )\n\n })?;\n\n writer.write_all(hex::encode(blob).as_bytes())?;\n\n writer.write_all(b\"\\n\")?;\n\n }\n\n Ok(res.len())\n\n}\n\n\n", "file_path": "blobby/examples/convert.rs", "rank": 42, "score": 74103.40516032773 }, { "content": "/// ASN.1 `CHOICE` alternative: one of the ASN.1 types comprising the `CHOICE`\n\n/// which maps to an enum variant.\n\nstruct Alternative {\n\n /// ASN.1 type for this alternative.\n\n pub asn1_type: Asn1Type,\n\n\n\n /// [`Ident`] for the corresponding enum variant.\n\n pub ident: Ident,\n\n}\n\n\n\nimpl Alternative {\n\n /// Register a `CHOICE` alternative for a variant\n\n pub fn register(alternatives: &mut Alternatives, asn1_type: Asn1Type, variant: &Variant) {\n\n let alternative = Self {\n\n 
asn1_type,\n\n ident: variant.ident.clone(),\n\n };\n\n\n\n if let Some(duplicate) = alternatives.insert(asn1_type, alternative) {\n\n panic!(\n\n \"duplicate ASN.1 type `{}` for enum variants `{}` and `{}`\",\n\n asn1_type, duplicate.ident, variant.ident\n\n );\n\n }\n\n }\n\n}\n", "file_path": "der/derive/src/choice.rs", "rank": 43, "score": 67428.81087325692 }, { "content": "/// Encryption key as derived by PBKDF2\n\n// TODO(tarcieri): zeroize?\n\nstruct EncryptionKey {\n\n buffer: [u8; MAX_KEY_LEN],\n\n length: usize,\n\n}\n\n\n\nimpl EncryptionKey {\n\n /// Derive key using PBKDF2.\n\n fn derive_with_pbkdf2<D>(\n\n password: &[u8],\n\n params: &Pbkdf2Params<'_>,\n\n length: usize,\n\n ) -> Result<Self, CryptoError>\n\n where\n\n D: Update + BlockInput + FixedOutput + Reset + Default + Clone + Sync,\n\n D::BlockSize: ArrayLength<u8>,\n\n {\n\n // We only support PBKDF2-SHA256 for now\n\n if params.prf != Pbkdf2Prf::HmacWithSha256 {\n\n return Err(CryptoError);\n\n }\n", "file_path": "pkcs5/src/pbes2/encryption.rs", "rank": 44, "score": 66147.21660842677 }, { "content": "struct TokenTreeIter {\n\n buf: Vec<u8>,\n\n pos: usize,\n\n is_punct: bool,\n\n}\n\n\n\nimpl TokenTreeIter {\n\n fn new(input: TokenStream) -> Self {\n\n let mut ts = ignore_groups(input).into_iter();\n\n let input_str = match (ts.next(), ts.next()) {\n\n (Some(TokenTree::Literal(literal)), None) => literal.to_string(),\n\n _ => panic!(\"expected single string literal\"),\n\n };\n\n let mut buf: Vec<u8> = input_str.into();\n\n\n\n match buf.as_slice() {\n\n [b'\"', .., b'\"'] => (),\n\n _ => panic!(\"expected single string literal\"),\n\n };\n\n buf.pop();\n", "file_path": "hex-literal/src/lib.rs", "rank": 45, "score": 64952.488197555314 }, { "content": "/// Truncate the collection to the provided length.\n\npub trait Truncate {\n\n /// Truncate this buffer to the given number of elements.\n\n ///\n\n /// If `len` is bigger than the current number of elements (or the total\n\n /// capacity of 
the buffer) no changes are made to the contents.\n\n fn truncate(&mut self, len: usize);\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 46, "score": 63384.45541071541 }, { "content": "/// Core encoder/decoder functions for a particular Base64 variant\n\npub trait Variant {\n\n /// Is this encoding padded?\n\n const PADDED: bool;\n\n\n\n /// First character in this Base64 alphabet\n\n const BASE: u8;\n\n\n\n /// Decoder passes\n\n const DECODER: &'static [Decode];\n\n\n\n /// Encoder passes\n\n const ENCODER: &'static [Encode];\n\n\n\n /// Decode 3 bytes of a Base64 message.\n\n #[inline(always)]\n\n fn decode_3bytes(src: &[u8], dst: &mut [u8]) -> i16 {\n\n debug_assert_eq!(src.len(), 4);\n\n debug_assert!(dst.len() >= 3, \"dst too short: {}\", dst.len());\n\n\n\n let c0 = Self::decode_6bits(src[0]);\n", "file_path": "base64ct/src/variant.rs", "rank": 47, "score": 63384.12429592979 }, { "content": "/// Double and inverse double over GF(2^n).\n\n///\n\n/// This trait is implemented for 64, 128 and 256 bit block sizes. Big-endian\n\n/// order is used.\n\npub trait Dbl {\n\n /// Double block. (alternatively: multiply block by x)\n\n ///\n\n /// If most significant bit of the block equals to zero will return\n\n /// `block<<1`, otherwise `(block<<1)^C`, where `C` is the non-leading\n\n /// coefficients of the lexicographically first irreducible degree-b binary\n\n /// polynomial with the minimal number of ones.\n\n fn dbl(self) -> Self;\n\n\n\n /// Reverse double block. 
(alternatively: divbide block by x)\n\n ///\n\n /// If least significant bit of the block equals to zero will return\n\n /// `block>>1`, otherwise `(block>>1)^(1<<n)^(C>>1)`\n\n fn inv_dbl(self) -> Self;\n\n}\n\n\n\nimpl Dbl for GenericArray<u8, U8> {\n\n fn dbl(self) -> Self {\n\n let mut val: u64 = unsafe { mem::transmute_copy(&self) };\n\n val = val.to_be();\n", "file_path": "dbl/src/lib.rs", "rank": 48, "score": 63383.978341258495 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub trait ToPrivateKey {\n\n /// Serialize a [`PrivateKeyDocument`] containing a PKCS#8-encoded private key.\n\n fn to_pkcs8_der(&self) -> PrivateKeyDocument;\n\n\n\n /// Create an [`EncryptedPrivateKeyDocument`] containing the ciphertext of\n\n /// a PKCS#8 encoded private key encrypted under the given `password`.\n\n #[cfg(feature = \"encryption\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"encryption\")))]\n\n fn to_pkcs8_encrypted_der(\n\n &self,\n\n rng: impl CryptoRng + RngCore,\n\n password: impl AsRef<[u8]>,\n\n ) -> Result<EncryptedPrivateKeyDocument> {\n\n self.to_pkcs8_der().encrypt(rng, password)\n\n }\n\n\n\n /// Serialize this private key as PEM-encoded PKCS#8.\n\n #[cfg(feature = \"pem\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"pem\")))]\n\n fn to_pkcs8_pem(&self) -> Zeroizing<String> {\n", "file_path": "pkcs8/src/traits.rs", "rank": 49, "score": 62108.832699072205 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub trait ToPublicKey {\n\n /// Serialize a [`PublicKeyDocument`] containing a SPKI-encoded public key.\n\n fn to_public_key_der(&self) -> PublicKeyDocument;\n\n\n\n /// Serialize this public key as PEM-encoded SPKI.\n\n #[cfg(feature = \"pem\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"pem\")))]\n\n fn to_public_key_pem(&self) -> String {\n\n self.to_public_key_der().to_pem()\n\n }\n\n\n\n /// Write ASN.1 DER-encoded public key to the given path\n\n 
#[cfg(feature = \"std\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\n fn write_public_key_der_file(&self, path: impl AsRef<Path>) -> Result<()> {\n\n self.to_public_key_der().write_der_file(path)\n\n }\n\n\n\n /// Write ASN.1 DER-encoded public key to the given path\n\n #[cfg(all(feature = \"pem\", feature = \"std\"))]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"pem\")))]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\n fn write_public_key_pem_file(&self, path: impl AsRef<Path>) -> Result<()> {\n\n self.to_public_key_der().write_pem_file(path)\n\n }\n\n}\n", "file_path": "pkcs8/src/traits.rs", "rank": 50, "score": 62108.832699072205 }, { "content": "/// Registry of `CHOICE` alternatives for a given enum\n\ntype Alternatives = std::collections::BTreeMap<Asn1Type, Alternative>;\n\n\n\n/// Derive the `Choice` trait for an enum.\n\npub(crate) struct DeriveChoice {\n\n /// `CHOICE` alternatives for this enum.\n\n alternatives: Alternatives,\n\n\n\n /// Tags included in the impl body for `der::Choice`.\n\n choice_body: TokenStream,\n\n\n\n /// Enum match arms for the impl body for `TryFrom<der::Any<'_>>`.\n\n decode_body: TokenStream,\n\n\n\n /// Enum match arms for the impl body for `der::Encodable::encode`.\n\n encode_body: TokenStream,\n\n\n\n /// Enum match arms for the impl body for `der::Encodable::encoded_len`.\n\n encoded_len_body: TokenStream,\n\n}\n\n\n", "file_path": "der/derive/src/choice.rs", "rank": 51, "score": 60554.32681990675 }, { "content": "/// Collection types implement all of the traits in this crate.\n\npub trait Collection<T>:\n\n AsRef<[T]> + AsMut<[T]> + Default + Length + Truncate + TryExtend<T> + TryPush<T>\n\n{\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 52, "score": 59995.29689375181 }, { "content": "/// [`TryCollect`] is an extension to [`Iterator`] which allows for performing\n\n/// a fallible collection into a collection type.\n\npub trait TryCollect<A> {\n\n fn try_collect<B>(&mut self) -> Result<B, 
B::Error>\n\n where\n\n B: TryFromIterator<A>;\n\n}\n\n\n\nimpl<A, T> TryCollect<A> for T\n\nwhere\n\n T: Iterator<Item = A>,\n\n{\n\n fn try_collect<B>(&mut self) -> Result<B, B::Error>\n\n where\n\n B: TryFromIterator<A>,\n\n {\n\n B::try_from_iter(self)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nmod vec_impls {\n", "file_path": "collectable/src/lib.rs", "rank": 53, "score": 59990.47264949093 }, { "content": "/// Fallible equivalent of [`core::iter::Extend`] - extends a collection\n\n/// with the contents of an iterator, but with the option to return an error\n\n/// in the event the container's capacity has been exceeded.\n\n///\n\n/// [`core::iter::Extend`]: https://doc.rust-lang.org/core/iter/trait.Extend.html\n\npub trait TryExtend<A> {\n\n /// Error type.\n\n type Error;\n\n\n\n /// Try to extend the collection from the given iterator.\n\n fn try_extend<T>(&mut self, iter: T) -> Result<(), Self::Error>\n\n where\n\n T: IntoIterator<Item = A>;\n\n\n\n /// Try to extend the collection from the given slice.\n\n fn try_extend_from_slice(&mut self, slice: &[A]) -> Result<(), Self::Error>\n\n where\n\n A: Clone,\n\n {\n\n self.try_extend(slice.iter().cloned())\n\n }\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 54, "score": 59989.51378340484 }, { "content": "#[test]\n\nfn from_str() {\n\n let oid1 = EXAMPLE_OID_1_STRING.parse::<ObjectIdentifier>().unwrap();\n\n assert_eq!(oid1.arc(0).unwrap(), 1);\n\n assert_eq!(oid1.arc(1).unwrap(), 2);\n\n assert_eq!(oid1, EXAMPLE_OID_1);\n\n\n\n let oid2 = EXAMPLE_OID_2_STRING.parse::<ObjectIdentifier>().unwrap();\n\n assert_eq!(oid2.arc(0).unwrap(), 2);\n\n assert_eq!(oid2.arc(1).unwrap(), 16);\n\n assert_eq!(oid2, EXAMPLE_OID_2);\n\n\n\n // Too short\n\n assert!(\"1.2\".parse::<ObjectIdentifier>().is_err());\n\n\n\n // Truncated\n\n assert!(\"1.2.840.10045.2.\".parse::<ObjectIdentifier>().is_err());\n\n\n\n // Invalid first arc\n\n assert!(\"3.2.840.10045.2.1\".parse::<ObjectIdentifier>().is_err());\n\n\n\n 
// Invalid second arc\n\n assert!(\"1.40.840.10045.2.1\".parse::<ObjectIdentifier>().is_err());\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 55, "score": 58837.5934573304 }, { "content": "#[test]\n\nfn from_ber() {\n\n let oid1 = ObjectIdentifier::from_ber(EXAMPLE_OID_1_BER).unwrap();\n\n assert_eq!(oid1.arc(0).unwrap(), 1);\n\n assert_eq!(oid1.arc(1).unwrap(), 2);\n\n assert_eq!(oid1, EXAMPLE_OID_1);\n\n\n\n let oid2 = ObjectIdentifier::from_ber(EXAMPLE_OID_2_BER).unwrap();\n\n assert_eq!(oid2.arc(0).unwrap(), 2);\n\n assert_eq!(oid2.arc(1).unwrap(), 16);\n\n assert_eq!(oid2, EXAMPLE_OID_2);\n\n\n\n // Empty\n\n assert!(ObjectIdentifier::from_ber(&[]).is_err());\n\n\n\n // Truncated\n\n assert!(ObjectIdentifier::from_ber(&[42]).is_err());\n\n assert!(ObjectIdentifier::from_ber(&[42, 134]).is_err());\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 56, "score": 58837.5934573304 }, { "content": "#[test]\n\nfn as_bytes() {\n\n assert_eq!(EXAMPLE_OID_1.as_bytes(), EXAMPLE_OID_1_BER);\n\n assert_eq!(EXAMPLE_OID_2.as_bytes(), EXAMPLE_OID_2_BER);\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 57, "score": 58837.5934573304 }, { "content": "#[test]\n\nfn display() {\n\n assert_eq!(EXAMPLE_OID_1.to_string(), EXAMPLE_OID_1_STRING);\n\n assert_eq!(EXAMPLE_OID_2.to_string(), EXAMPLE_OID_2_STRING);\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 58, "score": 58837.5934573304 }, { "content": "/// Parse a private key object from a PKCS#8 encoded document.\n\npub trait FromPrivateKey: Sized {\n\n /// Parse the [`PrivateKeyInfo`] from a PKCS#8-encoded document.\n\n fn from_pkcs8_private_key_info(private_key_info: PrivateKeyInfo<'_>) -> Result<Self>;\n\n\n\n /// Deserialize PKCS#8 private key from ASN.1 DER-encoded data\n\n /// (binary format).\n\n fn from_pkcs8_der(bytes: &[u8]) -> Result<Self> {\n\n Self::from_pkcs8_private_key_info(PrivateKeyInfo::try_from(bytes)?)\n\n }\n\n\n\n /// Deserialize encrypted PKCS#8 private key from 
ASN.1 DER-encoded data\n\n /// (binary format) and attempt to decrypt it using the provided password.\n\n #[cfg(feature = \"encryption\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"encryption\")))]\n\n fn from_pkcs8_encrypted_der(bytes: &[u8], password: impl AsRef<[u8]>) -> Result<Self> {\n\n EncryptedPrivateKeyInfo::try_from(bytes)?\n\n .decrypt(password)\n\n .and_then(|doc| Self::from_pkcs8_doc(&doc))\n\n }\n\n\n", "file_path": "pkcs8/src/traits.rs", "rank": 59, "score": 58801.33249893612 }, { "content": "/// Parse a public key object from an encoded SPKI document.\n\npub trait FromPublicKey: Sized {\n\n /// Parse [`SubjectPublicKeyInfo`] into a public key object.\n\n fn from_spki(spki: SubjectPublicKeyInfo<'_>) -> Result<Self>;\n\n\n\n /// Deserialize object from ASN.1 DER-encoded [`SubjectPublicKeyInfo`]\n\n /// (binary format).\n\n fn from_public_key_der(bytes: &[u8]) -> Result<Self> {\n\n Self::from_spki(SubjectPublicKeyInfo::try_from(bytes)?)\n\n }\n\n\n\n /// Deserialize PKCS#8 private key from a [`PrivateKeyDocument`].\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn from_public_key_doc(doc: &PublicKeyDocument) -> Result<Self> {\n\n Self::from_spki(doc.spki())\n\n }\n\n\n\n /// Deserialize PEM-encoded [`SubjectPublicKeyInfo`].\n\n ///\n\n /// Keys in this format begin with the following delimiter:\n", "file_path": "pkcs8/src/traits.rs", "rank": 60, "score": 58801.33249893612 }, { "content": "/// Try to push an element onto a collection\n\npub trait TryPush<T> {\n\n /// Try to push an element onto a collection.\n\n ///\n\n /// Returns the original element if it's full.\n\n fn try_push(&mut self, item: T) -> Result<(), T>;\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 61, "score": 58797.05983454139 }, { "content": "/// Decoding trait.\n\n///\n\n/// This trait provides the core abstraction upon which all decoding operations\n\n/// are based.\n\n///\n\n/// # Blanket impl for 
`TryFrom<Any>`\n\n///\n\n/// In almost all cases you do not need to impl this trait yourself, but rather\n\n/// can instead impl `TryFrom<Any<'a>, Error = Error>` and receive a blanket\n\n/// impl of this trait.\n\npub trait Decodable<'a>: Sized {\n\n /// Attempt to decode this message using the provided decoder.\n\n fn decode(decoder: &mut Decoder<'a>) -> Result<Self>;\n\n\n\n /// Parse `Self` from the provided byte slice.\n\n fn from_bytes(bytes: &'a [u8]) -> Result<Self> {\n\n let mut decoder = Decoder::new(bytes);\n\n let result = Self::decode(&mut decoder)?;\n\n decoder.finish(result)\n\n }\n\n}\n\n\n\nimpl<'a, T> Decodable<'a> for T\n\nwhere\n\n T: TryFrom<Any<'a>, Error = Error>,\n\n{\n\n fn decode(decoder: &mut Decoder<'a>) -> Result<T> {\n\n Any::decode(decoder)\n\n .and_then(Self::try_from)\n\n .or_else(|e| decoder.error(e.kind()))\n\n }\n\n}\n", "file_path": "der/src/decodable.rs", "rank": 62, "score": 58201.65929333122 }, { "content": "#[test]\n\n#[should_panic]\n\nfn parse_empty() {\n\n ObjectIdentifier::parse(\"\");\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 63, "score": 57584.701907827504 }, { "content": "#[test]\n\nfn decode_enum_variants() {\n\n let utc_time = Time::from_bytes(UTC_TIMESTAMP).unwrap();\n\n assert_eq!(utc_time.unix_duration().as_secs(), 673573540);\n\n\n\n let general_time = Time::from_bytes(GENERAL_TIMESTAMP).unwrap();\n\n assert_eq!(general_time.unix_duration().as_secs(), 673573540);\n\n}\n\n\n", "file_path": "der/tests/derive.rs", "rank": 64, "score": 57584.701907827504 }, { "content": "#[test]\n\n#[should_panic]\n\nfn new_empty() {\n\n ObjectIdentifier::new(&[]);\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 65, "score": 57584.701907827504 }, { "content": "#[test]\n\n#[should_panic]\n\nfn parse_too_short() {\n\n ObjectIdentifier::parse(\"1.2\");\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 66, "score": 57584.701907827504 }, { "content": "#[test]\n\n#[should_panic]\n\nfn 
new_too_short() {\n\n ObjectIdentifier::new(&[1, 2]);\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 67, "score": 57584.701907827504 }, { "content": "#[test]\n\nfn reject_trailing_whitespace() {\n\n let input = \"OKC9tOTKagohutGPa6/n4ij7LQjpxAPj7tlOOOf5z4i\\n\";\n\n let mut buf = [0u8; 1024];\n\n assert_eq!(\n\n Base64Bcrypt::decode(input, &mut buf),\n\n Err(Error::InvalidEncoding)\n\n );\n\n}\n\n\n", "file_path": "base64ct/tests/bcrypt.rs", "rank": 68, "score": 57584.701907827504 }, { "content": "#[test]\n\nfn reject_trailing_whitespace() {\n\n let input = \"OKC9tOTKagohutGPa6/n4ij7LQjpxAPj7tlOOOf5z4i\\n\";\n\n let mut buf = [0u8; 1024];\n\n assert_eq!(\n\n Base64Crypt::decode(input, &mut buf),\n\n Err(Error::InvalidEncoding)\n\n );\n\n}\n\n\n", "file_path": "base64ct/tests/crypt.rs", "rank": 69, "score": 57584.701907827504 }, { "content": "/// Try to build a collection type from an [`Iterator`].\n\n///\n\n/// Fallible in the event the capacity of the underlying container type is\n\n/// exceeded.\n\npub trait TryFromIterator<A>: Sized {\n\n /// Error type.\n\n type Error;\n\n\n\n /// Try to create a new collection from the given iterator, potentially\n\n /// returning an error if the underlying collection's capacity is exceeded.\n\n fn try_from_iter<T>(iter: T) -> Result<Self, Self::Error>\n\n where\n\n T: IntoIterator<Item = A>;\n\n}\n\n\n\nimpl<A, C: Default + TryExtend<A>> TryFromIterator<A> for C {\n\n type Error = <Self as TryExtend<A>>::Error;\n\n\n\n fn try_from_iter<T>(iter: T) -> Result<Self, Self::Error>\n\n where\n\n T: IntoIterator<Item = A>,\n\n {\n\n let mut collection = Self::default();\n\n collection.try_extend(iter)?;\n\n Ok(collection)\n\n }\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 70, "score": 57008.384882856946 }, { "content": "/// Messages encoded as an ASN.1 `SEQUENCE`.\n\n///\n\n/// The \"message\" pattern this trait provides is not an ASN.1 concept,\n\n/// but rather a pattern for writing ASN.1 DER decoders 
and encoders which\n\n/// map ASN.1 `SEQUENCE` types to Rust structs with a minimum of code.\n\n///\n\n/// Types which impl this trait receive blanket impls for the [`Decodable`],\n\n/// [`Encodable`], and [`Tagged`] traits.\n\npub trait Message<'a>: Decodable<'a> {\n\n /// Call the provided function with a slice of [`Encodable`] trait objects\n\n /// representing the fields of this message.\n\n ///\n\n /// This method uses a callback because structs with fields which aren't\n\n /// directly [`Encodable`] may need to construct temporary values from\n\n /// their fields prior to encoding.\n\n fn fields<F, T>(&self, f: F) -> Result<T>\n\n where\n\n F: FnOnce(&[&dyn Encodable]) -> Result<T>;\n\n}\n\n\n\nimpl<'a, M> Encodable for M\n\nwhere\n\n M: Message<'a>,\n\n{\n\n fn encoded_len(&self) -> Result<Length> {\n\n self.fields(sequence::encoded_len)\n\n }\n\n\n", "file_path": "der/src/message.rs", "rank": 71, "score": 56676.62819314294 }, { "content": "#[test]\n\nfn decode_ed25519_der() {\n\n let pk = PrivateKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(pk.algorithm.oid, \"1.3.101.112\".parse().unwrap());\n\n assert_eq!(pk.algorithm.parameters, None);\n\n\n\n // Extracted with:\n\n // $ openssl asn1parse -inform der -in tests/examples/ed25519-priv.der\n\n assert_eq!(\n\n pk.private_key,\n\n &hex!(\"042017ED9C73E9DB649EC189A612831C5FC570238207C1AA9DFBD2C53E3FF5E5EA85\")[..]\n\n );\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 72, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(feature = \"std\")]\n\nfn read_der_file() {\n\n let pkcs8_doc = PublicKeyDocument::read_der_file(\"tests/examples/p256-pub.der\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 73, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(all(feature = \"pem\", feature = \"std\"))]\n\nfn read_pem_file() {\n\n let pkcs8_doc = 
PublicKeyDocument::read_pem_file(\"tests/examples/p256-pub.pem\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n}\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 74, "score": 56412.7007911688 }, { "content": "#[test]\n\nfn unpadded_reject_trailing_equals() {\n\n let input = \"OKC9tOTKagohutGPa6/n4ij7LQjpxAPj7tlOOOf5z4i=\";\n\n let mut buf = [0u8; 1024];\n\n assert_eq!(\n\n Base64Crypt::decode(input, &mut buf),\n\n Err(Error::InvalidEncoding)\n\n );\n\n}\n", "file_path": "base64ct/tests/crypt.rs", "rank": 75, "score": 56412.7007911688 }, { "content": "#[test]\n\nfn decode_rsa_2048_der() {\n\n let spki = SubjectPublicKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(spki.algorithm.oid, \"1.2.840.113549.1.1.1\".parse().unwrap());\n\n assert!(spki.algorithm.parameters.unwrap().is_null());\n\n assert_eq!(spki.subject_public_key, &hex!(\"3082010A0282010100B6C42C515F10A6AAF282C63EDBE24243A170F3FA2633BD4833637F47CA4F6F36E03A5D29EFC3191AC80F390D874B39E30F414FCEC1FCA0ED81E547EDC2CD382C76F61C9018973DB9FA537972A7C701F6B77E0982DFC15FC01927EE5E7CD94B4F599FF07013A7C8281BDF22DCBC9AD7CABB7C4311C982F58EDB7213AD4558B332266D743AED8192D1884CADB8B14739A8DADA66DC970806D9C7AC450CB13D0D7C575FB198534FC61BC41BC0F0574E0E0130C7BBBFBDFDC9F6A6E2E3E2AFF1CBEAC89BA57884528D55CFB08327A1E8C89F4E003CF2888E933241D9D695BCBBACDC90B44E3E095FA37058EA25B13F5E295CBEAC6DE838AB8C50AF61E298975B872F0203010001\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 76, "score": 56412.7007911688 }, { "content": "#[test]\n\nfn decode_ed25519_der() {\n\n let spki = SubjectPublicKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(spki.algorithm.oid, \"1.3.101.112\".parse().unwrap());\n\n assert_eq!(spki.algorithm.parameters, None);\n\n assert_eq!(\n\n spki.subject_public_key,\n\n &hex!(\"4D29167F3F1912A6F7ADFA293A051A15C05EC67B8F17267B1C5550DCE853BD0D\")[..]\n\n );\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 77, 
"score": 56412.7007911688 }, { "content": "#[test]\n\nfn try_from_u32_slice() {\n\n let oid1 = ObjectIdentifier::try_from([1, 2, 840, 10045, 2, 1].as_ref()).unwrap();\n\n assert_eq!(oid1.arc(0).unwrap(), 1);\n\n assert_eq!(oid1.arc(1).unwrap(), 2);\n\n assert_eq!(EXAMPLE_OID_1, oid1);\n\n\n\n let oid2 = ObjectIdentifier::try_from([2, 16, 840, 1, 101, 3, 4, 1, 42].as_ref()).unwrap();\n\n assert_eq!(oid2.arc(0).unwrap(), 2);\n\n assert_eq!(oid2.arc(1).unwrap(), 16);\n\n assert_eq!(EXAMPLE_OID_2, oid2);\n\n\n\n // Too short\n\n assert!(ObjectIdentifier::try_from([1, 2].as_ref()).is_err());\n\n\n\n // Invalid first arc\n\n assert!(ObjectIdentifier::try_from([3, 2, 840, 10045, 3, 1, 7].as_ref()).is_err());\n\n\n\n // Invalid second arc\n\n assert!(ObjectIdentifier::try_from([1, 40, 840, 10045, 3, 1, 7].as_ref()).is_err());\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 78, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(feature = \"std\")]\n\nfn read_der_file() {\n\n let pkcs8_doc = PrivateKeyDocument::read_der_file(\"tests/examples/p256-priv.der\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 79, "score": 56412.7007911688 }, { "content": "#[test]\n\nfn unpadded_reject_trailing_equals() {\n\n let input = \"OKC9tOTKagohutGPa6/n4ij7LQjpxAPj7tlOOOf5z4i=\";\n\n let mut buf = [0u8; 1024];\n\n assert_eq!(\n\n Base64Bcrypt::decode(input, &mut buf),\n\n Err(Error::InvalidEncoding)\n\n );\n\n}\n", "file_path": "base64ct/tests/bcrypt.rs", "rank": 80, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn decode_rsa_2048_pem() {\n\n let pkcs8_doc: PrivateKeyDocument = RSA_2048_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), RSA_2048_DER_EXAMPLE);\n\n\n\n // Ensure `PrivateKeyDocument` parses successfully\n\n let pk_info = PrivateKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n 
assert_eq!(pkcs8_doc.private_key_info().algorithm, pk_info.algorithm);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 81, "score": 56412.7007911688 }, { "content": "#[test]\n\nfn decode_rsa_2048_der() {\n\n let pk = PrivateKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(pk.algorithm.oid, \"1.2.840.113549.1.1.1\".parse().unwrap());\n\n assert!(pk.algorithm.parameters.unwrap().is_null());\n\n\n\n // Extracted with:\n\n // $ openssl asn1parse -inform der -in tests/examples/rsa2048-priv.der\n\n assert_eq!(pk.private_key, &hex!(\"308204A30201000282010100B6C42C515F10A6AAF282C63EDBE24243A170F3FA2633BD4833637F47CA4F6F36E03A5D29EFC3191AC80F390D874B39E30F414FCEC1FCA0ED81E547EDC2CD382C76F61C9018973DB9FA537972A7C701F6B77E0982DFC15FC01927EE5E7CD94B4F599FF07013A7C8281BDF22DCBC9AD7CABB7C4311C982F58EDB7213AD4558B332266D743AED8192D1884CADB8B14739A8DADA66DC970806D9C7AC450CB13D0D7C575FB198534FC61BC41BC0F0574E0E0130C7BBBFBDFDC9F6A6E2E3E2AFF1CBEAC89BA57884528D55CFB08327A1E8C89F4E003CF2888E933241D9D695BCBBACDC90B44E3E095FA37058EA25B13F5E295CBEAC6DE838AB8C50AF61E298975B872F0203010001028201007ECC8362C0EDB0741164215E22F74AB9D91BA06900700CF63690E5114D8EE6BDCFBB2E3F9614692A677A083F168A5E52E5968E6407B9D97C6E0E4064F82DA0B758A14F17B9B7D41F5F48E28D6551704F56E69E7AA9FA630FC76428C06D25E455DCFC55B7AC2B4F76643FDED3FE15FF78ABB27E65ACC4AAD0BDF6DB27EF60A6910C5C4A085ED43275AB19C1D997A32C6EFFCE7DF2D1935F6E601EEDE161A12B5CC27CA21F81D2C99C3D1EA08E90E3053AB09BEFA724DEF0D0C3A3C1E9740C0D9F76126A149EC0AA7D8078205484254D951DB07C4CF91FB6454C096588FD5924DBABEB359CA2025268D004F9D66EB3D6F7ADC1139BAD40F16DDE639E11647376C102818100DCC061242D4E92AFAEE72AC513CA65B9F77036F9BD7E0E6E61461A7EF7654225EC153C7E5C31A6157A6E5A13FF6E178E8758C1CB33D9D6BBE3179EF18998E422ECDCBED78F4ECFDBE5F4FCD8AEC2C9D0DC86473CA9BD16D9D238D21FB5DDEFBEB143CA61D0BD6AA8D91F33A097790E9640DBC91085DC5F26343BA3138F6B2D6702818100D3F314757E40E954836F92BE24236AF2F0DA04A34653C180AF67E960086D93FDE65CB23EFD9D09374762F5981E361
849AF68CDD75394FF6A4E06EB69B209E4228DB2DFA70E40F7F9750A528176647B788D0E5777A2CB8B22E3CD267FF70B4F3B02D3AAFB0E18C590A564B03188B0AA5FC48156B07622214243BD1227EFA7F2F902818100CE68B7AC1B0D100D636E55488753C5C09843FDB390E2705DF7689457C9BD8D9765E30978617E2EFC8048F4C324206DB86087B654E97BB3D464E7EE3F8CD83FE10436F7DF18E9A963C4E64911D67EDE34042F2E26E3D3A1AD346ADAD6B9B7F67708CB094E62DEE9FF4D5D6669AF988AF2255D1CE8ED317C6A7D8691DA354D12DB02818025F6E5944220286B4DFBBF4235C0EE5843D2198091895120D6CA7B200B826D3ECE738E2E00498FAC0A2A6CA969C7F0C3CA1AB0BC40297132BE7538D7BEDF4CB0EFC6B98EF7DBA54F56AA99AABCE534C49C27947D4678C51C63C78C7CE1687231B4C8EB587AE6EF0480CBAF4FC0173CFD587A7E67AF515FB9B9DE75111839722902818031995406D406207CADEAEA35B38D040C5F8A9A1AE0827E9ED06B153D83B6821935B4B36A82BE9D56C791B58C27271A5793D53A1D657C08997960B1433E5171987F452F144A7C72306D63E1D3FFC0B71B75AB08F2E45A482E988451CBE478E12EB228D07456C924B66F6CED048D853F533E31A68614F1C3CE6D8EC9983CE72AF7\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 82, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn decode_ed25519_pem() {\n\n let doc: PublicKeyDocument = ED25519_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(doc.as_ref(), ED25519_DER_EXAMPLE);\n\n\n\n // Ensure `PublicKeyDocument` parses successfully\n\n let spki = SubjectPublicKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n assert_eq!(doc.spki(), spki);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 83, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn decode_ed25519_pem() {\n\n let pkcs8_doc: PrivateKeyDocument = ED25519_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), ED25519_DER_EXAMPLE);\n\n\n\n // Ensure `PrivateKeyDocument` parses successfully\n\n let pk_info = PrivateKeyInfo::try_from(ED25519_DER_EXAMPLE).unwrap();\n\n assert_eq!(pkcs8_doc.private_key_info().algorithm, pk_info.algorithm);\n\n}\n\n\n", "file_path": 
"pkcs8/tests/private_key.rs", "rank": 84, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn decode_rsa_2048_pem() {\n\n let doc: PublicKeyDocument = RSA_2048_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(doc.as_ref(), RSA_2048_DER_EXAMPLE);\n\n\n\n // Ensure `PublicKeyDocument` parses successfully\n\n let spki = SubjectPublicKeyInfo::try_from(RSA_2048_DER_EXAMPLE).unwrap();\n\n assert_eq!(doc.spki(), spki);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 85, "score": 56412.7007911688 }, { "content": "#[test]\n\n#[cfg(all(feature = \"pem\", feature = \"std\"))]\n\nfn read_pem_file() {\n\n let pkcs8_doc = PrivateKeyDocument::read_pem_file(\"tests/examples/p256-priv.pem\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n}\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 86, "score": 56412.7007911688 }, { "content": "#[test]\n\nfn decode_ec_p256_der() {\n\n let spki = SubjectPublicKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(spki.algorithm.oid, \"1.2.840.10045.2.1\".parse().unwrap());\n\n\n\n assert_eq!(\n\n spki.algorithm.parameters.unwrap().oid().unwrap(),\n\n \"1.2.840.10045.3.1.7\".parse().unwrap()\n\n );\n\n\n\n assert_eq!(spki.subject_public_key, &hex!(\"041CACFFB55F2F2CEFD89D89EB374B2681152452802DEEA09916068137D839CF7FC481A44492304D7EF66AC117BEFE83A8D08F155F2B52F9F618DD447029048E0F\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 87, "score": 55314.001292555855 }, { "content": "#[test]\n\n#[cfg(feature = \"std\")]\n\nfn read_der_file() {\n\n let pkcs8_doc = EncryptedPrivateKeyDocument::read_der_file(\n\n \"tests/examples/ed25519-encpriv-aes256-sha256.der\",\n\n )\n\n .unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), ED25519_DER_AES256_SHA256_EXAMPLE);\n\n}\n\n\n", "file_path": "pkcs8/tests/encrypted_private_key.rs", "rank": 88, "score": 55314.001292555855 }, { "content": "#[test]\n\n#[should_panic]\n\nfn parse_invalid_first_arc() {\n\n 
ObjectIdentifier::parse(\"3.2.840.10045.3.1.7\");\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 89, "score": 55314.001292555855 }, { "content": "#[test]\n\nfn decode_pbes2_pbkdf2_sha1_aes128cbc() {\n\n let scheme = pkcs5::EncryptionScheme::try_from(PBES2_PBKDF2_SHA1_AES128CBC_ALG_ID).unwrap();\n\n let params = scheme.pbes2().unwrap();\n\n\n\n let pbkdf2_params = params.kdf.pbkdf2().unwrap();\n\n assert_eq!(pbkdf2_params.salt, &hex!(\"e8765e01e43b6bad\"));\n\n assert_eq!(pbkdf2_params.iteration_count, 2048);\n\n assert_eq!(pbkdf2_params.key_length, None);\n\n assert_eq!(pbkdf2_params.prf, pbes2::Pbkdf2Prf::HmacWithSha1);\n\n\n\n match params.encryption {\n\n pbes2::EncryptionScheme::Aes128Cbc { iv } => {\n\n assert_eq!(iv, &hex!(\"223080a71bcd2b9a256d876c924979d2\"));\n\n }\n\n other => panic!(\"unexpected encryption scheme: {:?}\", other),\n\n }\n\n}\n\n\n\n/// Decoding tests\n", "file_path": "pkcs5/tests/pbes2.rs", "rank": 90, "score": 55314.001292555855 }, { "content": "#[test]\n\nfn decode_ec_p256_der() {\n\n let pk = PrivateKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(pk.algorithm.oid, \"1.2.840.10045.2.1\".parse().unwrap());\n\n\n\n assert_eq!(\n\n pk.algorithm.parameters.unwrap().oid().unwrap(),\n\n \"1.2.840.10045.3.1.7\".parse().unwrap()\n\n );\n\n\n\n // Extracted with:\n\n // $ openssl asn1parse -inform der -in tests/examples/p256-priv.der\n\n assert_eq!(pk.private_key, &hex!(\"306B020101042069624171561A63340DE0E7D869F2A05492558E1A04868B6A9F854A866788188DA144034200041CACFFB55F2F2CEFD89D89EB374B2681152452802DEEA09916068137D839CF7FC481A44492304D7EF66AC117BEFE83A8D08F155F2B52F9F618DD447029048E0F\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 91, "score": 55314.001292555855 }, { "content": "#[test]\n\n#[should_panic]\n\nfn new_invalid_first_arc() {\n\n ObjectIdentifier::new(&[3, 2, 840, 10045, 3, 1, 7]);\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 92, "score": 55314.001292555855 }, 
{ "content": "#[test]\n\n#[cfg(all(feature = \"pem\", feature = \"std\"))]\n\nfn read_pem_file() {\n\n let pkcs8_doc = EncryptedPrivateKeyDocument::read_pem_file(\n\n \"tests/examples/ed25519-encpriv-aes256-sha256.pem\",\n\n )\n\n .unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), ED25519_DER_AES256_SHA256_EXAMPLE);\n\n}\n", "file_path": "pkcs8/tests/encrypted_private_key.rs", "rank": 93, "score": 55314.001292555855 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn decode_ec_p256_pem() {\n\n let doc: PublicKeyDocument = EC_P256_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n\n\n // Ensure `PublicKeyDocument` parses successfully\n\n let spki = SubjectPublicKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n assert_eq!(doc.spki(), spki);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 94, "score": 55314.001292555855 }, { "content": "#[test]\n\n#[should_panic]\n\nfn new_invalid_second_arc() {\n\n ObjectIdentifier::new(&[1, 40, 840, 10045, 3, 1, 7]);\n\n}\n\n\n", "file_path": "const-oid/tests/lib.rs", "rank": 95, "score": 55314.001292555855 }, { "content": "#[test]\n\nfn decode_pbes2_pbkdf2_sha256_aes256cbc() {\n\n let scheme = pkcs5::EncryptionScheme::try_from(PBES2_PBKDF2_SHA256_AES256CBC_ALG_ID).unwrap();\n\n let params = scheme.pbes2().unwrap();\n\n\n\n let pbkdf2_params = params.kdf.pbkdf2().unwrap();\n\n assert_eq!(pbkdf2_params.salt, &hex!(\"79d982e70df91a88\"));\n\n assert_eq!(pbkdf2_params.iteration_count, 2048);\n\n assert_eq!(pbkdf2_params.key_length, None);\n\n assert_eq!(pbkdf2_params.prf, pbes2::Pbkdf2Prf::HmacWithSha256);\n\n\n\n match params.encryption {\n\n pbes2::EncryptionScheme::Aes256Cbc { iv } => {\n\n assert_eq!(iv, &hex!(\"b2d02d78b2efd9dff694cf8e0af40925\"));\n\n }\n\n other => panic!(\"unexpected encryption scheme: {:?}\", other),\n\n }\n\n}\n\n\n\n/// Encoding tests\n", "file_path": "pkcs5/tests/pbes2.rs", "rank": 96, "score": 55314.001292555855 }, { "content": 
"#[test]\n\n#[should_panic]\n\nfn parse_invalid_second_arc() {\n\n ObjectIdentifier::parse(\"1.40.840.10045.3.1.7\");\n\n}\n", "file_path": "const-oid/tests/lib.rs", "rank": 97, "score": 55314.001292555855 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn decode_ec_p256_pem() {\n\n let pkcs8_doc: PrivateKeyDocument = EC_P256_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n\n\n // Ensure `PrivateKeyDocument` parses successfully\n\n let pk_info = PrivateKeyInfo::try_from(EC_P256_DER_EXAMPLE).unwrap();\n\n assert_eq!(pkcs8_doc.private_key_info().algorithm, pk_info.algorithm);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 98, "score": 55314.001292555855 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"big-uint\")))]\n\npub trait BigUIntSize: Unsigned {}\n\n\n\nmacro_rules! impl_size {\n\n ($($int:ident),+) => {\n\n $(impl BigUIntSize for typenum::consts::$int {})+\n\n };\n\n}\n\n\n\n// Sizes supported by the current implementation (1 - 512 bytes)\n\nimpl_size!(\n\n U1, U2, U3, U4, U5, U6, U7, U8, U9, U10, U11, U12, U13, U14, U15, U16, U17, U18, U19, U20, U21,\n\n U22, U23, U24, U25, U26, U27, U28, U29, U30, U31, U32, U33, U34, U35, U36, U37, U38, U39, U40,\n\n U41, U42, U43, U44, U45, U46, U47, U48, U49, U50, U51, U52, U53, U54, U55, U56, U57, U58, U59,\n\n U60, U61, U62, U63, U64, U65, U66, U67, U68, U69, U70, U71, U72, U73, U74, U75, U76, U77, U78,\n\n U79, U80, U81, U82, U83, U84, U85, U86, U87, U88, U89, U90, U91, U92, U93, U94, U95, U96, U97,\n\n U98, U99, U100, U101, U102, U103, U104, U105, U106, U107, U108, U109, U110, U111, U112, U113,\n\n U114, U115, U116, U117, U118, U119, U120, U121, U122, U123, U124, U125, U126, U127, U128, U129,\n\n U130, U131, U132, U133, U134, U135, U136, U137, U138, U139, U140, U141, U142, U143, U144, U145,\n\n U146, U147, U148, U149, U150, U151, U152, U153, U154, U155, U156, U157, U158, U159, U160, U161,\n\n U162, U163, U164, U165, U166, U167, U168, U169, 
U170, U171, U172, U173, U174, U175, U176, U177,\n", "file_path": "der/src/asn1/big_uint.rs", "rank": 99, "score": 54721.069030414496 } ]
Rust
src/format.rs
samwho/hmm
8894296e98ab43e6937adbfe88af413c7edee881
use super::{entry::Entry, Result}; use chrono::prelude::*; use colored::*; use handlebars::{ Context, Handlebars, Helper, HelperDef, HelperResult, JsonRender, Output, RenderContext, }; use std::collections::BTreeMap; pub struct Format<'a> { renderer: Handlebars<'a>, data: BTreeMap<&'static str, String>, } impl<'a> Format<'a> { pub fn with_template(template: &str) -> Result<Self> { let mut renderer = Handlebars::new(); renderer.set_strict_mode(true); renderer.register_escape_fn(|s| s.trim().to_owned()); renderer.register_template_string("template", template)?; renderer.register_helper("indent", Box::new(IndentHelper::new())); renderer.register_helper("strftime", Box::new(StrftimeHelper {})); renderer.register_helper("color", Box::new(ColorHelper {})); renderer.register_helper("markdown", Box::new(MarkdownHelper {})); Ok(Format { renderer, data: BTreeMap::new(), }) } pub fn format_entry(&mut self, entry: &Entry) -> Result<String> { self.data.clear(); self.data.insert("datetime", entry.datetime().to_rfc3339()); self.data.insert("message", entry.message().to_owned()); Ok(self.renderer.render("template", &self.data)?) } } struct IndentHelper<'a> { wrapper: textwrap::Wrapper<'a, textwrap::HyphenSplitter>, } impl<'a> IndentHelper<'a> { fn new() -> Self { let wrapper = textwrap::Wrapper::with_termwidth() .initial_indent("│ ") .subsequent_indent("│ "); IndentHelper { wrapper } } } impl<'a> HelperDef for IndentHelper<'a> { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let param = h.param(0).unwrap(); Ok(out.write(&self.wrapper.fill(&param.value().render()))?) 
} } struct StrftimeHelper {} impl HelperDef for StrftimeHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let date_str = h.param(1).unwrap().value().render(); let date = DateTime::parse_from_rfc3339(&date_str) .map_err(|_| handlebars::RenderError::new("couldn't parse date"))?; let local_date = date.with_timezone(&Local); let format_str = h.param(0).unwrap().value().render(); Ok(out.write(&local_date.format(&format_str).to_string())?) } } struct ColorHelper {} impl HelperDef for ColorHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let color = h.param(0).unwrap().value().render(); let s = h.param(1).unwrap().value().render(); Ok(out.write(&format!("{}", s.color(color)))?) } } struct MarkdownHelper {} impl HelperDef for MarkdownHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let s = h.param(0).unwrap().value().render(); Ok(out.write(&format!("{}", termimad::text(&s)))?) } } #[cfg(test)] mod tests { use super::*; use test_case::test_case; #[test_case("{{ message }}" => "hello world")] #[test_case("{{ color \"blue\" message }}" => "hello world".blue().to_string())] #[test_case("{{ indent message }}" => "│ hello world")] #[test_case("{{ strftime \"%Y-%m-%d %H:%M:%S\" datetime }}" => "2020-01-02 03:04:05")] fn test_format(template: &str) -> String { Format::with_template(template) .unwrap() .format_entry(&Entry::new( Utc.ymd(2020, 1, 2).and_hms(3, 4, 5).into(), "hello world".to_owned(), )) .unwrap() } }
use super::{entry::Entry, Result}; use chrono::prelude::*; use colored::*; use handlebars::{ Context, Handlebars, Helper, HelperDef, HelperResult, JsonRender, Output, RenderContext, }; use std::collections::BTreeMap; pub struct Format<'a> { renderer: Handlebars<'a>, data: BTreeMap<&'static str, String>, } impl<'a> Format<'a> { pub fn with_template(template: &str) -> Result<Self> { let mut renderer = Handlebars::new(); renderer.set_strict_mode(true); renderer.register_escape_fn(|s| s.trim().to_owned()); renderer.register_template_string("template", template)?; renderer.register_helper("indent", Box::new(IndentHelper::new())); renderer.register_helper("strftime", Box::new(StrftimeHelper {})); renderer.register_helper("color", Box::new(ColorHelper {})); renderer.register_helper("markdown", Box::new(MarkdownHelper {})); Ok(Format { renderer, data: BTreeMap::new(), }) } pub fn format_entry(&mut self, entry: &Entry) -> Result<String> { self.data.clear(); self.data.insert("datetime", entry.datetime().to_rfc3339()); self.data.insert("message", entry.message().to_owned()); Ok(self.renderer.render("template", &self.data)?) } } struct IndentHelper<'a> { wrapper: textwrap::Wrapper<'a, textwrap::HyphenSplitter>, } impl<'a> IndentHelper<'a> { fn new() -> Self { let wrapper = textwrap::Wrapper::with_termwidth() .initial_indent("│ ") .subsequent_indent("│ "); IndentHelper { wrapper } } } impl<'a> HelperDef for IndentHelper<'a> { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let param = h.param(0).unwrap(); Ok(out.write(&self.wrapper.fill(&param.value().render()))?) 
} } struct StrftimeHelper {} impl HelperDef for StrftimeHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let date_str = h.param(1).unwrap().value().render(); let date = DateTime::parse_from_rfc3339(&date_str) .map_err(|_| handlebars::RenderError::new("couldn't parse date"))?; let local_date = date.with_timezone(&Local); let format_str = h.param(0).unwrap().value().render(); Ok(out.write(&local_date.format(&format_str).to_string())?) } } struct ColorHelper {} impl HelperDef for ColorHelper {
} struct MarkdownHelper {} impl HelperDef for MarkdownHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let s = h.param(0).unwrap().value().render(); Ok(out.write(&format!("{}", termimad::text(&s)))?) } } #[cfg(test)] mod tests { use super::*; use test_case::test_case; #[test_case("{{ message }}" => "hello world")] #[test_case("{{ color \"blue\" message }}" => "hello world".blue().to_string())] #[test_case("{{ indent message }}" => "│ hello world")] #[test_case("{{ strftime \"%Y-%m-%d %H:%M:%S\" datetime }}" => "2020-01-02 03:04:05")] fn test_format(template: &str) -> String { Format::with_template(template) .unwrap() .format_entry(&Entry::new( Utc.ymd(2020, 1, 2).and_hms(3, 4, 5).into(), "hello world".to_owned(), )) .unwrap() } }
fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let color = h.param(0).unwrap().value().render(); let s = h.param(1).unwrap().value().render(); Ok(out.write(&format!("{}", s.color(color)))?) }
function_block-full_function
[ { "content": "pub fn from_str(s: &str) -> Error {\n\n s.to_owned().into()\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(io::Error),\n\n Csv(csv::Error),\n\n QuickCsv(quick_csv::error::Error),\n\n ChronoParse(chrono::format::ParseError),\n\n SerdeJson(serde_json::error::Error),\n\n Template(handlebars::TemplateError),\n\n TemplateRender(handlebars::TemplateRenderError),\n\n Render(handlebars::RenderError),\n\n Utf8(std::string::FromUtf8Error),\n\n Regex(regex::Error),\n\n String(String),\n\n}\n\n\n\nimpl error::Error for Error {\n", "file_path": "src/error.rs", "rank": 0, "score": 79394.23104230103 }, { "content": "pub fn start_of_current_line<T: Seek + Read>(f: &mut T) -> Result<u64> {\n\n let mut buf = [0; 1];\n\n let mut pos = f.seek(SeekFrom::Current(0))?;\n\n\n\n if let Err(e) = f.read_exact(&mut buf) {\n\n // If we try to read past the end of the file, which is what\n\n // ErrorKind::UnexpectedEof represents, it's not really a problem. We\n\n // just quietly drop in to the loop below and start backtracking. 
If\n\n // not, we raise the error.\n\n if e.kind() != ErrorKind::UnexpectedEof {\n\n return Err(e.into());\n\n }\n\n }\n\n\n\n if buf[0] == 0x0a {\n\n if pos == 0 {\n\n f.seek(SeekFrom::Start(0))?;\n\n return Ok(0);\n\n }\n\n f.seek(SeekFrom::Start(pos - 1))?;\n", "file_path": "src/seek.rs", "rank": 2, "score": 66377.26857491818 }, { "content": "pub fn start_of_next_line<T: Seek + Read>(f: &mut T) -> Result<Option<u64>> {\n\n let mut buf = [0; 1];\n\n let mut pos = f.seek(SeekFrom::Current(0))?;\n\n\n\n loop {\n\n pos += 1;\n\n if let Err(e) = f.read_exact(&mut buf) {\n\n if e.kind() == ErrorKind::UnexpectedEof {\n\n return Ok(None);\n\n } else {\n\n return Err(e.into());\n\n }\n\n }\n\n\n\n if buf[0] == 0x0a {\n\n return Ok(Some(pos));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/seek.rs", "rank": 3, "score": 63230.29858408627 }, { "content": "pub fn start_of_prev_line<T: Seek + Read>(f: &mut T) -> Result<Option<u64>> {\n\n start_of_current_line(f)?;\n\n\n\n let mut buf = [0; 1];\n\n let mut pos = f.seek(SeekFrom::Current(0))?;\n\n\n\n if pos == 0 {\n\n return Ok(None);\n\n }\n\n\n\n pos -= 1;\n\n f.seek(SeekFrom::Start(pos))?;\n\n\n\n loop {\n\n if pos == 0 {\n\n f.seek(SeekFrom::Start(0))?;\n\n return Ok(Some(0));\n\n }\n\n\n\n pos -= 1;\n\n f.seek(SeekFrom::Start(pos))?;\n\n f.read_exact(&mut buf)?;\n\n\n\n if buf[0] == 0x0a {\n\n return Ok(Some(pos + 1));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/seek.rs", "rank": 4, "score": 63230.29858408627 }, { "content": "use super::{\n\n error::{self, Error},\n\n Result,\n\n};\n\nuse chrono::prelude::*;\n\nuse csv::StringRecord;\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::io::Write;\n\n\n\npub struct Entry {\n\n datetime: DateTime<FixedOffset>,\n\n message: String,\n\n}\n\n\n\nimpl Entry {\n\n pub fn new(datetime: DateTime<FixedOffset>, message: String) -> Self {\n\n Entry { datetime, message }\n\n }\n\n\n\n pub fn with_message(message: &str) -> Self {\n", "file_path": "src/entry.rs", "rank": 8, "score": 
19153.863459371063 }, { "content": " Self::new(Utc::now().into(), message.trim().to_owned())\n\n }\n\n\n\n pub fn datetime(&self) -> &DateTime<FixedOffset> {\n\n &self.datetime\n\n }\n\n\n\n pub fn message(&self) -> &str {\n\n &self.message\n\n }\n\n\n\n pub fn contains(&self, s: &str) -> bool {\n\n self.message.contains(s)\n\n }\n\n\n\n pub fn write(&self, mut w: impl Write) -> Result<()> {\n\n Ok(w.write_all(self.to_csv_row()?.as_bytes())?)\n\n }\n\n\n\n pub fn to_csv_row(&self) -> Result<String> {\n", "file_path": "src/entry.rs", "rank": 9, "score": 19150.956900267985 }, { "content": "use super::{entry::Entry, seek, Result};\n\nuse chrono::prelude::*;\n\nuse rand::distributions::{Distribution, Uniform};\n\nuse std::convert::TryInto;\n\nuse std::io::{BufRead, Read, Seek, SeekFrom};\n\n\n\npub struct Entries<T: Seek + Read + BufRead> {\n\n f: T,\n\n buf: String,\n\n}\n\n\n\nimpl<T: Seek + Read + BufRead> Entries<T> {\n\n pub fn new(f: T) -> Self {\n\n Entries {\n\n f,\n\n buf: String::with_capacity(4096),\n\n }\n\n }\n\n\n\n pub fn len(&mut self) -> Result<u64> {\n", "file_path": "src/entries.rs", "rank": 10, "score": 19150.76603113586 }, { "content": " datetime: chrono::DateTime::parse_from_rfc3339(date)?,\n\n message: serde_json::from_str(&msg)?,\n\n })\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for Entry {\n\n type Error = Error;\n\n\n\n fn try_from(s: &str) -> Result<Self> {\n\n quick_csv::Csv::from_string(s).next().unwrap()?.try_into()\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for Entry {\n\n type Error = Error;\n\n\n\n fn try_from(s: String) -> Result<Self> {\n\n s.as_str().try_into()\n\n }\n", "file_path": "src/entry.rs", "rank": 11, "score": 19150.038236438577 }, { "content": " let mut buf = Vec::new();\n\n {\n\n let mut writer = csv::Writer::from_writer(&mut buf);\n\n writer.write_record(&[\n\n self.datetime.to_rfc3339(),\n\n serde_json::to_string(&self.message)?,\n\n ])?;\n\n }\n\n Ok(String::from_utf8(buf)?)\n\n }\n\n}\n\n\n\nimpl TryFrom<quick_csv::Row> for 
Entry {\n\n type Error = Error;\n\n\n\n fn try_from(r: quick_csv::Row) -> Result<Self> {\n\n let mut cols = r.columns()?;\n\n\n\n let date = cols\n\n .next()\n", "file_path": "src/entry.rs", "rank": 12, "score": 19147.749685565737 }, { "content": " .ok_or_else(|| error::from_str(\"malformed CSV\"))?;\n\n let msg = cols\n\n .next()\n\n .ok_or_else(|| error::from_str(\"malformed CSV\"))?;\n\n\n\n Ok(Entry {\n\n datetime: chrono::DateTime::parse_from_rfc3339(date)?,\n\n message: serde_json::from_str(&msg)?,\n\n })\n\n }\n\n}\n\n\n\nimpl TryFrom<&StringRecord> for Entry {\n\n type Error = Error;\n\n\n\n fn try_from(sr: &StringRecord) -> Result<Self> {\n\n let date = sr.get(0).ok_or_else(|| error::from_str(\"malformed CSV\"))?;\n\n let msg = sr.get(1).ok_or_else(|| error::from_str(\"malformed CSV\"))?;\n\n\n\n Ok(Entry {\n", "file_path": "src/entry.rs", "rank": 13, "score": 19146.665650051105 }, { "content": " fn test_seek_to_first_single_entry() {\n\n let date = DateTime::parse_from_rfc3339(\"2021-04-02T00:00:00Z\").unwrap();\n\n let r = Cursor::new(Vec::from(\n\n \"2021-04-02T20:05:39.428673666+00:00,\\\"\\\"\\\"Hello world\\\"\\\"\\\"\\n\".as_bytes(),\n\n ));\n\n let mut entries = Entries::new(r);\n\n entries.seek_to_first(&date).unwrap();\n\n let message = entries\n\n .next_entry()\n\n .unwrap()\n\n .map(|e| e.message().to_owned());\n\n\n\n assert_eq!(message, Some(\"Hello world\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn test_navigating_entries() -> Result<()> {\n\n let r = Cursor::new(Vec::from(TESTDATA.as_bytes()));\n\n let mut entries = Entries::new(r);\n\n\n", "file_path": "src/entries.rs", "rank": 14, "score": 19145.58538163876 }, { "content": " #[test_case(\"2021-01-01T00:00:00.000000000+00:00\" => None)]\n\n // Testing dates that aren't exact matches but land us in the middle of the\n\n // file.\n\n #[test_case(\"2020-02-12T23:08:00+00:00\" => Some(\"2\".to_owned()))]\n\n #[test_case(\"2020-02-12T23:59:00+00:00\" => Some(\"3\".to_owned()))]\n\n 
#[test_case(\"2020-04-12T23:27:00+00:00\" => Some(\"4\".to_owned()))]\n\n #[test_case(\"2020-05-12T23:27:00+00:00\" => Some(\"5\".to_owned()))]\n\n #[test_case(\"2020-06-13T10:00:00+00:00\" => Some(\"6\".to_owned()))]\n\n fn test_seek_to_first(date_str: &str) -> Option<String> {\n\n let date = DateTime::parse_from_rfc3339(date_str).unwrap();\n\n let r = Cursor::new(Vec::from(TESTDATA.as_bytes()));\n\n let mut entries = Entries::new(r);\n\n entries.seek_to_first(&date).unwrap();\n\n entries\n\n .next_entry()\n\n .unwrap()\n\n .map(|e| e.message().to_owned())\n\n }\n\n\n\n #[test]\n", "file_path": "src/entries.rs", "rank": 15, "score": 19144.972237873935 }, { "content": "\n\n pub fn seek_to_first(&mut self, date: &chrono::DateTime<FixedOffset>) -> Result<()> {\n\n let file_size = self.len()?;\n\n let mut end = file_size;\n\n let mut start = self.f.seek(SeekFrom::Start(0))?;\n\n\n\n while start < end {\n\n let cur = start + (end - start) / 2;\n\n\n\n let entry = match self.at(cur)? {\n\n Some(entry) => entry,\n\n // If we get none back from at() it means we've tried to seek past\n\n // the end of the file. We break out of the loop in this case and\n\n // ultimately return to the caller with the file cursor at end of\n\n // file. 
This allows people to seek backwards from the end if they\n\n // want to.\n\n None => break,\n\n };\n\n\n\n if entry.datetime() >= date {\n", "file_path": "src/entries.rs", "rank": 16, "score": 19143.017685206487 }, { "content": " }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T: Seek + Read + BufRead> Iterator for Entries<T> {\n\n type Item = Result<Entry>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self.next_entry() {\n\n Ok(opt) => match opt {\n\n Some(entry) => Some(Ok(entry)),\n\n None => None,\n\n },\n\n Err(e) => Some(Err(e)),\n\n }\n", "file_path": "src/entries.rs", "rank": 17, "score": 19143.01383579685 }, { "content": " pub fn seek_to_end(&mut self) -> Result<()> {\n\n let len = self.len()?;\n\n self.at(len)?;\n\n Ok(())\n\n }\n\n\n\n pub fn seek_to_next(&mut self) -> Result<Option<u64>> {\n\n seek::start_of_next_line(&mut self.f)\n\n }\n\n\n\n pub fn seek_to_prev(&mut self) -> Result<Option<u64>> {\n\n seek::start_of_prev_line(&mut self.f)\n\n }\n\n\n\n pub fn next_entry(&mut self) -> Result<Option<Entry>> {\n\n self.buf.clear();\n\n self.f.read_line(&mut self.buf)?;\n\n\n\n // read_line will leave the buffer empty if it was attempting to read\n\n // past the end of the file. We set the file cursor to past the end of\n", "file_path": "src/entries.rs", "rank": 18, "score": 19142.813378220188 }, { "content": " let prev = self.f.seek(SeekFrom::Current(0))?;\n\n let len = self.f.seek(SeekFrom::End(0))?;\n\n self.f.seek(SeekFrom::Start(prev))?;\n\n Ok(len)\n\n }\n\n\n\n pub fn is_empty(&mut self) -> Result<bool> {\n\n Ok(self.len()? == 0)\n\n }\n\n\n\n pub fn at(&mut self, pos: u64) -> Result<Option<Entry>> {\n\n if pos > self.len()? 
{\n\n return Ok(None);\n\n }\n\n\n\n self.f.seek(SeekFrom::Start(pos))?;\n\n seek::start_of_current_line(&mut self.f)?;\n\n self.next_entry()\n\n }\n\n\n", "file_path": "src/entries.rs", "rank": 19, "score": 19142.001095743643 }, { "content": " assert_eq!(entries.next_entry()?.unwrap().message(), \"5\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"6\");\n\n assert_eq!(entries.next_entry()?.is_none(), true);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_seek_to_end() -> Result<()> {\n\n let r = Cursor::new(Vec::from(TESTDATA.as_bytes()));\n\n let mut entries = Entries::new(r);\n\n\n\n entries.seek_to_end()?;\n\n assert_eq!(entries.prev_entry()?.unwrap().message(), \"6\");\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_iterator() {\n\n let r = Cursor::new(Vec::from(TESTDATA.as_bytes()));\n\n let mut entries = Entries::new(r);\n", "file_path": "src/entries.rs", "rank": 20, "score": 19141.72155231755 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use test_case::test_case;\n\n\n\n #[test_case(\"2012-01-01T00:00:00+00:00,\\\"\\\"\\\"hello world\\\"\\\"\\\"\" => (\"2012-01-01T00:00:00+00:00\".to_owned(), \"hello world\".to_owned()) ; \"basic entry\")]\n\n #[test_case(\"2012-01-01T00:00:00+00:00,\\\"\\\"\\\"hello\\\\nworld\\\"\\\"\\\"\" => (\"2012-01-01T00:00:00+00:00\".to_owned(), \"hello\\nworld\".to_owned()) ; \"entry with newline\")]\n\n #[test_case(\"2012-01-01T01:00:00+01:00,\\\"\\\"\\\"hello world\\\"\\\"\\\"\" => (\"2012-01-01T01:00:00+01:00\".to_owned(), \"hello world\".to_owned()) ; \"entry with non-UTC timezone\")]\n\n #[test_case(\"2012-01-01T00:00:00+00:00,\\\"\\\"\\\"\\\"\\\"\\\"\" => (\"2012-01-01T00:00:00+00:00\".to_owned(), \"\".to_owned()) ; \"empty entry\")]\n\n fn test_from_str(s: &str) -> (String, String) {\n\n let entry: Entry = s.try_into().unwrap();\n\n (entry.datetime().to_rfc3339(), entry.message().to_owned())\n\n }\n\n}\n", "file_path": "src/entry.rs", "rank": 21, "score": 19141.427847476276 }, { 
"content": " // the file so that we can check later on when trying to come back and\n\n // read a previous line we can read the last line instead of skipping\n\n // over it, because prev_line() by default skips the line that was just\n\n // read.\n\n if self.buf.is_empty() {\n\n self.f.seek(SeekFrom::End(1))?;\n\n return Ok(None);\n\n }\n\n\n\n let row = quick_csv::Csv::from_reader(self.buf.as_bytes())\n\n .next()\n\n .unwrap()?;\n\n Ok(Some(row.try_into()?))\n\n }\n\n\n\n pub fn rand_entry(&mut self) -> Result<Option<Entry>> {\n\n let mut rng = rand::thread_rng();\n\n let range = Uniform::new(0, self.len()?);\n\n self.at(range.sample(&mut rng))\n\n }\n", "file_path": "src/entries.rs", "rank": 22, "score": 19141.39216268463 }, { "content": " #[test_case(44 * 6 + 0 => None)]\n\n #[test_case(44 * 7 + 0 => None)]\n\n #[test_case(44 * 8 + 0 => None)]\n\n fn test_entry_at(pos: u64) -> Option<String> {\n\n let r = Cursor::new(Vec::from(TESTDATA.as_bytes()));\n\n Entries::new(r)\n\n .at(pos)\n\n .unwrap()\n\n .map(|e| e.message().to_owned())\n\n }\n\n\n\n // Test cases for exact date matches on each line.\n\n #[test_case(\"2020-01-01T00:01:00.899849209+00:00\" => Some(\"1\".to_owned()))]\n\n #[test_case(\"2020-02-12T23:08:40.987613062+00:00\" => Some(\"2\".to_owned()))]\n\n #[test_case(\"2020-03-12T00:00:00.000000000+00:00\" => Some(\"3\".to_owned()))]\n\n #[test_case(\"2020-04-12T23:28:45.726598931+00:00\" => Some(\"4\".to_owned()))]\n\n #[test_case(\"2020-05-12T23:28:48.495151445+00:00\" => Some(\"5\".to_owned()))]\n\n #[test_case(\"2020-06-13T10:12:53.353050231+00:00\" => Some(\"6\".to_owned()))]\n\n // Testing dates before and after the dates in the file.\n\n #[test_case(\"2000-01-01T00:01:00.000000000+00:00\" => Some(\"1\".to_owned()))]\n", "file_path": "src/entries.rs", "rank": 23, "score": 19140.487797586546 }, { "content": "\n\n pub fn prev_entry(&mut self) -> Result<Option<Entry>> {\n\n // This seek takes us to the start of the line that was just read. 
It\n\n // will sometimes be None if we're already at the start of the file but\n\n // that's fine. We don't do this seek if we've previously read past the\n\n // end of the file, so that when we do read past the end of the file we\n\n // can again go back and read the last line.\n\n if self.f.seek(SeekFrom::Current(0))? <= self.len()? {\n\n self.seek_to_prev()?;\n\n }\n\n\n\n // This seek takes us to the actual previous entry. If this one returns None\n\n // it means we're trying to go past the start of the file, and there is no\n\n // previous entry.\n\n if self.seek_to_prev()?.is_none() {\n\n return Ok(None);\n\n }\n\n\n\n self.next_entry()\n\n }\n", "file_path": "src/entries.rs", "rank": 24, "score": 19139.4450460488 }, { "content": " // We need to navigate to the line that is exactly after the line before us that\n\n // is less than the given time.\n\n\n\n // If we're at the end of the file, it means that there are no lines in the file\n\n // that can be less than the given date, so we return with the file cursor at the\n\n // end of the file.\n\n if end >= file_size {\n\n return Ok(());\n\n }\n\n\n\n // We have to move forward one line at first, as we could have exited the binary\n\n // search loop on the entry before the one that we need to return.\n\n self.next_entry()?;\n\n\n\n loop {\n\n match self.prev_entry()? 
{\n\n None => break,\n\n Some(entry) => {\n\n if entry.datetime() < date {\n\n break;\n", "file_path": "src/entries.rs", "rank": 25, "score": 19137.729572169286 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Cursor;\n\n use test_case::test_case;\n\n\n\n // Each TESTDATA line is 43 characters long, 44 if you count the newline.\n\n const TESTDATA: &str = \"2020-01-01T00:01:00.899849209+00:00,\\\"\\\"\\\"1\\\"\\\"\\\"\n\n2020-02-12T23:08:40.987613062+00:00,\\\"\\\"\\\"2\\\"\\\"\\\"\n\n2020-03-12T00:00:00.000000000+00:00,\\\"\\\"\\\"3\\\"\\\"\\\"\n\n2020-04-12T23:28:45.726598931+00:00,\\\"\\\"\\\"4\\\"\\\"\\\"\n\n2020-05-12T23:28:48.495151445+00:00,\\\"\\\"\\\"5\\\"\\\"\\\"\n\n2020-06-13T10:12:53.353050231+00:00,\\\"\\\"\\\"6\\\"\\\"\\\"\n\n\";\n\n\n\n // Clippy isn't a big fan of mathematics that can be represented simpler\n\n // or evaluates to zero, but in these tests it helps make clear that we're\n", "file_path": "src/entries.rs", "rank": 26, "score": 19137.43473759072 }, { "content": " assert_eq!(entries.next_entry()?.unwrap().message(), \"1\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"2\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"3\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"4\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"5\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"6\");\n\n assert_eq!(entries.next_entry()?.is_none(), true);\n\n assert_eq!(entries.prev_entry()?.unwrap().message(), \"6\");\n\n assert_eq!(entries.prev_entry()?.unwrap().message(), \"5\");\n\n assert_eq!(entries.prev_entry()?.unwrap().message(), \"4\");\n\n assert_eq!(entries.prev_entry()?.unwrap().message(), \"3\");\n\n assert_eq!(entries.prev_entry()?.unwrap().message(), \"2\");\n\n assert_eq!(entries.prev_entry()?.unwrap().message(), \"1\");\n\n assert_eq!(entries.prev_entry()?.is_none(), true);\n\n assert_eq!(entries.prev_entry()?.is_none(), true);\n\n 
assert_eq!(entries.prev_entry()?.is_none(), true);\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"1\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"2\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"3\");\n\n assert_eq!(entries.next_entry()?.unwrap().message(), \"4\");\n", "file_path": "src/entries.rs", "rank": 27, "score": 19135.88074370384 }, { "content": "\n\n assert_eq!(entries.next().unwrap().unwrap().message(), \"1\");\n\n assert_eq!(entries.next().unwrap().unwrap().message(), \"2\");\n\n assert_eq!(entries.next().unwrap().unwrap().message(), \"3\");\n\n assert_eq!(entries.next().unwrap().unwrap().message(), \"4\");\n\n assert_eq!(entries.next().unwrap().unwrap().message(), \"5\");\n\n assert_eq!(entries.next().unwrap().unwrap().message(), \"6\");\n\n assert_eq!(entries.next().is_none(), true);\n\n }\n\n}\n", "file_path": "src/entries.rs", "rank": 28, "score": 19135.647030942608 }, { "content": " // searching in to offsets of each line, so we allow it.\n\n #[allow(clippy::identity_op, clippy::erasing_op)]\n\n #[test_case(44 * 0 + 0 => Some(\"1\".to_owned()))]\n\n #[test_case(44 * 0 + 10 => Some(\"1\".to_owned()))]\n\n #[test_case(44 * 0 + 43 => Some(\"1\".to_owned()))]\n\n #[test_case(44 * 1 + 0 => Some(\"2\".to_owned()))]\n\n #[test_case(44 * 1 + 10 => Some(\"2\".to_owned()))]\n\n #[test_case(44 * 1 + 43 => Some(\"2\".to_owned()))]\n\n #[test_case(44 * 2 + 0 => Some(\"3\".to_owned()))]\n\n #[test_case(44 * 2 + 10 => Some(\"3\".to_owned()))]\n\n #[test_case(44 * 2 + 43 => Some(\"3\".to_owned()))]\n\n #[test_case(44 * 3 + 0 => Some(\"4\".to_owned()))]\n\n #[test_case(44 * 3 + 10 => Some(\"4\".to_owned()))]\n\n #[test_case(44 * 3 + 43 => Some(\"4\".to_owned()))]\n\n #[test_case(44 * 4 + 0 => Some(\"5\".to_owned()))]\n\n #[test_case(44 * 4 + 10 => Some(\"5\".to_owned()))]\n\n #[test_case(44 * 4 + 43 => Some(\"5\".to_owned()))]\n\n #[test_case(44 * 5 + 0 => Some(\"6\".to_owned()))]\n\n #[test_case(44 * 5 + 10 => 
Some(\"6\".to_owned()))]\n\n #[test_case(44 * 5 + 43 => Some(\"6\".to_owned()))]\n", "file_path": "src/entries.rs", "rank": 29, "score": 19133.087399136795 }, { "content": " if cur == 0 {\n\n break;\n\n }\n\n end = cur - 1;\n\n } else {\n\n if cur == file_size {\n\n break;\n\n }\n\n start = cur + 1;\n\n }\n\n }\n\n\n\n // When we exit the binary search loop we know that we're in one of the following\n\n // states:\n\n //\n\n // - We're at the very start of the file.\n\n // - We're at or past the end of the file.\n\n // - We're somewhere in the middle, potentially on the row before the row we\n\n // want to return.\n\n //\n", "file_path": "src/entries.rs", "rank": 30, "score": 19133.087399136795 }, { "content": "### Show a random entry\n\n\n\n hmmq --random\n\n\n\nPrints out a random entry. The randomness comes from selecting a random byte\n\nin your `.hmm` file, and as such longer entries are more likely to be picked.\n\nThis is a trade-off. Picking entries in a truly random fashion would require\n\nreading the entire file, which is against the philosophy of `hmmq`.\n\n\n\n## Formatting entries\n\n\n\n`hmmq` makes use of the [Handlebars][4] templating format to determine how entries\n\nare printed to the terminal. Here's an example of a really simple template:\n\n\n\n hmmq --format \"{{ datetime }}: {{ message }}\"\n\n\n\nIt's not much to look at, but it shows how the templates look and all of the\n\nvariables you have access to inside a template.\n\n\n\n`hmmq` offers some helper functions to make your templates look nicer. Here's\n\nthe default output format specified explicitly:\n\n\n\n hmmq --format $'╭ {{ color \"blue\" (strftime \"%Y-%m-%d %H:%M\" datetime) }}\\n{{ indent (markdown message) }}╰─────────────────\"\n\n\n\nThe keen reader will notice the `$` before the format argument. This is a bash\n\nquirk. 
Without it, the `\\n` inside the format argument will print literally\n\ninstead of being interpreted as a newline.\n\n\n\n# `hmmp`\n\n\n\nIf you want to use other tools to filter through your `.hmm` file, that's completely\n\nfile and even encouraged. The `hmmp` tool exists to let you pipe filtered `.hmm` file\n\ncontents and have it formatted how you want it.\n\n\n\nThe following two commands are equivalent:\n\n\n\n tail -n 10 ~/.hmm | hmmp\n\n hmmq --last 10\n\n\n\nAs are the following two:\n\n\n\n tail -n 10 ~/.hmm | hmmp --format \"{{ message }}\"\n\n hmmq --last 10 --format \"{{ message }}\"\n\n\n", "file_path": "README.md", "rank": 31, "score": 12170.017520389882 }, { "content": " through a naked invocation to `hmm`, e.g. `hmm I wonder if I could fix...`.\n\n\n\n- **`hmmq`**: the querying binary. Its main function is being able to quickly\n\n find time ranges by taking advantage of the fact a `.hmm` files is always\n\n sorted lexicographically and binary searching. All the other stuff in it is\n\n convenience.\n\n \n\n- **`hmmp`**: the outputting binary. This binary reads from stdin and formats\n\n entries based on a Handlebars template passed in the `--format` flag. There's\n\n some overlap here with `hmmq` as `hmmq` also takes a `--format` flag, but I\n\n didn't want everyone to always have to pipe to `hmmp`. It's a power-user binary\n\n for people who want to do complex slicing and dicing on their `.hmm` files\n\n outside of `hmmq`. Most users will just use `hmmq`.\n\n\n\n- **`hmmdg`**: data generation for benchmarking. It is expected no users will ever\n\n need to run this binary unless they want to reproduce the benchmarks on their\n\n own machines. 
Useful for developers for seeing how their features measure up\n\n against other features, though.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 32, "score": 12166.480120995797 }, { "content": "# Comparison to `jrnl`\n\n\n\nFeatures `jrnl` has that `hmm` doesn't:\n\n\n\n- Encryption.\n\n- Ability to add entries at arbitrary points in time.\n\n- In-built notion of tags.\n\n- In-built notion of starring.\n\n- Ability to edit entries.\n\n- Ability to parse English dates/times, e.g. \"yesterday\" and \"2 weeks ago.\"\n\n\n\nFeatures `hmm` has that `jrnl` doesn't:\n\n\n\n- Unambigous date-format (RFC3339).\n\n- File-format optimised for searching by time.\n\n- Ability to format entries however you want.\n\n- No external dependencies.\n\n- Lots of flexibility.\n\n\n\nIf you need to add entries at times in the past, or you need encryption, or\n\nyou need your file format to be purely plain text, or you need to edit entries\n\nafter they're written, `hmm` isn't for you. Other than that, I believe `hmm`\n\ncan be made to work exactly how you want it to.\n\n\n\n# Installation\n\n\n\nNo support for Homebrew yet, so Mac users will need to go down the `cargo`\n\nroute, but I plan to get it in to the Homebrew repos soon.\n\n\n\n## Arch Linux (AUR)\n\n\n\n`hmm` is in the AUR, and can be installed with an AUR helper such as `yay`:\n\n\n\n yay -S hmm-bin\n\n\n\n## Using cargo\n\n\n\n[Install Rust][2], then run:\n\n\n\n cargo install hmmcli\n\n\n\nNow the `hmm` and `hmmq` binaries should be available in your terminal.\n\n\n\n## From source\n\n\n\n[Install Rust][2], [install git][3] then run:\n\n\n\n git clone https://github.com/samwho/hmm\n\n cd hmm\n\n cargo install\n\n\n\n# Usage\n\n\n\n`hmm` is split in to three binaries: `hmm`, `hmmq` and `hmmp`.\n\n\n\n- `hmm` is for writing new entries via the CLI.\n\n- `hmmq` is for querying entries by time and content.\n\n- `hmmp` is for printing entries if you want to use tools other than\n\n `hmmq` to query them.\n\n\n\n# `hmm`\n\n\n\n## 
Writing an entry from the CLI\n\n\n\n hmm hello world\n\n\n\nThis will write an entry to the default `.hmm` file location, which is in\n\nyour home directory.\n\n\n\n## Writing an entry to a different `.hmm` file\n\n\n\nYour `.hmm` file can be located wherever you want, and named whatever you\n\nwant.\n\n\n\n hmm --path ~/.notes hello world\n\n\n", "file_path": "README.md", "rank": 33, "score": 12164.509075765323 }, { "content": "[![Build status](https://github.com/samwho/hmm/workflows/Build/badge.svg)](https://github.com/samwho/hmm/actions)\n\n[![Crates.io](https://img.shields.io/crates/v/hmmcli.svg)](https://crates.io/crates/hmmcli)\n\n\n\n`hmm` is a small command-line note taking app written in Rust. Entries are\n\nwritten in plain text and indexed by the time they were written.\n\n\n\n`hmm` is inspired by [jrnl][1], except with a different use-case in mind.\n\nWhere `jrnl` excels at a journaling use case, where users to can entries\n\nwith arbitrary times and the file format is human-readable, `hmm` only\n\nallows you to add an entry at the current time and has a machine-readable\n\nformat that's optimised for fast time-based querying.\n\n\n\n* [Comparison to jrnl](#comparison-to-jrnl)\n\n* [Installation](#installation)\n\n * [Arch Linux (AUR)](#arch-linux-aur)\n\n * [Using cargo](#using-cargo)\n\n * [From source](#from-source)\n\n* [Usage](#usage)\n\n* [hmm](#hmm)\n\n * [Writing an entry from the CLI](#writing-an-entry-from-the-cli)\n\n * [Writing an entry to a different .hmm file](#writing-an-entry-to-a-different-hmm-file)\n\n * [Writing long-form entries in your EDITOR](#writing-long-form-entries-in-your-editor)\n\n* [hmmq](#hmmq)\n\n * [Listing your entries](#listing-your-entries)\n\n * [Show the most recent 10 entries](#show-the-most-recent-10-entries)\n\n * [Show the frst 10 entries](#show-the-frst-10-entries)\n\n * [Show entries on a specific day](#show-entries-on-a-specific-day)\n\n * [Show entries on a given year](#show-entries-on-a-given-year)\n\n * 
[Count entries in a given year](#count-entries-in-a-given-year)\n\n * [Show all entries from a given date](#show-all-entries-from-a-given-date)\n\n * [Show a random entry](#show-a-random-entry)\n\n * [Formatting entries](#formatting-entries)\n\n* [hmmp](#hmmp)\n\n* [Benchmarking](#benchmarking)\n\n\n", "file_path": "README.md", "rank": 34, "score": 12164.162499528684 }, { "content": "## Writing long-form entries in your `EDITOR`\n\n\n\n hmm\n\n\n\nInvoked with no arguments, or just a `--path` argument, `hmm` will open your\n\ndefault `EDITOR` to compose an entry. Saving and quitting that editor will\n\nthen write the note to your `.hmm` file. If you don't have an `EDITOR`\n\nconfigured, you can also pass one as a flag:\n\n\n\n hmm --editor vim\n\n\n\nThe editor variable can be arbitrarily complex, the only thing to keep in mind\n\nis that `hmm` will call it with a temporary file as the last argument. It will\n\nread the contents of that temporary file after your editor command exits\n\nsuccessfully. If your editor does not exit successfully, nothing is written to\n\nyour `.hmm` file.\n\n\n\n# `hmmq`\n\n\n\n## Listing your entries\n\n\n\n hmmq\n\n\n\nBy default, this lists all of your entries in a default format in ascending\n\nchronological order. This may not be desired, so there are a bunch of flags\n\nto narrow down what is shown.\n\n\n\n### Show the most recent 10 entries\n\n\n\n hmmq --last 10\n\n\n\n### Show the frst 10 entries\n\n\n\n hmmq --first 10\n\n\n\n### Show entries on a specific day\n\n\n\n hmmq --start 2020-01-01 --end 2020-01-02\n\n\n\nThe `--start` flag is inclusive and the `--end` flag is exclusive, so the\n\nabove command will show all entries that were created on the 1st of January\n\n2020.\n\n\n\nDates follow the RFC3339/ISO8601 format, allowing you to omit parts you don't\n\nneed. 
All dates are in your local timezone.\n\n\n\n### Show entries on a given year\n\n\n\n hmmq --start 2019 --end 2020\n\n\n\nThis will show all of your entries from 2019.\n\n\n\n### Count entries in a given year\n\n\n\n hmmq --start 2019 --end 2020 --count\n\n\n\nThis will show you how many entries you made in 2019.\n\n\n\n### Show all entries from a given date\n\n\n\n hmmq --start 2020-02-20\n\n\n\nThis will print all of your entries from the 20th of February 2020.\n\n\n", "file_path": "README.md", "rank": 35, "score": 12162.634651283992 }, { "content": "# Contributing\n\n\n\nThanks for showing an interest in `hmm`! This document aims to be a guide for\n\nmaking contributions to `hmm`, including the philosophy behind the tool and\n\nimportant concepts in the code.\n\n\n\n## Glossary\n\n\n\n- **`.hmm` file**: any file that contains `hmm` entries, referred to as a `.hmm`\n\n file because the default file name is `.hmm`.\n\n- **entry**: an entry is a line in a `.hmm` file. Entries are represented as CSV\n\n with 2 columns: an RFC3339 datetime and a JSON encoded string message. The\n\n messages are JSON encoded in order to make them single lines.\n\n\n\n## Philosophy\n\n\n\n`hmm` is for jotting down in-the-moment thoughts while you're at your terminal.\n\nIf you're running a build or some tests and have a few minutes, talk a little\n\nabout the problem you're working on. Leave tips to future you. Vent. We all have\n\nthose moments where we're at, cursor in terminal, waiting for something to happen,\n\nand in those moments I like to write down a thought. That's what `hmm` is about.\n\n\n\n### Multiple binaries\n\n\n\nI decided early on that I don't want one binary that handles everything through a \n\ndizzying array of flags. 
I want single-purpose binaries that take only the flags\n\nthat are relevant to them, no more no less.\n\n\n\nIf you're adding functionality you want to expose to a user and it doesn't neatly\n\nfit in to an existing binary, you shouldn't feel afraid adding a new binary.\n\n\n\nHere are the existing binaries and what they are for:\n\n\n\n- **`hmm`**: anything to do with composing entries should go in here, but beware\n\n that it should always be possible for a user to compose a plain text entry\n", "file_path": "CONTRIBUTING.md", "rank": 36, "score": 12162.211380154473 }, { "content": "### Cross platform\n\n\n\n`hmm` works on Linux, Mac and Windows and that's how it's always going to be.\n\n\n\n## Formatting\n\n\n\nAll code should be formatted with `cargo fmt`. There is a check for this in\n\nCI so you can't submit a PR if your change hasn't been `cargo fmt`ed.\n\n\n\n rustup component add rustfmt\n\n cargo fmt\n\n\n\n## Lint\n\n\n\nAll code should have no warnings from `clippy`, Rust's beloved linter. Again,\n\nthere is a CI check for this.\n\n\n\n rustup component add clippy\n\n cargo clippy -- -D warnings\n\n\n\n## Testing\n\n\n\nAll code should be tested. I know, it doesn't have 100% coverage at the\n\nmoment, but that's no excuse for new code.\n\n\n\nWe have both unit tests and integration tests. Unit tests live inside of\n\nthe module being tested, integration tests live inside the binaries being\n\ntested. 
Have a look at existing tests to get an idea of what's required.\n\n\n\n## Git/Github Workflow\n\n\n\nThis is our preferred process for opening a PR on GitHub:\n\n\n\n- Fork this repository\n\n- Create a branch off of `master` for your work: `git checkout -b my-feature-branch`\n\n- Make some changes, committing them along the way\n\n- When your changes are ready for review, push your branch: `git push origin my-feature-branch`\n\n- Create a pull request from your branch to `hmm/master`\n\n- No need to assign the pull request to anyone, we'll review it when we can\n\n- When the changes have been reviewed and approved, someone will squash and merge for you\n", "file_path": "CONTRIBUTING.md", "rank": 37, "score": 12159.423034445652 }, { "content": "# Benchmarking\n\n\n\nThere's a script in the repository root called `bench.sh` that shows the methodology\n\nbehind the following table if you're interested.\n\n\n\n| Command | Mean [ms] | Min [ms] | Max [ms] | Relative |\n\n|:---|---:|---:|---:|---:|\n\n| `target/release/hmmq --path /tmp/out --random` | 13.5 ± 0.8 | 11.9 | 15.4 | 1.00 |\n\n| `target/release/hmmq --path /tmp/out --last 10` | 15.0 ± 0.8 | 12.8 | 17.1 | 1.11 ± 0.09 |\n\n| `target/release/hmmq --path /tmp/out --first 10` | 13.6 ± 1.0 | 9.0 | 16.2 | 1.01 ± 0.09 |\n\n| `target/release/hmmq --path /tmp/out --start 2019 --first 10` | 16.8 ± 0.8 | 15.3 | 19.2 | 1.24 ± 0.09 |\n\n| `target/release/hmmq --path /tmp/out --end 2019 --last 10` | 18.8 ± 0.9 | 16.4 | 21.4 | 1.40 ± 0.10 |\n\n| `target/release/hmmq --path /tmp/out --start 2019-01 --end 2019-02` | 325.6 ± 11.9 | 309.9 | 379.9 | 24.11 ± 1.65 |\n\n| `target/release/hmmq --path /tmp/out --start 2019 --end 2020 --count` | 346.6 ± 13.6 | 336.7 | 427.6 | 25.67 ± 1.79 |\n\n| `target/release/hmmq --path /tmp/out --start 2019-01 --end 2019-06 --contains lorum` | 232.3 ± 5.2 | 226.4 | 262.7 | 17.21 ± 1.07 |\n\n| `target/release/hmmq --path /tmp/out --start 2019 --end 2020 --regex \"(lorum\\|ipsum)\"` | 565.3 ± 13.3 | 
548.1 | 622.1 | 41.87 ± 2.62 |\n\n\n\n[1]: https://jrnl.sh/\n\n[2]: https://rustup.rs/\n\n[3]: https://git-scm.com/book/en/v2/Getting-Started-Installing-Git\n\n[4]: https://handlebarsjs.com/\n", "file_path": "README.md", "rank": 38, "score": 12158.518744256577 }, { "content": " fn from(err: handlebars::RenderError) -> Error {\n\n Error::Render(err)\n\n }\n\n}\n\n\n\nimpl From<handlebars::TemplateRenderError> for Error {\n\n fn from(err: handlebars::TemplateRenderError) -> Error {\n\n Error::TemplateRender(err)\n\n }\n\n}\n\n\n\nimpl From<handlebars::TemplateError> for Error {\n\n fn from(err: handlebars::TemplateError) -> Error {\n\n Error::Template(err)\n\n }\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(err: io::Error) -> Error {\n\n Error::Io(err)\n", "file_path": "src/error.rs", "rank": 42, "score": 11.73436653740282 }, { "content": " fn cause(&self) -> Option<&dyn error::Error> {\n\n match *self {\n\n Error::Io(ref err) => Some(err),\n\n Error::Csv(ref err) => Some(err),\n\n Error::QuickCsv(ref err) => Some(err),\n\n Error::ChronoParse(ref err) => Some(err),\n\n Error::SerdeJson(ref err) => Some(err),\n\n Error::Template(ref err) => Some(err),\n\n Error::TemplateRender(ref err) => Some(err),\n\n Error::Render(ref err) => Some(err),\n\n Error::Utf8(ref err) => Some(err),\n\n Error::Regex(ref err) => Some(err),\n\n Error::String(_) => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n", "file_path": "src/error.rs", "rank": 43, "score": 10.62214543586593 }, { "content": "\n\nimpl From<String> for Error {\n\n fn from(s: String) -> Error {\n\n Error::String(s)\n\n }\n\n}\n\n\n\nimpl From<regex::Error> for Error {\n\n fn from(err: regex::Error) -> Error {\n\n Error::Regex(err)\n\n }\n\n}\n\n\n\nimpl From<std::string::FromUtf8Error> for Error {\n\n fn from(err: std::string::FromUtf8Error) -> Error {\n\n Error::Utf8(err)\n\n }\n\n}\n\n\n\nimpl 
From<handlebars::RenderError> for Error {\n", "file_path": "src/error.rs", "rank": 44, "score": 9.429909671553657 }, { "content": " Error::Io(ref err) => err.fmt(f),\n\n Error::Csv(ref err) => err.fmt(f),\n\n Error::QuickCsv(ref err) => err.fmt(f),\n\n Error::ChronoParse(ref err) => err.fmt(f),\n\n Error::SerdeJson(ref err) => err.fmt(f),\n\n Error::Template(ref err) => err.fmt(f),\n\n Error::TemplateRender(ref err) => err.fmt(f),\n\n Error::Render(ref err) => err.fmt(f),\n\n Error::Utf8(ref err) => err.fmt(f),\n\n Error::Regex(ref err) => err.fmt(f),\n\n Error::String(ref s) => f.write_str(s),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&str> for Error {\n\n fn from(s: &str) -> Error {\n\n Error::String(s.to_owned())\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 45, "score": 9.395762271487676 }, { "content": " // of the line and can return the position we just read.\n\n if buf[0] == 0x0a {\n\n return Ok(pos + 1);\n\n }\n\n }\n\n\n\n // We haven't reached the start of the line, so we go back a byte and\n\n // start the loop again.\n\n pos -= 1;\n\n f.seek(SeekFrom::Start(pos))?;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::{BufRead, Cursor, Seek, SeekFrom};\n\n use test_case::test_case;\n\n\n\n fn read_line(r: &mut impl BufRead) -> Result<String> {\n", "file_path": "src/seek.rs", "rank": 46, "score": 8.695284874531765 }, { "content": "pub mod entries;\n\npub mod entry;\n\npub mod error;\n\npub mod format;\n\npub mod seek;\n\n\n\npub type Result<T> = std::result::Result<T, error::Error>;\n", "file_path": "src/lib.rs", "rank": 47, "score": 8.113294429976385 }, { "content": " let mut buf = String::new();\n\n r.read_line(&mut buf)?;\n\n Ok(buf)\n\n }\n\n\n\n #[test_case(\"\", 0 => \"\" ; \"empty file\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 0 => \"line 1\\n\" ; \"start of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 3 => \"line 1\\n\" ; \"middle of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 6 => 
\"line 1\\n\" ; \"end of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 7 => \"line 2\\n\" ; \"start of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 12 => \"line 2\\n\" ; \"middle of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 13 => \"line 2\\n\" ; \"end of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 14 => \"line 3\" ; \"start of third line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 15 => \"line 3\" ; \"middle of third line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 19 => \"line 3\" ; \"end of third line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 26 => \"line 3\" ; \"past eof\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\\n\", 20 => \"line 3\\n\" ; \"last line when line ends with eof\")]\n\n fn test_start_of_current_line(s: &str, pos: u64) -> String {\n\n let mut r = Cursor::new(s.as_bytes());\n\n r.seek(SeekFrom::Start(pos)).unwrap();\n", "file_path": "src/seek.rs", "rank": 48, "score": 5.689361099945547 }, { "content": "use super::Result;\n\nuse std::io::{ErrorKind, Read, Seek, SeekFrom};\n\n\n", "file_path": "src/seek.rs", "rank": 49, "score": 5.121550733355976 }, { "content": " start_of_current_line(&mut r).unwrap();\n\n read_line(&mut r).unwrap()\n\n }\n\n\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 0 => Some(7) ; \"start of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 2 => Some(7) ; \"middle of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 6 => Some(7) ; \"end of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 7 => Some(14) ; \"start of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 9 => Some(14) ; \"middle of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 13 => Some(14) ; \"end of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 14 => None ; \"start of last line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 16 => None ; \"middle of last line\")]\n\n #[test_case(\"line 1\\nline 
2\\nline 3\", 19 => None ; \"end of last line\")]\n\n fn test_start_of_next_line(s: &str, pos: u64) -> Option<u64> {\n\n let mut r = Cursor::new(s.as_bytes());\n\n r.seek(SeekFrom::Start(pos)).unwrap();\n\n start_of_next_line(&mut r).unwrap()\n\n }\n\n\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 0 => None ; \"start of first line\")]\n", "file_path": "src/seek.rs", "rank": 50, "score": 4.163203219593536 }, { "content": " #[test_case(\"line 1\\nline 2\\nline 3\", 2 => None ; \"middle of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 1 => None ; \"second letter of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 6 => None ; \"end of first line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 7 => Some(0) ; \"start of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 9 => Some(0) ; \"middle of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 13 => Some(0) ; \"end of second line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 14 => Some(7) ; \"start of last line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 16 => Some(7) ; \"middle of last line\")]\n\n #[test_case(\"line 1\\nline 2\\nline 3\", 19 => Some(7) ; \"end of last line\")]\n\n fn test_start_of_prev_line(s: &str, pos: u64) -> Option<u64> {\n\n let mut r = Cursor::new(s.as_bytes());\n\n r.seek(SeekFrom::Start(pos)).unwrap();\n\n start_of_prev_line(&mut r).unwrap()\n\n }\n\n}\n", "file_path": "src/seek.rs", "rank": 51, "score": 3.9187758653458395 }, { "content": " }\n\n}\n\n\n\nimpl From<csv::Error> for Error {\n\n fn from(err: csv::Error) -> Error {\n\n Error::Csv(err)\n\n }\n\n}\n\n\n\nimpl From<quick_csv::error::Error> for Error {\n\n fn from(err: quick_csv::error::Error) -> Error {\n\n Error::QuickCsv(err)\n\n }\n\n}\n\n\n\nimpl From<serde_json::error::Error> for Error {\n\n fn from(err: serde_json::error::Error) -> Error {\n\n Error::SerdeJson(err)\n\n }\n\n}\n\n\n\nimpl From<chrono::format::ParseError> for Error {\n\n fn from(err: 
chrono::format::ParseError) -> Error {\n\n Error::ChronoParse(err)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 52, "score": 2.634299536702843 }, { "content": "use std::{error, fmt, io};\n\n\n", "file_path": "src/error.rs", "rank": 53, "score": 2.5925467970185485 }, { "content": " pos -= 1;\n\n } else {\n\n f.seek(SeekFrom::Start(pos))?;\n\n }\n\n\n\n loop {\n\n // If we're at the start we are by definition at the start of the line,\n\n // so just rewind the single-byte read we just did and return a 0\n\n // position.\n\n if pos == 0 {\n\n f.seek(SeekFrom::Start(0))?;\n\n return Ok(pos);\n\n }\n\n\n\n if let Err(e) = f.read_exact(&mut buf) {\n\n if e.kind() != ErrorKind::UnexpectedEof {\n\n return Err(e.into());\n\n }\n\n } else {\n\n // If we've read a newline character (0x0a), we've reached the start\n", "file_path": "src/seek.rs", "rank": 54, "score": 1.602557101146423 } ]
Rust
src/jdk.rs
jht5945/buildj
26de9ddcc6dcd5c956da2f592d55bb504fe1a8f5
use std::{collections::HashMap, env, fs, str, path::Path, process::Command}; use rust_util::util_os; use rust_util::util_env; use crate::{local_util, tool, misc::VERBOSE}; use plist::Value; const PATH: &str = "PATH"; const JAVA_HOME: &str = "JAVA_HOME"; const OPENJDK_MACOS: &str = "openjdk-osx"; const JDK_LINUX: &str = "jdk-linux"; const OPENJDK_LINUX: &str = "openjdk-linux"; const MACOS_LIBEXEC_JAVAHOME: &str = "/usr/libexec/java_home"; pub const LOCAL_JAVA_HOME_BASE_DIR: &str = ".jssp/jdks"; lazy_static! { pub static ref BUILDJ_JAVA_NAME: Option<String> = env::var("BUILDJ_JAVA_NAME").ok(); } pub fn get_java_home(version: &str) -> Option<String> { match get_macos_java_home(version) { Some(j) => Some(j), None => match get_local_java_home(version) { Some(j) => Some(j), None => iff!(get_cloud_java(version), get_local_java_home(version), None), }, } } pub fn get_cloud_java(version: &str) -> bool { if !util_os::is_macos_or_linux() { return false; } let cloud_java_names = match &*BUILDJ_JAVA_NAME { None => if util_os::is_macos() { vec![OPENJDK_MACOS] } else if util_os::is_linux() { vec![JDK_LINUX, OPENJDK_LINUX] } else { vec![] }, Some(buildj_java_name) => vec![buildj_java_name.as_str()], }; let local_java_home_base_dir = match local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { Ok(o) => o, Err(_) => return false, }; for cloud_java_name in cloud_java_names { if tool::get_and_extract_tool_package(&local_java_home_base_dir, false, cloud_java_name, version, false).is_ok() { return true; } } failure!("Get java failed, version: {}", version); false } pub fn get_macos_java_home(version: &str) -> Option<String> { if !util_os::is_macos() || util_env::is_env_on("SKIP_CHECK_JAVA_HOME") { return None; } let java_home_x = Command::new(MACOS_LIBEXEC_JAVAHOME).arg("-x").output().ok()?; let java_home_plist_value = match Value::from_reader_xml(&*java_home_x.stdout) { Err(e) => { debugging!("Parse java_home outputs failed: {}", e); return None; } Ok(val) => val, }; let 
java_home_plist_value_array = match java_home_plist_value.as_array() { None => { debugging!("Covert java_home plist output to array failed: {:?}", java_home_plist_value); return None; } Some(val) => val, }; for java_home_plist_item in java_home_plist_value_array { debugging!("Checking: {:?}", java_home_plist_item); if let Some(jvm_item) = java_home_plist_item.as_dictionary() { let jvm_version_value = jvm_item.get("JVMVersion"); let jvm_home_path_value = jvm_item.get("JVMHomePath"); if let (Some(Value::String(jvm_version)), Some(Value::String(jvm_path))) = (jvm_version_value, jvm_home_path_value) { debugging!("Check version: {} vs {}", jvm_version, version); if jvm_version.starts_with(version) { debugging!("Check version success: {} -> {}", jvm_version, jvm_path); return Some(jvm_path.into()); } } } } None } pub fn get_local_java_home(version: &str) -> Option<String> { let local_java_home_base_dir = local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR).ok()?; let paths = fs::read_dir(Path::new(&local_java_home_base_dir)).ok()?; for path in paths { if let Ok(dir_entry) = path { if let Some(p) = dir_entry.path().to_str() { if *VERBOSE { debugging!("Try match path: {}", p); } let mut path_name = p; if p.ends_with('/') { path_name = &path_name[..path_name.len() - 1] } if let Some(i) = path_name.rfind('/') { path_name = &path_name[i + 1..]; } let matched_path_opt = if (path_name.starts_with("jdk-") && (&path_name[4..]).starts_with(version)) || (path_name.starts_with("jdk") && (&path_name[3..]).starts_with(version)) { Some(p) } else { None }; if let Some(matched_path) = matched_path_opt { if *VERBOSE { debugging!("Matched JDK path found: {}", matched_path); } return if local_util::is_path_exists(matched_path, "Contents/Home") { Some(format!("{}/{}", matched_path, "Contents/Home")) } else { Some(matched_path.to_string()) }; } } } } None } pub fn extract_jdk_and_wait(file_name: &str) { if let Ok(local_java_home_base_dir) = 
local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { local_util::extract_package_and_wait(&local_java_home_base_dir, file_name).unwrap_or_else(|err| { failure!("Extract file: {}, failed: {}", file_name, err); }); } } pub fn get_env() -> HashMap<String, String> { let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { new_env.insert(key, value); } new_env } pub fn get_env_with_java_home(java_home: &str) -> HashMap<String, String> { let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { let key_str = key.as_str(); if JAVA_HOME == key_str { } else if PATH == key_str { let path = value.to_string(); let new_path = format!("{}/bin:{}", java_home, path); new_env.insert(PATH.to_string(), new_path); } else { new_env.insert(key, value); } } new_env.insert(JAVA_HOME.to_string(), java_home.to_string()); new_env }
use std::{collections::HashMap, env, fs, str, path::Path, process::Command}; use rust_util::util_os; use rust_util::util_env; use crate::{local_util, tool, misc::VERBOSE}; use plist::Value; const PATH: &str = "PATH"; const JAVA_HOME: &str = "JAVA_HOME"; const OPENJDK_MACOS: &str = "openjdk-osx"; const JDK_LINUX: &str = "jdk-linux"; const OPENJDK_LINUX: &str = "openjdk-linux"; const MACOS_LIBEXEC_JAVAHOME: &str = "/usr/libexec/java_home"; pub const LOCAL_JAVA_HOME_BASE_DIR: &str = ".jssp/jdks"; lazy_static! { pub static ref BUILDJ_JAVA_NAME: Option<String> = env::var("BUILDJ_JAVA_NAME").ok(); } pub fn get_java_home(version: &str) -> Option<String> { match get_macos_java_home(version) { Some(j) => Some(j), None => match get_local_java_home(version) { Some(j) => Some(j), None => iff!(get_cloud_java(version), get_local_java_home(version), None), }, } } pub fn get_cloud_java(version: &str) -> bool { if !util_os::is_macos_or_linux() { return false; } let cloud_java_names = match &*BUILDJ_JAVA_NAME { None => if util_os::is_macos() { vec![OPENJDK_MACOS] } else if util_os::is_linux() { vec![JDK_LINUX, OPENJDK_LINUX] } else { vec![] }, Some(buildj_java_name) => vec![buildj_java_name.as_str()], }; let local_java_home_base_dir = match local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { Ok(o) => o, Err(_) => return false, }; for cloud_java_name in cloud_java_names { if tool::get_and_extract_tool_package(&local_java_home_base_dir, false, cloud_java_name, version, false).is_ok() { return true; } } failure!("Get java failed, version: {}", version); false } pub fn get_macos_java_home(version: &str) -> Option<String> { if !util_os::is_macos() || util_env::is_env_on("SKIP_CHECK_JAVA_HOME") { return None; } let java_home_x = Command::new(MACOS_LIBEXEC_JAVAHOME).arg("-x").output().ok()?; let java_home_plist_value = mat
{ let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { new_env.insert(key, value); } new_env } pub fn get_env_with_java_home(java_home: &str) -> HashMap<String, String> { let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { let key_str = key.as_str(); if JAVA_HOME == key_str { } else if PATH == key_str { let path = value.to_string(); let new_path = format!("{}/bin:{}", java_home, path); new_env.insert(PATH.to_string(), new_path); } else { new_env.insert(key, value); } } new_env.insert(JAVA_HOME.to_string(), java_home.to_string()); new_env }
ch Value::from_reader_xml(&*java_home_x.stdout) { Err(e) => { debugging!("Parse java_home outputs failed: {}", e); return None; } Ok(val) => val, }; let java_home_plist_value_array = match java_home_plist_value.as_array() { None => { debugging!("Covert java_home plist output to array failed: {:?}", java_home_plist_value); return None; } Some(val) => val, }; for java_home_plist_item in java_home_plist_value_array { debugging!("Checking: {:?}", java_home_plist_item); if let Some(jvm_item) = java_home_plist_item.as_dictionary() { let jvm_version_value = jvm_item.get("JVMVersion"); let jvm_home_path_value = jvm_item.get("JVMHomePath"); if let (Some(Value::String(jvm_version)), Some(Value::String(jvm_path))) = (jvm_version_value, jvm_home_path_value) { debugging!("Check version: {} vs {}", jvm_version, version); if jvm_version.starts_with(version) { debugging!("Check version success: {} -> {}", jvm_version, jvm_path); return Some(jvm_path.into()); } } } } None } pub fn get_local_java_home(version: &str) -> Option<String> { let local_java_home_base_dir = local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR).ok()?; let paths = fs::read_dir(Path::new(&local_java_home_base_dir)).ok()?; for path in paths { if let Ok(dir_entry) = path { if let Some(p) = dir_entry.path().to_str() { if *VERBOSE { debugging!("Try match path: {}", p); } let mut path_name = p; if p.ends_with('/') { path_name = &path_name[..path_name.len() - 1] } if let Some(i) = path_name.rfind('/') { path_name = &path_name[i + 1..]; } let matched_path_opt = if (path_name.starts_with("jdk-") && (&path_name[4..]).starts_with(version)) || (path_name.starts_with("jdk") && (&path_name[3..]).starts_with(version)) { Some(p) } else { None }; if let Some(matched_path) = matched_path_opt { if *VERBOSE { debugging!("Matched JDK path found: {}", matched_path); } return if local_util::is_path_exists(matched_path, "Contents/Home") { Some(format!("{}/{}", matched_path, "Contents/Home")) } else { Some(matched_path.to_string()) 
}; } } } } None } pub fn extract_jdk_and_wait(file_name: &str) { if let Ok(local_java_home_base_dir) = local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { local_util::extract_package_and_wait(&local_java_home_base_dir, file_name).unwrap_or_else(|err| { failure!("Extract file: {}, failed: {}", file_name, err); }); } } pub fn get_env() -> HashMap<String, String>
random
[ { "content": "pub fn get_and_extract_tool_package(base_dir: &str, dir_with_name: bool, name: &str, version: &str, extract_match: bool) -> XResult<bool> {\n\n let tool_package_detail = get_tool_package_detail(name, version)?;\n\n let build_json_object = json::parse(&tool_package_detail)?;\n\n if *VERBOSE {\n\n debugging!(\"Get tool {}:{}, result JSON: {}\", name, version, json::stringify_pretty(build_json_object.clone(), 4));\n\n }\n\n if build_json_object[\"status\"] != 200 {\n\n return simple_error!(\"Error in get tool package detail: {}\", build_json_object[\"message\"]);\n\n }\n\n let data = &build_json_object[\"data\"];\n\n let integrity = &data[\"integrity\"];\n\n let url = &data[\"url\"];\n\n let name = &data[\"name\"];\n\n if integrity.is_null() || url.is_null() || name.is_null() {\n\n return simple_error!(\"Parse tool package detail failed: {}\", tool_package_detail);\n\n }\n\n let n = data[\"n\"].to_string();\n\n let v = data[\"v\"].to_string();\n\n\n\n if extract_match && version != v {\n", "file_path": "src/tool.rs", "rank": 0, "score": 199867.8249955572 }, { "content": "pub fn get_cloud_builder(builder: &str, version: &str) -> bool {\n\n if ! 
util_os::is_macos_or_linux() {\n\n return false;\n\n }\n\n let local_builder_home_base_dir = match local_util::get_user_home_dir(LOCAL_BUILDER_HOME_BASE_DIR) {\n\n Ok(o) => o, Err(_) => return false,\n\n };\n\n match get_and_extract_tool_package(&local_builder_home_base_dir, true, builder, version, true) {\n\n Ok(_) => true, Err(err) => {\n\n failure!(\"Get builder: {} failed, version: {}, error: {}\", builder, version, err);\n\n false\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 2, "score": 195001.20879741423 }, { "content": "pub fn is_path_exists(dir: &str, sub_dir: &str) -> bool {\n\n let full_path = &format!(\"{}/{}\", dir, sub_dir);\n\n Path::new(full_path).exists()\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 3, "score": 154136.99868794318 }, { "content": "pub fn get_tool_package_detail(name: &str, version: &str) -> XResult<String> {\n\n let secret: Option<String> = if *NOAUTH {\n\n warning!(\"Running in no auth mode!\");\n\n None\n\n } else {\n\n match get_tool_package_secret() {\n\n Ok(r) => Some(r), Err(err) => {\n\n warning!(\"Get package detail secret failed: {}, from file: ~/{}\", err, STANDARD_CONFIG_JSON);\n\n None\n\n },\n\n }\n\n };\n\n \n\n let mut url = String::with_capacity(1024);\n\n match secret {\n\n None => {\n\n url.push_str(TOOL_PACKAGE_DETAIL_URL_WITHOUT_AUTH);\n\n url.push_str(\"?\");\n\n },\n\n Some(secret) => {\n", "file_path": "src/tool.rs", "rank": 4, "score": 153145.84799829742 }, { "content": "pub fn get_builder_home(builder: &str, version: &str) -> Option<BuilderDesc> {\n\n let local_builder_home_base_dir = match local_util::get_user_home_dir(LOCAL_BUILDER_HOME_BASE_DIR) {\n\n Ok(o) => o, Err(_) => return None,\n\n };\n\n let builder_name = match builder {\n\n \"maven\" => BuilderName::Maven,\n\n \"gradle\" => BuilderName::Gradle,\n\n _ => {\n\n failure!(\"Unknown builder: {}\", builder);\n\n return None;\n\n },\n\n };\n\n let local_builder_home_dir = &format!(\"{}/{}-{}\", local_builder_home_base_dir, 
builder, version);\n\n\n\n if Path::new(local_builder_home_dir).exists() || get_cloud_builder(builder, version) {\n\n get_local_builder_home_sub(builder_name, local_builder_home_dir)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 6, "score": 148272.50563209155 }, { "content": "pub fn get_short_git_hash() -> Option<&'static str> {\n\n get_full_git_hash().map(|h| &h[0..7])\n\n}\n", "file_path": "src/misc.rs", "rank": 9, "score": 132207.4867251108 }, { "content": "pub fn get_full_git_hash() -> Option<&'static str> {\n\n // build from crates, git hash is empty\n\n iff!(GIT_HASH.is_empty(), None, Some(GIT_HASH))\n\n}\n\n\n", "file_path": "src/misc.rs", "rank": 10, "score": 132207.4867251108 }, { "content": "pub fn verify_file_integrity(integrity: &str, file_name: &str) -> XResult<bool> {\n\n match integrity.find('-') {\n\n None => simple_error!(\"Not supported integrigty: {}\", integrity),\n\n Some(index) => {\n\n let digest_hex = &integrity[index+1..];\n\n let calc_digest_hex = match &integrity[0..index] {\n\n \"sha256:hex\" => calc_file_digest(&mut Sha256::new(), \"SHA256\", file_name)?,\n\n \"sha512:hex\" => calc_file_digest(&mut Sha512::new(), \"SHA512\", file_name)?,\n\n \"sha1:hex\" => calc_file_digest(&mut Sha1::new(), \"SHA1\", file_name)?,\n\n \"md5:hex\" => calc_file_digest(&mut Md5::new(), \"MD5\", file_name)?,\n\n _ => return simple_error!(\"Not supported integrigty: {}\", integrity),\n\n };\n\n let integrity_verify_result = digest_hex == calc_digest_hex.as_str();\n\n if ! 
integrity_verify_result {\n\n failure!(\"Verify integrity failed, expected: {}, actual: {}\", digest_hex, calc_digest_hex);\n\n }\n\n Ok(integrity_verify_result)\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 11, "score": 126279.5588880211 }, { "content": "pub fn set_tool_package_secret(secret: &str) -> XResult<()> {\n\n let standard_config_file = local_util::get_user_home_dir(STANDARD_CONFIG_JSON)?;\n\n\n\n match fs::metadata(&standard_config_file) {\n\n Err(_) => {\n\n match fs::write(&standard_config_file, json::stringify_pretty(\n\n object!{ \"build.js\" => object!{\n\n \"auth_token\" => secret, }\n\n }, 4)) {\n\n Ok(_) => Ok(()),\n\n Err(err) => simple_error!(\"Write config failed: {}, error message: {}\", standard_config_file, err),\n\n }\n\n },\n\n Ok(f) => {\n\n if ! f.is_file() {\n\n return simple_error!(\"Config is not a file: {}\", standard_config_file);\n\n }\n\n let standard_config_json = fs::read_to_string(&standard_config_file)?;\n\n let mut standard_config_object = json::parse(&standard_config_json)?;\n\n if standard_config_object[\"build.js\"].is_null() {\n", "file_path": "src/tool.rs", "rank": 13, "score": 115018.59946854375 }, { "content": "pub fn get_archive_version(gid: &str, aid: &str) -> XResult<String> {\n\n if *VERBOSE {\n\n debugging!(\"Start get archive info: {}:{}\", gid, aid);\n\n }\n\n let mut url = String::with_capacity(1024);\n\n url.push_str(GET_ARCHIVER_VERSION_URL);\n\n url.push_str(\"?gid=\");\n\n url.push_str(&urlencoding::encode(gid));\n\n url.push_str(\"&aid=\");\n\n url.push_str(&urlencoding::encode(aid));\n\n let version_result = get_url_content(url.as_str())?;\n\n if *VERBOSE {\n\n debugging!(\"Get archive result: {}\", version_result);\n\n }\n\n let version_result_object = json::parse(&version_result)?;\n\n if version_result_object[\"status\"] != 200 {\n\n simple_error!(\"Get archive info version failed: {}\", version_result)\n\n } else {\n\n Ok(version_result_object[\"data\"].to_string())\n\n 
}\n\n}\n\n\n", "file_path": "src/build_json.rs", "rank": 14, "score": 109565.04953252629 }, { "content": "pub fn get_args_as_vec() -> Vec<String> {\n\n env::args().collect::<Vec<String>>()\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 15, "score": 107320.4378329521 }, { "content": "pub fn is_buildin_args(args: &[String]) -> bool {\n\n args.get(1)\n\n .map(|arg| arg.starts_with(\":::\") || arg.starts_with(\"...\"))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 16, "score": 97241.10778751218 }, { "content": "pub fn init_dir(dir: &str) {\n\n if ! Path::new(dir).exists() {\n\n fs::create_dir_all(dir).unwrap_or_else(|err| {\n\n failure!(\"Init dir {} failed: {}\", dir, err);\n\n });\n\n }\n\n}\n", "file_path": "src/local_util.rs", "rank": 17, "score": 94883.51851405602 }, { "content": "pub fn get_local_builder_home_sub_first_sub_dir(local_builder_home_dir: &str) -> Option<String> {\n\n let paths = fs::read_dir(Path::new(&local_builder_home_dir)).ok()?;\n\n for path in paths {\n\n if let Ok(p) = path {\n\n if p.path().is_dir() {\n\n return Some(p.path().to_str()?.to_string());\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 18, "score": 93790.73753542744 }, { "content": "pub fn extract_package_and_wait(dir: &str, file_name: &str) -> XResult<()> {\n\n let mut cmd: Command;\n\n if file_name.ends_with(\".zip\") {\n\n cmd = Command::new(\"unzip\");\n\n } else if file_name.ends_with(\".tar.gz\") {\n\n cmd = Command::new(\"tar\");\n\n cmd.arg(\"-xzvf\");\n\n } else {\n\n return simple_error!(\"Unknown file type: {}\", file_name);\n\n }\n\n cmd.arg(file_name).current_dir(dir).spawn()?.wait()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 20, "score": 92399.18562117837 }, { "content": "pub fn init_home_dir(home_sub_dir: &str) {\n\n if let Ok(user_home_dir) = get_user_home_dir(home_sub_dir) {\n\n init_dir(&user_home_dir);\n\n }\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 21, 
"score": 89787.411586111 }, { "content": "pub fn get_local_builder_home_sub(builder_name: BuilderName, local_builder_home_dir: &str) -> Option<BuilderDesc> {\n\n match get_local_builder_home_sub_first_sub_dir(local_builder_home_dir) {\n\n None => {\n\n failure!(\"Cannot find builder home in: {}\", local_builder_home_dir);\n\n None\n\n },\n\n Some(p) => Some(BuilderDesc{name: builder_name, home: p, bin: None}),\n\n }\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 22, "score": 88059.19326846318 }, { "content": "pub fn print_version() {\n\n println!(r#\"buildj {}{}{}\n\nBuild date: {}\n\n\n\nCopyright (C) 2019-{} Hatter Jiang.\n\nLicense MIT <{}https://opensource.org/licenses/MIT{}>\n\n\n\nOfficial website: {}https://buildj.ruststack.org/{}\n\n\"#, BUDERJ_VER,\n\n get_short_git_hash().map(|h| format!(\" - {}\", h)).unwrap_or(\"\".into()),\n\n get_full_git_hash().map(|h| format!(\"\\nFull git commit hash: {}{}{}\", util_term::BOLD, h, util_term::END)).unwrap_or(\"\".into()),\n\n BUILD_DATE,\n\n *BUILD_YEAR,\n\n util_term::UNDER, util_term::END,\n\n util_term::UNDER, util_term::END);\n\n}\n\n\n", "file_path": "src/misc.rs", "rank": 23, "score": 87555.42292634414 }, { "content": "pub fn get_url_content(url: &str) -> XResult<String> {\n\n if *VERBOSE {\n\n debugging!(\"Get URL: {}\", url);\n\n }\n\n Ok(reqwest::blocking::get(url)?.text()?)\n\n}\n", "file_path": "src/http.rs", "rank": 24, "score": 86241.0388065227 }, { "content": "pub fn get_user_home_dir(dir: &str) -> XResult<String> {\n\n Ok(format!(\"{}/{}\", get_user_home()?, dir))\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 25, "score": 83319.09121740086 }, { "content": "pub fn get_tool_package_secret() -> XResult<String> {\n\n if (*AUTH_TOKEN).is_some() {\n\n if *VERBOSE {\n\n debugging!(\"Use auth token from env 'BUILDJ_AUTH_TOKEN'\");\n\n }\n\n return Ok((*AUTH_TOKEN).as_ref().unwrap().clone());\n\n }\n\n\n\n let standard_config_file = local_util::get_user_home_dir(STANDARD_CONFIG_JSON)?;\n\n let 
standard_config_json = fs::read_to_string(&standard_config_file)?;\n\n let standard_config_object = json::parse(&standard_config_json)?;\n\n\n\n let build_js_auth_token = &standard_config_object[\"build.js\"][\"auth_token\"];\n\n \n\n if build_js_auth_token.is_null() {\n\n simple_error!(\"Standard json#build.js#auth_token is null.\")\n\n } else {\n\n Ok(build_js_auth_token.to_string())\n\n }\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 26, "score": 82129.00108380473 }, { "content": "pub fn download_url(url: &str, dest: &mut File) -> XResult<()> {\n\n if *VERBOSE {\n\n debugging!(\"Start download URL: {}\", url);\n\n }\n\n let mut response = reqwest::blocking::get(url)?;\n\n let header_content_length: i64 = match response.headers().get(\"content-length\") {\n\n None => -1_i64, Some(len_value) => {\n\n let len_str = len_value.to_str().unwrap_or_else(|err| {\n\n warning!(\"Get content length for {:?}, error: {}\", len_value, err);\n\n \"-1\"\n\n });\n\n len_str.parse::<i64>().unwrap_or_else(|err| {\n\n warning!(\"Get content length for {:?}, error: {}\", len_value, err);\n\n -1\n\n })\n\n },\n\n };\n\n if *VERBOSE {\n\n debugging!(\"Content-Length: {}\", header_content_length);\n\n }\n\n util_io::copy_io_default(&mut response, dest, header_content_length)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 27, "score": 81803.0886025699 }, { "content": "pub fn calc_file_digest(digest: &mut dyn Digest, digest_alg: &str, file_name: &str) -> XResult<String> {\n\n let mut buf = [0u8; DEFAULT_BUF_SIZE];\n\n let mut f = File::open(file_name)?;\n\n let file_len = f.metadata().map(|md| md.len() as i64).unwrap_or(-1_i64);\n\n let mut print_status_context = PrintStatusContext::default();\n\n let mut written = 0_i64;\n\n loop {\n\n let len = match f.read(&mut buf) {\n\n Ok(0) => { println!(); return Ok(digest.result_str()); },\n\n Ok(len) => len,\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n\n Err(e) => return Err(Box::new(e)),\n\n };\n\n 
digest.input(&buf[..len]);\n\n written += len as i64;\n\n util_io::print_status_last_line(&format!(\"Calc {}\", digest_alg), file_len, written, &mut print_status_context);\n\n }\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 28, "score": 79778.30820989885 }, { "content": "fn do_with_buildin_arg_java_cmd(first_arg: &str, args: &[String]) {\n\n let first_num_pos = first_arg.chars().position(|c| c >= '0' && c <= '9');\n\n let (cmd, ver) = match first_num_pos {\n\n None => {\n\n failure!(\"Java command version is not assigned!\");\n\n return;\n\n },\n\n Some(pos) => {\n\n (&first_arg[3..pos], &first_arg[pos..])\n\n },\n\n };\n\n match get_java_home(ver) {\n\n None => failure!(\"Assigned java version not found: {}\", ver),\n\n Some(java_home) => {\n\n success!(\"Find java home: {}\", java_home);\n\n let java_bin = &format!(\"{}/bin/{}\", java_home, cmd);\n\n if fs::metadata(java_bin).is_ok() {\n\n success!(\"Command found: {}\", java_bin);\n\n } else {\n\n failure!(\"Command {} not exists\", java_bin);\n", "file_path": "src/main.rs", "rank": 30, "score": 75498.80777559898 }, { "content": "fn do_with_buildin_arg_builder(first_arg: &str, args: &[String], builder_name: &str) {\n\n let builder_version = &first_arg[(builder_name.len() + 3)..];\n\n if builder_version.is_empty() {\n\n failure!(\"Builder version is not assigned!\");\n\n return;\n\n }\n\n let mut has_java = false;\n\n let mut java_home = String::new();\n\n if args.len() > 2 && args[2].starts_with(\"--java\") {\n\n has_java = true;\n\n let java_version = &args[2][6..];\n\n if !java_version.is_empty() {\n\n java_home = match get_java_home(java_version) {\n\n Some(h) => h, None => {\n\n failure!(\"Assigned java version not found: {}\", java_version);\n\n return;\n\n },\n\n };\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 31, "score": 63291.47227791147 }, { "content": "pub fn print_usage() {\n\n println!(\"\\n{}\", include_str!(\"usage.txt\"));\n\n}\n\n\n", "file_path": "src/misc.rs", "rank": 32, 
"score": 63039.66359048837 }, { "content": "fn do_with_buildin_arg_config(_first_arg: &str, args: &[String]) {\n\n information!(\"Current config file: ~/{}\", tool::STANDARD_CONFIG_JSON);\n\n if args.len() <= 2 {\n\n failure!(\"No arguments, get or set.\");\n\n return;\n\n }\n\n match args[2].as_str() {\n\n \"get\" => match get_tool_package_secret() {\n\n Err(_) => warning!(\"No config found.\"),\n\n Ok(secret) => success!(\"Config secret: {}\", secret),\n\n },\n\n \"set\" => {\n\n if args.len() < 4 {\n\n failure!(\"Need secret for set, :::config set <secret>\");\n\n } else {\n\n match set_tool_package_secret(&args[3]) {\n\n Err(err) => failure!(\"Config secret failed: {}\", err),\n\n Ok(_) => success!(\"Config secret success.\"),\n\n }\n\n }\n\n },\n\n arg => failure!(\"Unknown argument: {}\", arg)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 33, "score": 56199.10650737405 }, { "content": "fn do_with_buildin_arg_gradle(first_arg: &str, args: &[String]) {\n\n do_with_buildin_arg_builder(first_arg, args, \"gradle\")\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 56199.10650737405 }, { "content": "fn do_with_buildin_arg_maven(first_arg: &str, args: &[String]) {\n\n do_with_buildin_arg_builder(first_arg, args, \"maven\")\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 35, "score": 56199.10650737405 }, { "content": "fn do_with_buildin_arg_ddd(first_arg: &str, args: &[String]) {\n\n let build_json_object = match read_build_json_object() {\n\n Some(object) => object, None => return,\n\n };\n\n let build_json_object_xrun = &build_json_object[\"xRuns\"][&first_arg[3..]];\n\n if build_json_object_xrun.is_null() {\n\n failure!(\"Cannot find build.json#xRuns#{}\", &first_arg[3..]);\n\n return;\n\n }\n\n let cmd_name = build_json_object_xrun[0].to_string();\n\n let mut cmd = Command::new(&cmd_name);\n\n cmd.current_dir(\".\");\n\n let mut cmd_args = vec![];\n\n for i in 1..build_json_object_xrun.len() {\n\n if *VERBOSE {\n\n 
cmd_args.push(build_json_object_xrun[i].to_string());\n\n }\n\n cmd.arg(build_json_object_xrun[i].to_string());\n\n }\n\n for arg in args.iter().skip(3) {\n", "file_path": "src/main.rs", "rank": 36, "score": 56199.10650737405 }, { "content": "pub fn calc_sha256(d: &[u8]) -> String {\n\n let mut sha256 = Sha256::new();\n\n sha256.input(d);\n\n sha256.result_str()\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 37, "score": 55763.16187115859 }, { "content": "pub fn create_build_json(args: &[String]) {\n\n if find_build_json_in_current().is_some() {\n\n failure!(\"File exits: {}\", BUILD_JSON);\n\n return;\n\n }\n\n\n\n let mut java_version = \"\";\n\n let mut builder = \"\";\n\n let mut builder_version = \"\";\n\n for arg in args {\n\n if arg.starts_with(\"--java\") && arg.len() > 6 {\n\n java_version = &arg.as_str()[6..];\n\n } else if arg.starts_with(\"--maven\") && arg.len() > 7 {\n\n builder = \"maven\";\n\n builder_version = &arg.as_str()[7..];\n\n } else if arg.starts_with(\"--gradle\") && arg.len() > 8 {\n\n builder = \"gradle\";\n\n builder_version = &arg.as_str()[8..];\n\n }\n\n }\n", "file_path": "src/build_json.rs", "rank": 38, "score": 55574.902902418035 }, { "content": "pub fn find_build_json() -> Option<String> {\n\n if let Some(p) = find_build_json_in_current() {\n\n return Some(p);\n\n }\n\n match find_build_json_in_parents() {\n\n Some(p) => {\n\n warning!(\"Cannot find {} in current dir, find: {}\", BUILD_JSON, p);\n\n Some(p)\n\n },\n\n None => {\n\n failure!(\"Cannot find {}\", BUILD_JSON);\n\n None\n\n },\n\n }\n\n}\n", "file_path": "src/build_json.rs", "rank": 39, "score": 55574.902902418035 }, { "content": "pub fn find_build_json_in_parents() -> Option<String> {\n\n let mut path = fs::canonicalize(\".\").ok()?;\n\n let mut loop_count = 0_usize;\n\n loop {\n\n loop_count += 1_usize;\n\n if loop_count > 100_usize {\n\n failure!(\"Find build.json loop more than 100 loop!\");\n\n return None;\n\n }\n\n\n\n let p = path.to_str()?;\n\n if p == 
\"/\" {\n\n return None;\n\n }\n\n let p_build_json = format!(\"{}/{}\", p, BUILD_JSON);\n\n let path_build_json = Path::new(&p_build_json);\n\n if path_build_json.exists() {\n\n return Some(p_build_json);\n\n }\n\n path = path.parent()?.to_path_buf();\n\n }\n\n}\n\n\n", "file_path": "src/build_json.rs", "rank": 40, "score": 54480.21699720505 }, { "content": "pub fn get_user_home() -> XResult<String> {\n\n match dirs::home_dir() {\n\n Some(home_dir_o) => match home_dir_o.to_str() {\n\n Some(home_dir_str) => Ok(home_dir_str.to_string()),\n\n None => simple_error!(\"Home dir not found!\"),\n\n },\n\n None => simple_error!(\"Home dir not found!\"),\n\n }\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 41, "score": 54480.21699720505 }, { "content": "pub fn find_build_json_in_current() -> Option<String> {\n\n let path = fs::canonicalize(\".\").ok()?;\n\n let p_build_json = format!(\"{}/{}\", path.to_str()?, BUILD_JSON);\n\n let path_build_json = Path::new(&p_build_json);\n\n iff!(path_build_json.exists(), Some(p_build_json), None)\n\n}\n\n\n", "file_path": "src/build_json.rs", "rank": 42, "score": 54480.21699720505 }, { "content": "fn get_final_args(args: &[String], build_json_object: &json::JsonValue) -> Option<Vec<String>> {\n\n let mut final_args:Vec<String> = vec![];\n\n if args.len() > 1 {\n\n let arg1 = &args[1];\n\n if arg1.starts_with(\"::\") {\n\n let a_cmd = &arg1[2..];\n\n let a_cmd_j = &build_json_object[\"xArgs\"][a_cmd];\n\n if a_cmd_j.is_null() {\n\n warning!(\"xArgs argument not found: {}\", a_cmd);\n\n if args.len() == 2 {\n\n failure!(\"Only one xArgs argument, exit.\");\n\n return None;\n\n }\n\n final_args.push(arg1.to_string());\n\n } else {\n\n for a_j in a_cmd_j.members() {\n\n if ! 
a_j.is_null() {\n\n final_args.push(a_j.as_str().unwrap().to_string());\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 43, "score": 52865.86787678327 }, { "content": "pub fn run_command_and_wait(cmd: &mut Command) -> XResult<()> {\n\n cmd.spawn()?.wait()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/local_util.rs", "rank": 44, "score": 50616.859004736994 }, { "content": "fn process_envs(the_env: &mut HashMap<String, String>, build_json_object: &json::JsonValue) {\n\n let envs_j = &build_json_object[\"envs\"];\n\n if ! envs_j.is_null() {\n\n for env in envs_j.members() {\n\n if *VERBOSE {\n\n debugging!(\"Env: {}\", env);\n\n }\n\n let (env_k, env_v) = (&env[0], &env[1]);\n\n if let (Some(env_k_str), Some(env_v_str)) = (env_k.as_str(), env_v.as_str()) {\n\n the_env.insert(env_k_str.to_owned(), env_v_str.to_owned());\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 42359.401461523295 }, { "content": "fn read_build_json_object_from_env() -> Option<json::JsonValue> {\n\n if (*JAVA_VERSION).is_some() || (*BUILDER_VERSION).is_some() {\n\n let mut build_json_object = object!{};\n\n if (*JAVA_VERSION).is_some() {\n\n build_json_object[\"java\"] = (*JAVA_VERSION).as_ref().unwrap().to_string().into();\n\n }\n\n if (*BUILDER_VERSION).is_some() {\n\n let builder_version = (*BUILDER_VERSION).as_ref().unwrap().to_string();\n\n if builder_version.starts_with(\"gradle\") {\n\n build_json_object[\"builder\"] = object! {\n\n \"name\" => \"gradle\",\n\n \"version\" => builder_version[6..],\n\n };\n\n } else if builder_version.starts_with(\"maven\") {\n\n build_json_object[\"builder\"] = object! 
{\n\n \"name\" => \"maven\",\n\n \"version\" => builder_version[5..],\n\n };\n\n } else {\n\n warning!(\"Unknown builder: {}\", builder_version);\n", "file_path": "src/main.rs", "rank": 46, "score": 40432.12237089478 }, { "content": "fn get_java_and_builder(build_json_object: &json::JsonValue) -> Option<(String, BuilderDesc)> {\n\n let java_version_j = &build_json_object[\"java\"];\n\n let builder_name_j = &build_json_object[\"builder\"][\"name\"];\n\n let builder_version_j = &build_json_object[\"builder\"][\"version\"];\n\n\n\n if java_version_j.is_null() {\n\n failure!(\"Java version is not assigned!\");\n\n return None;\n\n }\n\n if builder_name_j.is_null() || builder_version_j.is_null() {\n\n failure!(\"Builder name or version is not assigned!\");\n\n return None;\n\n }\n\n let java_version = java_version_j.as_str().unwrap();\n\n let builder_name = builder_name_j.as_str().unwrap();\n\n let builder_version = builder_version_j.as_str().unwrap();\n\n if *VERBOSE {\n\n debugging!(\"Java version: {}\", java_version);\n\n debugging!(\"Builder name: {}\", builder_name);\n\n debugging!(\"Builder version: {}\", builder_version);\n", "file_path": "src/main.rs", "rank": 47, "score": 35859.55538889259 }, { "content": "use std::{fs::{self, File}, path::Path};\n\nuse rust_util::{ XResult, util_os};\n\nuse crate::{http, local_util, misc::{AUTH_TOKEN, VERBOSE, NOAUTH}};\n\n\n\nconst M2_HOME: &str = \"M2_HOME\";\n\nconst MAVEN_HOME: &str = \"MAVEN_HOME\";\n\nconst GRADLE_HOME: &str = \"GRADLE_HOME\";\n\n\n\npub const LOCAL_BUILDER_HOME_BASE_DIR: &str = \".jssp/builder\";\n\npub const STANDARD_CONFIG_JSON: &str = \".standard_config.json\";\n\nconst TOOL_PACKAGE_DETAIL_URL: &str = \"https://hatter.ink/tool/query_tool_by_name_version.json\";\n\nconst TOOL_PACKAGE_DETAIL_URL_WITHOUT_AUTH: &str = \"https://hatter.ink/tool/query_tool_by_name_version_without_auth.json\";\n\n\n\n#[derive(Clone, Copy)]\n\npub enum BuilderName {\n\n Maven,\n\n Gradle,\n\n}\n\n\n\npub struct BuilderDesc 
{\n", "file_path": "src/tool.rs", "rank": 48, "score": 27221.198512853247 }, { "content": " pub name: BuilderName,\n\n pub home: String,\n\n pub bin: Option<String>,\n\n}\n\n\n\nimpl BuilderDesc {\n\n pub fn get_builder_home_name(&self) -> Vec<String> {\n\n match self.name {\n\n BuilderName::Maven => vec![M2_HOME.to_string(), MAVEN_HOME.to_string()],\n\n BuilderName::Gradle => vec![GRADLE_HOME.to_string()],\n\n }\n\n }\n\n\n\n pub fn get_builder_bin(&self) -> String {\n\n match &self.bin {\n\n Some(b) => b.clone(),\n\n None => match self.name {\n\n BuilderName::Maven => format!(\"{}/bin/mvn\", self.home.clone()),\n\n BuilderName::Gradle => format!(\"{}/bin/gradle\", self.home.clone()),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 49, "score": 27219.060649025512 }, { "content": " return simple_error!(\"Required version not match, {}: {} vs {}\", name, version, &v);\n\n }\n\n\n\n let mut target_base_dir = String::with_capacity(512);\n\n target_base_dir.push_str(base_dir);\n\n if dir_with_name {\n\n target_base_dir.push_str(\"/\");\n\n target_base_dir.push_str(&format!(\"{}-{}\", n, v));\n\n }\n\n local_util::init_dir(&target_base_dir);\n\n let target_file_name = format!(\"{}/{}\", &target_base_dir, name.to_string());\n\n\n\n information!(\"Start download: {} -> {}\", &url.to_string(), &target_file_name);\n\n http::download_url(&url.to_string(), &mut File::create(&target_file_name)?)?;\n\n\n\n information!(\"Start verify integrity: {} ...\", &target_file_name);\n\n if local_util::verify_file_integrity(&integrity.to_string(), &target_file_name)? 
{\n\n success!(\"Verify integrity success.\");\n\n } else {\n\n return simple_error!(\"Verify integrity failed!\");\n\n }\n\n\n\n success!(\"Start extract file: {}\", &target_file_name);\n\n local_util::extract_package_and_wait(&target_base_dir, &name.to_string())?;\n\n\n\n Ok(true)\n\n}\n", "file_path": "src/tool.rs", "rank": 50, "score": 27216.45085482774 }, { "content": " url.push_str(TOOL_PACKAGE_DETAIL_URL);\n\n url.push_str(\"?\");\n\n url.push_str(\"__auth_token=\");\n\n url.push_str(&urlencoding::encode(&secret));\n\n },\n\n };\n\n url.push_str(\"&name=\");\n\n url.push_str(&urlencoding::encode(name));\n\n url.push_str(\"&ver=\");\n\n url.push_str(&urlencoding::encode(version));\n\n Ok(http::get_url_content(url.as_str())?)\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 51, "score": 27215.63423349268 }, { "content": " standard_config_object[\"build.js\"] = object! {\n\n \"auth_token\" => secret,\n\n };\n\n } else {\n\n standard_config_object[\"build.js\"][\"auth_token\"] = secret.into();\n\n }\n\n match fs::write(&standard_config_file, json::stringify_pretty(standard_config_object, 4)) {\n\n Ok(_) => Ok(()),\n\n Err(err) => simple_error!(\"Write config failed: {}, error message: {}\", &standard_config_file, err),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tool.rs", "rank": 52, "score": 27214.43160047019 }, { "content": "fn main() {\n\n let output = Command::new(\"git\").args(&[\"rev-parse\", \"HEAD\"]).output().unwrap();\n\n let git_hash = String::from_utf8(output.stdout).unwrap();\n\n println!(\"cargo:rustc-env=GIT_HASH={}\", git_hash);\n\n let date_output = Command::new(\"date\").args(&[\"+%Y-%m-%dT%H:%M:%S%z\"]).output().unwrap();\n\n let date = String::from_utf8(date_output.stdout).unwrap();\n\n println!(\"cargo:rustc-env=BUILD_DATE={}\", date);\n\n let date_year_output = Command::new(\"date\").args(&[\"+%Y\"]).output().unwrap();\n\n let date_year = String::from_utf8(date_year_output.stdout).unwrap();\n\n 
println!(\"cargo:rustc-env=BUILD_YEAR={}\", date_year)\n\n}\n", "file_path": "build.rs", "rank": 53, "score": 26420.043670512616 }, { "content": "fn main() {\n\n match get_short_git_hash() {\n\n None => information!(\"{} - version {}\", BUILDJ, BUDERJ_VER),\n\n Some(shot_git_hash) => information!(\"{} - version {} - {}\", BUILDJ, BUDERJ_VER, &shot_git_hash),\n\n }\n\n\n\n if *VERBOSE {\n\n if let Some(full_git_hash) = get_full_git_hash() {\n\n debugging!(\"Full GIT_HASH: {}\", full_git_hash);\n\n }\n\n debugging!(\"Build date: {}\", BUILD_DATE);\n\n }\n\n\n\n let args = local_util::get_args_as_vec();\n\n information!(\"Arguments: {:?}\", args);\n\n\n\n if (! *NOBUILDIN) && local_util::is_buildin_args(&args) {\n\n do_with_buildin_args(&args);\n\n return;\n\n }\n", "file_path": "src/main.rs", "rank": 54, "score": 25576.037452615143 }, { "content": "fn do_with_buildin_args(args: &[String]) {\n\n let first_arg = args.get(1).unwrap();\n\n match first_arg.as_str() {\n\n \":::\" | \":::help\" => print_usage(),\n\n \":::version\" => print_version(),\n\n \":::create\" => create_build_json(args),\n\n \":::config\" => do_with_buildin_arg_config(first_arg, args),\n\n a if a.starts_with(\":::jar\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::java\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jinfo\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jlink\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::keytool\")=> do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jaotc\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jcmd\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jconsole\")=> do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jdb\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jmap\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jps\") => 
do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jstack\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jstat\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::jimage\") => do_with_buildin_arg_java_cmd(a, args),\n\n a if a.starts_with(\":::maven\") => do_with_buildin_arg_maven (a, args),\n\n a if a.starts_with(\":::gradle\") => do_with_buildin_arg_gradle (a, args),\n\n a if a.starts_with(\"...\") => do_with_buildin_arg_ddd (a, args),\n\n _ => failure!(\"Unknown args: {:?}\", &args),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 55, "score": 22879.44511854835 }, { "content": "fn read_build_json_object() -> Option<json::JsonValue> {\n\n if let Some(o) = read_build_json_object_from_env() {\n\n return Some(o);\n\n }\n\n\n\n let build_json = find_build_json()?;\n\n success!(\"Find {} @ {}\", BUILD_JSON, build_json);\n\n\n\n let build_json_content = fs::read_to_string(build_json).map_err(|err| {\n\n failure!(\"Read {} failed: {}\", BUILD_JSON, err);\n\n err\n\n }).ok()?;\n\n json::parse(&build_json_content).map_err(|err| {\n\n failure!(\"Parse JSON failed: {}\", err);\n\n err\n\n }).ok()\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 56, "score": 20376.87181050191 }, { "content": "use std::env;\n\nuse rust_util::{util_env, util_term};\n\n\n\npub const BUILDJ: &str = \"buildj\";\n\npub const BUDERJ_VER: &str = env!(\"CARGO_PKG_VERSION\");\n\npub const BUILD_DATE: &str = env!(\"BUILD_DATE\");\n\nconst GIT_HASH: &str = env!(\"GIT_HASH\");\n\n\n\n\n\nlazy_static! 
{\n\n pub static ref VERBOSE: bool = util_env::is_env_on(\"BUILDJ_VERBOSE\");\n\n pub static ref NOAUTH: bool = util_env::is_env_on(\"BUILDJ_NOAUTH\");\n\n pub static ref NOBUILDIN: bool = util_env::is_env_on(\"BUILDJ_NOBUILDIN\");\n\n pub static ref AUTH_TOKEN: Option<String> = env::var(\"BUILDJ_AUTH_TOKEN\").ok();\n\n pub static ref JAVA_VERSION: Option<String> = env::var(\"BUILDJ_JAVA\").ok();\n\n pub static ref BUILDER_VERSION: Option<String> = env::var(\"BUILDJ_BUILDER\").ok();\n\n pub static ref BUILD_YEAR: String = env::var(\"BUILD_YEAR\").unwrap_or_else(|_| \"unknown\".to_string());\n\n}\n\n\n", "file_path": "src/misc.rs", "rank": 57, "score": 15.591750748394208 }, { "content": "use std::{fs, path::Path};\n\nuse rust_util::XResult;\n\n\n\nuse crate::http::get_url_content;\n\nuse crate::misc::VERBOSE;\n\n\n\npub const BUILD_JSON: &str = \"build.json\";\n\n\n\nconst GET_ARCHIVER_VERSION_URL: &str= \"https://hatter.ink/repo/archive_info_version.json\";\n\n\n", "file_path": "src/build_json.rs", "rank": 59, "score": 9.967510529068278 }, { "content": " }\n\n\n\n let java_home = match get_java_home(java_version) {\n\n Some(h) => h, None => {\n\n failure!(\"Assigned java version not found: {}\", java_version);\n\n return None;\n\n },\n\n };\n\n let builder_desc = match tool::get_builder_home(builder_name, builder_version) {\n\n Some(h) => h, None => {\n\n failure!(\"Assigned builder: {}, version: {} not found.\", builder_name, builder_version);\n\n return None;\n\n },\n\n };\n\n Some((java_home, builder_desc))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 60, "score": 9.582107495942648 }, { "content": " if java_version.is_empty() || builder.is_empty() || builder_version.is_empty() {\n\n failure!(\"Args java version, builder or builder version is not assigned or format error.\");\n\n return;\n\n }\n\n let mut build_json_object = object!{\n\n \"java\" => java_version,\n\n \"builder\" => object! 
{\n\n \"name\" => builder,\n\n \"version\" => builder_version,\n\n },\n\n };\n\n match get_archive_version(\"me.hatter\", \"commons\") {\n\n Err(err) => failure!(\"Get me.hatter:commons version failed: {}\", err),\n\n Ok(ver) => build_json_object[\"repo\"] = object! {\n\n \"dependencies\" => array! [\n\n format!(\"me.hatter:commons:{}\", ver).as_str()\n\n ]\n\n },\n\n }\n\n match fs::write(BUILD_JSON, json::stringify_pretty(build_json_object, 4)) {\n\n Ok(_) => success!(\"Write file success: {}\", BUILD_JSON),\n\n Err(err) => failure!(\"Write file failed: {}, error message: {}\", BUILD_JSON, err),\n\n }\n\n}\n\n\n", "file_path": "src/build_json.rs", "rank": 61, "score": 9.223835614616728 }, { "content": "#[macro_use] extern crate json;\n\n#[macro_use] extern crate lazy_static;\n\n#[macro_use] extern crate rust_util;\n\n\n\nuse std::fs;\n\nuse std::collections::HashMap;\n\nuse std::process::{self, Command};\n\n\n\npub mod jdk;\n\npub mod local_util;\n\npub mod http;\n\npub mod tool;\n\npub mod build_json;\n\npub mod misc;\n\n\n\nuse rust_util::util_cmd;\n\nuse tool::*;\n\nuse jdk::*;\n\nuse build_json::*;\n\nuse misc::*;\n\n\n\n\n", "file_path": "src/main.rs", "rank": 63, "score": 8.236388410133964 }, { "content": " let builder_desc = match tool::get_builder_home(builder_name, builder_version) {\n\n Some(h) => h, None => {\n\n failure!(\"Assigned builder: {}, version: {} not found.\", builder_name, builder_version);\n\n return;\n\n },\n\n };\n\n if has_java {\n\n success!(\"JAVA_HOME = {}\", java_home);\n\n }\n\n success!(\"BUILDER_HOME = {}\", &builder_desc.home);\n\n\n\n let mut new_env = iff!(has_java, get_env_with_java_home(&java_home), get_env());\n\n for builder_home_name in builder_desc.get_builder_home_name() {\n\n new_env.insert(builder_home_name, builder_desc.home.clone());\n\n }\n\n\n\n let mut cmd = Command::new(builder_desc.get_builder_bin());\n\n cmd.envs(&new_env);\n\n let from_index = iff!(has_java, 3, 2);\n\n for arg in args.iter().skip(from_index) 
{\n\n cmd.arg(&arg);\n\n }\n\n if let Err(err) = util_cmd::run_command_and_wait(&mut cmd) {\n\n failure!(\"Run build command failed: {}\", err);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 64, "score": 8.08224004139864 }, { "content": "use std::env;\n\nuse std::path::Path;\n\nuse std::process::Command;\n\nuse std::fs::{self, File};\n\nuse std::io::{Read, ErrorKind};\n\nuse rust_util::XResult;\n\nuse rust_util::util_io::{self, DEFAULT_BUF_SIZE, PrintStatusContext};\n\nuse crypto::{digest::Digest, md5::Md5, sha1::Sha1, sha2::{Sha256, Sha512}};\n\n\n", "file_path": "src/local_util.rs", "rank": 66, "score": 6.368766171726843 }, { "content": " if *VERBOSE {\n\n debugging!(\"Init home dir: {}\", tool::LOCAL_BUILDER_HOME_BASE_DIR);\n\n }\n\n local_util::init_home_dir(tool::LOCAL_BUILDER_HOME_BASE_DIR);\n\n if *VERBOSE {\n\n debugging!(\"Init home dir: {}\", jdk::LOCAL_JAVA_HOME_BASE_DIR);\n\n }\n\n local_util::init_home_dir(jdk::LOCAL_JAVA_HOME_BASE_DIR);\n\n\n\n let build_json_object = match read_build_json_object() {\n\n Some(object) => object, None => return,\n\n };\n\n\n\n let (java_home, builder_desc) = match get_java_and_builder(&build_json_object) {\n\n Some((java_home, builder_desc)) => (java_home, builder_desc), None => return,\n\n };\n\n \n\n success!(\"JAVA_HOME = {}\", java_home);\n\n success!(\"BUILDER_HOME = {}\", &builder_desc.home);\n\n\n", "file_path": "src/main.rs", "rank": 67, "score": 5.941658712463992 }, { "content": "# buildj\n\nbuildj - java build tool, website: [https://buildj.ruststack.org/](https://buildj.ruststack.org/)\n\n\n\n## Install\n\n\n\n```\n\ncargo install --git https://github.com/jht5945/buildj [--force]\n\n\n\nOR\n\n\n\ncargo install buildj [--force]\n\n```\n\n\n\n## Usage\n\n\n\n### Help\n\n```\n\n$ buildj :::\n\n[INFO] buildj - version 0.1\n\n[INFO] Arguments: [\"./buildj\", \":::\"]\n\n\n\nbuildj ::: - print this message\n\nbuildj :::help - print this message\n\nbuildj :::create --java<version> --maven<version> - create 
java-version, maven-version project\n\nbuildj :::create --java<version> --gradle<version> - create java-version, gradle-version project\n\nbuildj :::java<version> [-version] - run java with assigned version, e.g. buildj :::java1.8 -version\n\nbuildj :::maven<version> [--java<version>] - run maven with assigned version and java version, e.g. buildj :::maven3.5.2 --java1.8 ARGS\n\nbuildj :::gradle<version> ]--java<version>] - run gradle with assigned version and java version, e.g. buildj :::gradle3.5.1 --java1.8 ARGS\n\nbuildj - run build, run assigned version builder tool\n\n```\n\n\n", "file_path": "README.md", "rank": 68, "score": 5.614771628432539 }, { "content": "Gradle 3.5.1\n\n------------------------------------------------------------\n\n\n\nBuild time: 2017-06-16 14:36:27 UTC\n\nRevision: d4c3bb4eac74bd0a3c70a0d213709e484193e251\n\n\n\nGroovy: 2.4.10\n\nAnt: Apache Ant(TM) version 1.9.6 compiled on June 29 2015\n\nJVM: 1.8.0 (Oracle Corporation 25.0-b70)\n\nOS: Mac OS X 10.14.4 x86_64\n\n```\n\n\n\nCreate build.json\n\n```\n\n$ buildj :::create --java1.8 --maven3.5.1\n\n[INFO] buildj - version 0.1\n\n[INFO] Arguments: [\"./buildj\", \":::create\", \"--java1.8\", \"--maven3.5.1\"]\n\n[OK ] Write file success: build.json\n\n\n\n$ cat build.json \n\n{\n\n \"java\": \"1.8\",\n\n \"builder\": {\n\n \"name\": \"gradle\",\n\n \"version\": \"3.5.1\"\n\n }\n\n}\n\n```\n\n\n\nRun:\n\n```\n\n$ buildj\n\n```\n\n\n\n<br>\n\n\n\nAdd environment in build:\n\n```\n\n{\n\n \"envs\": [\n\n [\"VAR_NAME\", \"VAR_VALUE\"]\n\n ]\n\n}\n\n```\n\n\n\n<br>\n\n\n\nUse xArgs in build:\n\n```\n\n{\n\n xArgs: {\n\n \"build\": [\"clean\", \"install\"]\n\n }\n\n}\n\n```\n\nCommand: `buildj ::build`\n\n\n\n<br>\n\n\n\nUse xRuns in build:\n\n```\n\n{\n\n xRuns: {\n\n \"pub\": [\"./publish\"]\n\n }\n\n}\n\n```\n\nCommand: `buildj ...pub`\n\n\n", "file_path": "README.md", "rank": 69, "score": 5.547143511647683 }, { "content": " return;\n\n }\n\n let mut cmd = Command::new(java_bin);\n\n 
cmd.envs(&get_env_with_java_home(&java_home));\n\n if args.len() > 2 {\n\n cmd.args(&args[2..]);\n\n }\n\n if let Err(err) = util_cmd::run_command_and_wait(&mut cmd) {\n\n failure!(\"Exec java failed: {}\", err);\n\n }\n\n },\n\n };\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 70, "score": 5.3626728029352 }, { "content": " }\n\n }\n\n if *VERBOSE {\n\n debugging!(\"Use env configed build.json: {}\", json::stringify(build_json_object.clone()));\n\n }\n\n success!(\"Find build.json @ENV\");\n\n Some(build_json_object)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 71, "score": 5.127412844689607 }, { "content": " let mut new_env = get_env_with_java_home(&java_home);\n\n for builder_home_name in builder_desc.get_builder_home_name() {\n\n new_env.insert(builder_home_name, builder_desc.home.clone());\n\n }\n\n process_envs(&mut new_env, &build_json_object);\n\n\n\n let mut cmd = Command::new(builder_desc.get_builder_bin());\n\n cmd.envs(&new_env);\n\n\n\n let final_args = match get_final_args(&args, &build_json_object) {\n\n Some(fa) => fa, None => return,\n\n };\n\n if *VERBOSE {\n\n debugging!(\"Final arguments: {:?}\", &final_args);\n\n }\n\n for f_arg in final_args {\n\n cmd.arg(f_arg);\n\n }\n\n if *VERBOSE {\n\n debugging!(\"-----BEGIN ENVIRONMENT VARIABLES-----\");\n", "file_path": "src/main.rs", "rank": 72, "score": 5.097068064118939 }, { "content": "### Run Java\n\n```\n\n$ buildj :::java9 -version\n\n[INFO] buildj - version 0.1\n\n[INFO] Arguments: [\"./buildj\", \":::java9\", \"-version\"]\n\n[OK ] Find java home: /Library/Java/JavaVirtualMachines/jdk-9.0.4.jdk/Contents/Home\n\njava version \"9.0.4\"\n\nJava(TM) SE Runtime Environment (build 9.0.4+11)\n\nJava HotSpot(TM) 64-Bit Server VM (build 9.0.4+11, mixed mode)\n\n```\n\n\n\nRun Maven:\n\n```\n\n$ buildj :::maven3.5.2 -version\n\n[INFO] buildj - version 0.1\n\n[INFO] Arguments: [\"./buildj\", \":::maven3.5.2\", \"-version\"]\n\n[OK ] BUILDER_HOME = 
/Users/hatterjiang/.jssp/builder/maven-3.5.2/apache-maven-3.5.2\n\nApache Maven 3.5.2 (138edd61fd100ec658bfa2d307c43b76940a5d7d; 2017-10-18T15:58:13+08:00)\n\nMaven home: /Users/hatterjiang/.jssp/builder/maven-3.5.2/apache-maven-3.5.2\n\nJava version: 1.8.0, vendor: Oracle Corporation\n\nJava home: /Library/Java/JavaVirtualMachines/jdk1.8.0.jdk/Contents/Home/jre\n\nDefault locale: en_US, platform encoding: UTF-8\n\nOS name: \"mac os x\", version: \"10.14.4\", arch: \"x86_64\", family: \"mac\"\n\n```\n\n\n\n### Run Gradle\n\n```\n\n$ buildj :::gradle3.5.1 -version\n\n[INFO] buildj - version 0.1\n\n[INFO] Arguments: [\"./buildj\", \":::gradle3.5.1\", \"-version\"]\n\n[OK ] BUILDER_HOME = /Users/hatterjiang/.jssp/builder/gradle-3.5.1/gradle-3.5.1\n\n\n\n------------------------------------------------------------\n", "file_path": "README.md", "rank": 73, "score": 4.1437682894441314 }, { "content": "use std::fs::File;\n\nuse rust_util::{XResult, util_io};\n\n\n\nuse crate::misc::VERBOSE;\n\n\n", "file_path": "src/http.rs", "rank": 74, "score": 4.120122351619479 }, { "content": " for (k, v) in new_env {\n\n debugging!(\"{}={}\", k, v);\n\n }\n\n debugging!(\"-----END ENVIRONMENT VARIABLES-----\");\n\n }\n\n let exit_status = util_cmd::run_command_and_wait(&mut cmd).unwrap_or_else(|err| {\n\n failure!(\"Run build command failed: {}\", err);\n\n process::exit(-1);\n\n });\n\n\n\n if !exit_status.success() {\n\n if let Some(exit_code) = exit_status.code() {\n\n process::exit(exit_code);\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 75, "score": 2.500807057966769 }, { "content": "use std::process::Command;\n\n\n", "file_path": "build.rs", "rank": 76, "score": 2.080392495403284 }, { "content": " if *VERBOSE {\n\n cmd_args.push(arg.to_string());\n\n }\n\n cmd.arg(arg.to_string());\n\n }\n\n if *VERBOSE {\n\n debugging!(\"Running cmd: {}, args: {:?}\", &cmd_name, cmd_args);\n\n }\n\n if let Err(err) = util_cmd::run_command_and_wait(&mut cmd) {\n\n failure!(\"Run 
xRun command failed: {}\", err);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 77, "score": 1.350814601901654 } ]
Rust
tests/basic_choice.rs
pasa-v2x/asn1rs
e8ab92d96c57e17f7cd82cdf825c0063c95a8430
use asn1rs::prelude::*; use asn1rs::syn::io::UperReader as NewUperReader; use asn1rs::syn::io::UperWriter as NewUperWriter; asn_to_rust!( r"BasicChoice DEFINITIONS AUTOMATIC TAGS ::= BEGIN Basic ::= CHOICE { abc UTF8String, def UTF8String, ghi INTEGER } Extensible ::= CHOICE { abc UTF8String, def INTEGER, ..., -- whatever reserved blubber comment ghi INTEGER, jkl Basic, mno UTF8String } END" ); fn serialize_uper(to_uper: &impl Writable) -> (usize, Vec<u8>) { let mut writer = NewUperWriter::default(); writer.write(to_uper).unwrap(); let bits = writer.bit_len(); (bits, writer.into_bytes_vec()) } fn deserialize_uper<T: Readable>(data: &[u8], bits: usize) -> T { let mut reader = NewUperReader::from_bits(data, bits); reader.read::<T>().unwrap() } fn serialize_and_deserialize_uper<T: Readable + Writable + std::fmt::Debug + PartialEq>( bits: usize, data: &[u8], uper: &T, ) { let serialized = serialize_uper(uper); assert_eq!((bits, data), (serialized.0, &serialized.1[..])); assert_eq!(uper, &deserialize_uper::<T>(data, bits)); } #[test] fn test_extensible_uper() { serialize_and_deserialize_uper(10, &[0x00, 0x00], &Extensible::Abc(String::default())); serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Extensible::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper(18, &[0x40, 0x40, 0x00], &Extensible::Def(0)); serialize_and_deserialize_uper(26, &[0x40, 0x81, 0x4e, 0x40], &Extensible::Def(1337)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x00], &Extensible::Ghi(0)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x1B], &Extensible::Ghi(27)); serialize_and_deserialize_uper( 40, &[0x80_u8, 0x03, 0x02, 0x05, 0x39], &Extensible::Ghi(1337), ); serialize_and_deserialize_uper( 120, &[ 0x82, 0x0d, 0x0c, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x61, 0x67, 0x61, 0x69, 0x6e, 0x21, ], &Extensible::Mno("Hello again!".to_string()), ); } #[test] pub fn test_basic_uper() { 
serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Basic::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper( 106, &[ 0x43, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x18, 0x59, 0xd8, 0x5a, 0x5b, 0x88, 0x40, ], &Basic::Def("Hello again!".to_string()), ); serialize_and_deserialize_uper(26, &[0x80, 0x81, 0x4e, 0x40], &Basic::Ghi(1337)); } #[test] fn test_extensible_choice_inner_complex() { let jkl = Extensible::Jkl(Basic::Ghi(1337)); let (bits, buffer) = serialize_uper(&jkl); let jkl_deserialized = deserialize_uper(&buffer[..], bits); assert_eq!(jkl, jkl_deserialized); } #[test] fn test_basic_variants_parsed() { let _abc = Basic::Abc(String::default()); let _def = Basic::Def(String::default()); let _ghi = Basic::Ghi(123_u64); match Basic::Abc(String::default()) { Basic::Abc(_) | Basic::Def(_) | Basic::Ghi(_) => {} } }
use asn1rs::prelude::*; use asn1rs::syn::io::UperReader as NewUperReader; use asn1rs::syn::io::UperWriter as NewUperWriter; asn_to_rust!( r"BasicChoice DEFINITIONS AUTOMATIC TAGS ::= BEGIN Basic ::= CHOICE { abc UTF8String, def UTF8String, ghi INTEGER } Extensible ::= CHOICE { abc UTF8String, def INTEGER, ..., -- whatever reserved blubber comment ghi INTEGER, jkl Basic, mno UTF8String } END" ); fn serialize_uper(to_uper: &impl Writable) -> (usize, Vec<u8>) { let mut writer = NewUperWriter::default(); writer.write(to_uper).unwrap(); let bits = writer.bit_len(); (bits, writer.into_bytes_vec()) } fn deserialize_uper<T: Readable>(data: &[u8], bits: usize) -> T { let mut reader = NewUperReader::from_bits(data, bits); reader.read::<T>().unwrap() } fn serialize_and_deserialize_uper<T: Readable + Writable + std::fmt::Debug + PartialEq>( bits: usize, data: &[u8], uper: &T, ) { let serialized = serialize_uper(uper); assert_eq!((bits, data), (serialized.0, &serialized.1[..])); assert_eq!(uper, &deserialize_uper::<T>(data, bits)); } #[test] fn test_extensible_uper() { serialize_and_deserialize_uper(10, &[0x00, 0x00], &Extensible::Abc(String::default())); serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Extensible::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper(18, &[0x40, 0x40, 0x00], &Extensible::Def(0)); serialize_and_deserialize_uper(26, &[0x40, 0x81, 0x4e, 0x40], &Extensible::Def(1337)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x00], &Extensible::Ghi(0)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x1B], &Extensible::Ghi(27)); serialize_and_deserialize_uper( 40, &[0x80_u8, 0x03, 0x02, 0x05, 0x39], &Extensible::Ghi(1337), ); serialize_and_deserialize_uper( 120, &[ 0x82, 0x0d, 0x0c, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x61, 0x67, 0x61, 0x69, 0x6e, 0x21, ], &Extensible::Mno("Hello again!".to_string()), ); } #[test] pub fn test_basic_uper() { 
serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Basic::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper( 106, &[ 0x43, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x18, 0x59, 0xd8, 0x5a, 0x5b, 0x88, 0x40, ], &Basic::Def("Hello again!".to_string()), ); serialize_and_deserialize_uper(26, &[0x80, 0x81, 0x4e, 0x40], &Basic::Ghi(1337)); } #[test] fn test_extensible_choice_inner_complex() { let jkl = Extensible::Jkl(Basic::Ghi(1337)); let (bits, buffer) = serialize_uper(&jkl); let jkl_deserialized = deserialize_uper(&buffer[..], bits); assert_eq!(jkl, jkl_deserialized); } #[test] fn test_basic_variants_parsed() {
let _abc = Basic::Abc(String::default()); let _def = Basic::Def(String::default()); let _ghi = Basic::Ghi(123_u64); match Basic::Abc(String::default()) { Basic::Abc(_) | Basic::Def(_) | Basic::Ghi(_) => {} } }
function_block-function_prefix_line
[ { "content": "fn serialize_uper(to_uper: impl Writable) -> (usize, Vec<u8>) {\n\n let mut writer = NewUperWriter::default();\n\n writer.write(&to_uper).unwrap();\n\n let bits = writer.bit_len();\n\n (bits, writer.into_bytes_vec())\n\n}\n\n\n", "file_path": "tests/basic_enumerated.rs", "rank": 2, "score": 375556.3187787541 }, { "content": "fn deserialize_uper<T: Readable>(data: &[u8], bits: usize) -> T {\n\n let mut reader = NewUperReader::from_bits(data, bits);\n\n reader.read::<T>().unwrap()\n\n}\n\n\n", "file_path": "tests/basic_enumerated.rs", "rank": 3, "score": 353998.3507986537 }, { "content": "#[test]\n\npub fn test_basic_uper() {\n\n let mut writer = NewUperWriter::default();\n\n writer.write(&Basic::Abc).unwrap();\n\n writer.write(&Basic::Def).unwrap();\n\n writer.write(&Basic::Ghi).unwrap();\n\n\n\n assert_eq!(\n\n &[\n\n 0b00 << 6 // Abc \n\n | 0b01 << 4 // Def \n\n | 0b10 << 2 // Ghi\n\n ],\n\n writer.byte_content()\n\n );\n\n}\n", "file_path": "tests/basic_enumerated.rs", "rank": 6, "score": 258226.72120962036 }, { "content": "#[test]\n\nfn test_uper_big() {\n\n let mut writer = NewUperWriter::default();\n\n writer.write(&RangedMax(66_000)).unwrap();\n\n let bytes = 66_000_u64.to_be_bytes();\n\n assert_eq!(&[0x03, bytes[5], bytes[6], bytes[7]], writer.byte_content());\n\n}\n\n\n", "file_path": "tests/basic_integer.rs", "rank": 8, "score": 229279.36224505273 }, { "content": "#[test]\n\nfn test_uper_small() {\n\n let mut writer = NewUperWriter::default();\n\n writer.write(&RangedMax(123)).unwrap();\n\n assert_eq!(&[0x01, 123_u8], writer.byte_content());\n\n}\n\n\n", "file_path": "tests/basic_integer.rs", "rank": 9, "score": 229279.36224505273 }, { "content": "#[test]\n\nfn test_serialize_with_uper() {\n\n let p = Potato {\n\n size: 123,\n\n size2: 1234,\n\n size3: 128,\n\n string: String::from(\"where is the content\"),\n\n };\n\n let mut uper = UperWriter::default();\n\n uper.write(&p).unwrap();\n\n assert_eq!(\n\n &[\n\n // 
https://asn1.io/asn1playground/\n\n 0x01, 0x7B, 0x02, 0x04, 0xD2, 0xE8, 0x28, 0xEE, 0xD0, 0xCA, 0xE4, 0xCA, 0x40, 0xD2,\n\n 0xE6, 0x40, 0xE8, 0xD0, 0xCA, 0x40, 0xC6, 0xDE, 0xDC, 0xE8, 0xCA, 0xDC, 0xE8\n\n ],\n\n uper.byte_content()\n\n );\n\n assert_eq!(26 * 8 + 7, uper.bit_len());\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 12, "score": 218829.2813319357 }, { "content": "#[test]\n\nfn topping_test_serialize_with_uper() {\n\n let mut uper = UperWriter::default();\n\n uper.write(&Topping::NotPineapple).unwrap();\n\n uper.write(&Topping::EvenLessPineapple).unwrap();\n\n uper.write(&Topping::NoPineappleAtAll).unwrap();\n\n assert_eq!(&[0x00 | 0x40 >> 2 | 0x80 >> 4], uper.byte_content());\n\n assert_eq!(6, uper.bit_len());\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 13, "score": 214000.57525164742 }, { "content": "fn create_insert_fn(impl_scope: &mut Impl, context_used: bool) -> &mut Function {\n\n impl_scope\n\n .new_fn(&insert_fn_name())\n\n .arg_ref_self()\n\n .vis(\"pub async\")\n\n .arg(\n\n if context_used { \"context\" } else { \"_context\" },\n\n format!(\"&{}::Context<'_>\", MODULE_NAME),\n\n )\n\n .ret(format!(\"Result<i32, {}::PsqlError>\", MODULE_NAME))\n\n}\n\n\n", "file_path": "asn1rs-model/src/gen/rust/async_psql.rs", "rank": 14, "score": 210258.8350385545 }, { "content": "fn create_retrieve_fn(impl_scope: &mut Impl, context_used: bool) -> &mut Function {\n\n impl_scope\n\n .new_fn(&retrieve_fn_name())\n\n .vis(\"pub async\")\n\n .arg(\n\n if context_used { \"context\" } else { \"_context\" },\n\n format!(\"&{}::Context<'_>\", MODULE_NAME),\n\n )\n\n .arg(\"id\", \"i32\")\n\n .ret(format!(\"Result<Self, {}::Error>\", MODULE_NAME))\n\n}\n\n\n", "file_path": "asn1rs-model/src/gen/rust/async_psql.rs", "rank": 15, "score": 210258.8350385545 }, { "content": "fn create_load_fn(impl_scope: &mut Impl, context_used: bool) -> &mut Function {\n\n impl_scope\n\n .new_fn(&load_fn_name())\n\n .vis(\"pub 
async\")\n\n .arg(\n\n if context_used { \"context\" } else { \"_context\" },\n\n format!(\"&{}::Context<'_>\", MODULE_NAME),\n\n )\n\n .arg(\"row\", format!(\"&{}::Row\", MODULE_NAME))\n\n .ret(format!(\"Result<Self, {}::Error>\", MODULE_NAME))\n\n}\n\n\n", "file_path": "asn1rs-model/src/gen/rust/async_psql.rs", "rank": 16, "score": 210258.8350385545 }, { "content": "#[test]\n\nfn test_protobuf() {\n\n let mut buffer = Vec::default();\n\n let writer = &mut buffer as &mut dyn ProtobufWriter;\n\n RangedMax(123).write_protobuf(writer).unwrap();\n\n assert_eq!(&[0x08, 123_u8], &buffer[..]);\n\n\n\n let mut buffer = Vec::default();\n\n let writer = &mut buffer as &mut dyn ProtobufWriter;\n\n RangedMax(66_000).write_protobuf(writer).unwrap();\n\n assert_eq!(&[0x08, 0x80 | 80_u8, 0x80 | 3, 4], &buffer[..]);\n\n}\n", "file_path": "tests/basic_integer.rs", "rank": 18, "score": 195036.5515101035 }, { "content": "fn create_retrieve_many_fn(impl_scope: &mut Impl) -> &mut Function {\n\n impl_scope\n\n .new_fn(&retrieve_many_fn_name())\n\n .vis(\"pub async\")\n\n .arg(\"context\", format!(\"&{}::Context<'_>\", MODULE_NAME))\n\n .arg(\"ids\", \"&[i32]\")\n\n .ret(format!(\"Result<Vec<Self>, {}::Error>\", MODULE_NAME))\n\n}\n\n\n", "file_path": "asn1rs-model/src/gen/rust/async_psql.rs", "rank": 20, "score": 191964.1830581071 }, { "content": "#[test]\n\nfn test_default_range() {\n\n assert_eq!(RangedMax::value_min(), NotRanged::value_min());\n\n assert_eq!(RangedMax::value_max(), NotRanged::value_max());\n\n let _ = NotRanged(123_u64); // does not compile if the inner type differs\n\n}\n\n\n", "file_path": "tests/basic_integer.rs", "rank": 21, "score": 190539.10853300703 }, { "content": "#[test]\n\nfn pizza_test_uper_1() {\n\n let mut uper = UperWriter::default();\n\n let pizza = Pizza {\n\n size: 2,\n\n topping: Topping::NotPineapple,\n\n };\n\n uper.write(&pizza).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(&[0x40], uper.byte_content());\n\n assert_eq!(4, 
uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(pizza, uper.read::<Pizza>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 22, "score": 181559.98172999563 }, { "content": "#[test]\n\nfn test_optional_uper() {\n\n let mut uper = UperWriter::default();\n\n let v = Optional { value: Some(1337) };\n\n uper.write(&v).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(&[0x81, 0x02, 0x9C, 0x80], uper.byte_content());\n\n assert_eq!(3 * 8 + 1, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(v, uper.read::<Optional>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n\n#[asn(sequence)]\n\n#[derive(Debug, PartialOrd, PartialEq)]\n\npub struct CrazyList {\n\n #[asn(sequence_of(option(option(sequence_of(integer)))))]\n\n values: Vec<Option<Option<Vec<u64>>>>,\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 23, "score": 181559.98172999563 }, { "content": "#[test]\n\nfn what_to_eat_test_uper_2() {\n\n let mut uper = UperWriter::default();\n\n let what = WhatToEat::Potato(Potato {\n\n size: 13,\n\n size2: 37,\n\n size3: 42,\n\n string: \"such tasty potato\".to_string(),\n\n });\n\n uper.write(&what).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(\n\n &[\n\n 0x00, 0x86, 0x80, 0x92, 0x9E, 0x11, 0x73, 0x75, 0x63, 0x68, 0x20, 0x74, 0x61, 0x73,\n\n 0x74, 0x79, 0x20, 0x70, 0x6F, 0x74, 0x61, 0x74, 0x6F\n\n ],\n\n uper.byte_content()\n\n );\n\n assert_eq!(23 * 8, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(what, uper.read::<WhatToEat>().unwrap());\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 24, "score": 181559.98172999563 }, { "content": "#[test]\n\nfn what_to_eat_test_uper_1() {\n\n let mut uper = UperWriter::default();\n\n let what = WhatToEat::Pizza(Pizza {\n\n size: 3,\n\n topping: Topping::EvenLessPineapple,\n\n });\n\n 
uper.write(&what).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(&[0xC8], uper.byte_content());\n\n assert_eq!(5, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(what, uper.read::<WhatToEat>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 25, "score": 181559.98172999563 }, { "content": "#[test]\n\nfn pizza_test_uper_3() {\n\n let mut uper = UperWriter::default();\n\n let pizza = Pizza {\n\n size: 3,\n\n topping: Topping::EvenLessPineapple,\n\n };\n\n uper.write(&pizza).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(&[0x90], uper.byte_content());\n\n assert_eq!(4, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(pizza, uper.read::<Pizza>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n\n#[asn(choice)]\n\n#[derive(Debug, PartialOrd, PartialEq)]\n\npub enum WhatToEat {\n\n #[asn(complex(Potato))]\n\n Potato(Potato),\n\n #[asn(complex(Pizza))]\n\n Pizza(Pizza),\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 26, "score": 181559.98172999563 }, { "content": "#[test]\n\nfn pizza_test_uper_2() {\n\n let mut uper = UperWriter::default();\n\n let pizza = Pizza {\n\n size: 1,\n\n topping: Topping::NoPineappleAtAll,\n\n };\n\n uper.write(&pizza).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(&[0x20], uper.byte_content());\n\n assert_eq!(4, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(pizza, uper.read::<Pizza>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 27, "score": 181559.98172999563 }, { "content": "#[test]\n\nfn test_deserialize_with_uper() {\n\n let mut uper = UperReader::from_bits(\n\n vec![\n\n // https://asn1.io/asn1playground/\n\n 0x01, 0x7B, 0x02, 0x04, 0xD2, 0xE8, 0x28, 0xEE, 0xD0, 0xCA, 0xE4, 0xCA, 0x40, 0xD2,\n\n 0xE6, 0x40, 0xE8, 0xD0, 0xCA, 
0x40, 0xC6, 0xDE, 0xDC, 0xE8, 0xCA, 0xDC, 0xE8,\n\n ],\n\n 26 * 8 + 7,\n\n );\n\n let p = uper.read::<Potato>().unwrap();\n\n assert_eq!(\n\n Potato {\n\n size: 123,\n\n size2: 1234,\n\n size3: 128,\n\n string: String::from(\"where is the content\"),\n\n },\n\n p\n\n );\n\n}\n\n\n\n#[asn(enumerated)]\n\n#[derive(Debug, PartialOrd, PartialEq)]\n\npub enum Topping {\n\n NotPineapple,\n\n EvenLessPineapple,\n\n NoPineappleAtAll,\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 28, "score": 181559.98172999563 }, { "content": "#[test]\n\nfn test_flat_list_uper() {\n\n let mut uper = UperWriter::default();\n\n let v = FlatList(vec![13, 37, 42]);\n\n uper.write(&v).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(\n\n &[0x03, 0x01, 0x0D, 0x01, 0x25, 0x01, 0x2A],\n\n uper.byte_content()\n\n );\n\n assert_eq!(7 * 8, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(v, uper.read::<FlatList>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n\n#[asn(transparent)]\n\n#[derive(Debug, PartialOrd, PartialEq)]\n\npub struct Important(#[asn(option(integer))] Option<u64>);\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 29, "score": 177741.21737153857 }, { "content": "#[test]\n\nfn test_bool_container_uper() {\n\n let mut uper = UperWriter::default();\n\n let v = BoolContainer {\n\n bool1: false,\n\n bool2: true,\n\n bool3: true,\n\n };\n\n uper.write(&v).unwrap();\n\n assert_eq!(&[0b011_0_0000], uper.byte_content());\n\n assert_eq!(3, uper.bit_len());\n\n\n\n let mut uper = uper.into_reader();\n\n assert_eq!(v, uper.read::<BoolContainer>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n\n#[asn(transparent)]\n\n#[derive(Debug, Default, PartialOrd, PartialEq)]\n\npub struct NegativeRangeMin(#[asn(integer(- 12..12))] i8);\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 30, "score": 177741.21737153857 }, { "content": "#[test]\n\nfn test_crazy_list_uper() {\n\n 
let mut uper = UperWriter::default();\n\n let list = CrazyList {\n\n values: vec![Some(Some(vec![13])), Some(Some(vec![37])), Some(None), None],\n\n };\n\n uper.write(&list).unwrap();\n\n assert_eq!(\n\n &[\n\n // from analytic, I hate myself for it and I am sorry to everyone that needs to adjust this\n\n // ...well... probably myself in the future... so self.await ... hehe ...\n\n // -- 0\n\n 0x04, // 4 elements in the list\n\n // -- 1\n\n 0b11 << 6 // first element: Some, Some\n\n | 0x01 >> 2, // length of inner list, part 1\n\n // -- 2\n\n 0x01 << 6 // length of inner list, part2\n\n | 0x01 >> 2, // length of integer, part 1\n\n // -- 3\n\n 0x01 << 6 // length of integer, part 2\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 31, "score": 177741.21737153857 }, { "content": "#[test]\n\nfn test_transparent_important_uper_some() {\n\n let mut uper = UperWriter::default();\n\n let v = Important(Some(42));\n\n uper.write(&v).unwrap();\n\n // invalid according to https://asn1.io/asn1playground/\n\n // but who cares... 
:P\n\n assert_eq!(\n\n &[\n\n // --- 0\n\n 0b1 << 7 // Some\n\n | 0x01 >> 1, // length of the integer, part 1\n\n // --- 1\n\n 0x01 << 7 // length of the integer, part 2\n\n | 42 >> 1, // value of the integer, part 1\n\n // --- 2\n\n 42 << 7 // value of the integer, part 2\n\n ],\n\n uper.byte_content()\n\n );\n\n\n\n assert_eq!(2 * 8 + 1, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(v, uper.read::<Important>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 32, "score": 177741.21737153857 }, { "content": "#[test]\n\nfn topping_test_deserialize_with_uper() {\n\n let mut uper = UperReader::from_bits(vec![0x00_u8 | 0x40 >> 2 | 0x80 >> 4], 6);\n\n assert_eq!(Topping::NotPineapple, uper.read::<Topping>().unwrap());\n\n assert_eq!(Topping::EvenLessPineapple, uper.read::<Topping>().unwrap());\n\n assert_eq!(Topping::NoPineappleAtAll, uper.read::<Topping>().unwrap());\n\n}\n\n\n\n#[asn(sequence)]\n\n#[derive(Debug, PartialOrd, PartialEq)]\n\npub struct Pizza {\n\n #[asn(integer(1..4))]\n\n size: u8,\n\n #[asn(complex(Topping))]\n\n topping: Topping,\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 33, "score": 177741.21737153857 }, { "content": "pub trait Reader {\n\n /// Sub-strings larger than 16k are not supported\n\n fn read_substring_with_length_determinant_prefix(&mut self) -> Result<BitBuffer, Error> {\n\n let byte_len = self.read_length_determinant()?;\n\n let bit_len = byte_len * BYTE_LEN;\n\n let mut bytes = vec![0x00_u8; byte_len];\n\n self.read_bit_string(&mut bytes[..], 0, bit_len)?;\n\n Ok(BitBuffer::from_bits(bytes, bit_len))\n\n }\n\n\n\n fn read_utf8_string(&mut self) -> Result<String, Error> {\n\n let len = self.read_length_determinant()?;\n\n let mut buffer = vec![0_u8; len];\n\n self.read_bit_string_till_end(&mut buffer[..len], 0)?;\n\n if let Ok(string) = String::from_utf8(buffer) {\n\n Ok(string)\n\n } else {\n\n 
Err(Error::InvalidUtf8String)\n\n }\n\n }\n", "file_path": "src/io/uper.rs", "rank": 34, "score": 176161.75003322808 }, { "content": "pub trait Writer {\n\n /// Sub-strings larger than 16k are not supported\n\n fn write_substring_with_length_determinant_prefix(\n\n &mut self,\n\n fun: &dyn Fn(&mut dyn Writer) -> Result<(), Error>,\n\n ) -> Result<(), Error> {\n\n let mut buffer = BitBuffer::default();\n\n fun(&mut buffer as &mut dyn Writer)?;\n\n self.write_length_determinant(buffer.byte_len())?;\n\n self.write_bit_string(&buffer.content(), 0, buffer.bit_len())?;\n\n Ok(())\n\n }\n\n\n\n fn write_utf8_string(&mut self, value: &str) -> Result<(), Error> {\n\n self.write_length_determinant(value.len())?;\n\n self.write_bit_string_till_end(value.as_bytes(), 0)?;\n\n Ok(())\n\n }\n\n\n\n fn write_choice_index_extensible(\n", "file_path": "src/io/uper.rs", "rank": 35, "score": 176161.75003322808 }, { "content": "#[test]\n\nfn test_transparent_important_uper_none() {\n\n let mut uper = UperWriter::default();\n\n let v = Important(None);\n\n uper.write(&v).unwrap();\n\n // invalid according to https://asn1.io/asn1playground/\n\n // but who cares... 
:P\n\n assert_eq!(&[0b0 << 7], uper.byte_content());\n\n\n\n assert_eq!(1, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(v, uper.read::<Important>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n\n#[asn(sequence)]\n\n#[derive(Debug, Default, PartialOrd, PartialEq)]\n\npub struct BoolContainer {\n\n #[asn(boolean)]\n\n bool1: bool,\n\n #[asn(boolean)]\n\n bool2: bool,\n\n #[asn(boolean)]\n\n bool3: bool,\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 36, "score": 174108.78225641843 }, { "content": "#[test]\n\nfn are_we_binary_yet_uper() {\n\n let mut uper = UperWriter::default();\n\n let are_we = AreWeBinaryYet {\n\n binary: vec![0x13, 0x37],\n\n };\n\n uper.write(&are_we).unwrap();\n\n // https://asn1.io/asn1playground/\n\n assert_eq!(&[02, 0x13, 0x37], uper.byte_content());\n\n assert_eq!(3 * 8, uper.bit_len());\n\n let mut uper = uper.into_reader();\n\n assert_eq!(are_we, uper.read::<AreWeBinaryYet>().unwrap());\n\n assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n\n#[asn(sequence)]\n\n#[derive(Debug, PartialOrd, PartialEq)]\n\npub struct Optional {\n\n #[asn(option(integer))]\n\n value: Option<u64>,\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 37, "score": 171005.51936335157 }, { "content": "#[test]\n\nfn test_basic_variants_parsed() {\n\n let _abc = Basic::Abc;\n\n let _def = Basic::Def;\n\n let _ghi = Basic::Ghi;\n\n\n\n match Basic::Abc {\n\n // this does not compile if there are additional unexpected variants\n\n Basic::Abc | Basic::Def | Basic::Ghi => {}\n\n }\n\n}\n\n\n", "file_path": "tests/basic_enumerated.rs", "rank": 38, "score": 158343.00232054197 }, { "content": "pub fn parse_asn_definition(\n\n attr: TokenStream,\n\n item: TokenStream,\n\n) -> Result<(Option<Definition<AsnModelType>>, Item), TokenStream> {\n\n let item_span = item.span();\n\n let attr_span = attr.span();\n\n\n\n if cfg!(feature = \"debug-proc-macro\") {\n\n println!(\"ATTRIBUTE: {}\", 
attr.to_string());\n\n println!(\"ITEM: {}\", item.to_string());\n\n }\n\n\n\n let item = syn::parse2::<Item>(item)\n\n .map_err(|e| compile_error_ts(item_span, format!(\"Invalid Item: {}\", e)))?;\n\n let asn = syn::parse2::<AsnAttribute<DefinitionHeader>>(attr.clone()).map_err(|e| {\n\n compile_error_ts(\n\n attr_span,\n\n format!(\"Invalid ASN attribute ('{}'): {}\", attr.to_string(), e),\n\n )\n\n })?;\n", "file_path": "asn1rs-model/src/ast/mod.rs", "rank": 39, "score": 156026.171393426 }, { "content": "#[test]\n\nfn test_predefined_numbers() {\n\n assert_eq!((2, vec![0x00_u8]), serialize_uper(PredefinedNumbers::Abc));\n\n assert_eq!((2, vec![0x40_u8]), serialize_uper(PredefinedNumbers::Def));\n\n assert_eq!((8, vec![0x80_u8]), serialize_uper(PredefinedNumbers::Ghi));\n\n assert_eq!((8, vec![0x81_u8]), serialize_uper(PredefinedNumbers::Jkl));\n\n\n\n assert_eq!(PredefinedNumbers::Abc, deserialize_uper(&[0x00_u8], 2,));\n\n assert_eq!(PredefinedNumbers::Def, deserialize_uper(&[0x40_u8], 2,));\n\n assert_eq!(PredefinedNumbers::Ghi, deserialize_uper(&[0x80_u8], 8,));\n\n assert_eq!(PredefinedNumbers::Jkl, deserialize_uper(&[0x81_u8], 8,));\n\n}\n\n\n", "file_path": "tests/basic_enumerated.rs", "rank": 40, "score": 151078.41462116188 }, { "content": "#[test]\n\nfn test_standard_choice() {\n\n parse_asn_map_to_rust_map_to_stringify_with_proc_macro_annotation_re_parse_check_equal(\n\n r#\"BasicSchema DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n\n\n\n MyType ::= [PRIVATE 1] CHOICE {\n\n abc Utf8String,\n\n def [APPLICATION 7] INTEGER,\n\n ghi Utf8String\n\n }\n\n \n\nEND\"#,\n\n )\n\n}\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 41, "score": 148157.3706314355 }, { "content": "#[test]\n\nfn test_extensible_choice() {\n\n parse_asn_map_to_rust_map_to_stringify_with_proc_macro_annotation_re_parse_check_equal(\n\n r#\"BasicSchema DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n\n\n\n MyType ::= [PRIVATE 1] CHOICE {\n\n abc Utf8String,\n\n def [APPLICATION 7] INTEGER,\n\n 
...,\n\n ghi Utf8String\n\n }\n\n \n\nEND\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 42, "score": 148157.3706314355 }, { "content": "#[test]\n\nfn test_compiles() {\n\n let _p = Potato {\n\n size: 123,\n\n size2: 1234,\n\n size3: 234,\n\n string: String::from(\"where is the content\"),\n\n };\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 43, "score": 147949.89436561117 }, { "content": "#[test]\n\nfn uper_proof() {\n\n use asn1rs::syn::io::UperWriter;\n\n let mut writer = UperWriter::default();\n\n writer\n\n .write(&WhatToEat::Pizza(Pizza {\n\n price: 2,\n\n size: 3,\n\n note: Some(String::from(\"Extra crusty!\")),\n\n }))\n\n .unwrap();\n\n\n\n // read into the plain type to prove they behave the same\n\n use what_is_being_generated as g;\n\n\n\n let mut reader = writer.into_reader();\n\n let read = reader.read::<g::WhatToEat>().expect(\"Failed to read\");\n\n\n\n assert_eq!(\n\n read,\n\n g::WhatToEat::Pizza(g::Pizza {\n\n price: 2,\n\n size: 3,\n\n note: Some(String::from(\"Extra crusty!\"))\n\n })\n\n );\n\n}\n", "file_path": "tests/showcase.rs", "rank": 44, "score": 147813.26458129956 }, { "content": "pub fn convert_to_rust<F: AsRef<Path>, D: AsRef<Path>, A: FnOnce(&mut RustGenerator)>(\n\n file: F,\n\n dir: D,\n\n custom_adjustments: A,\n\n) -> Result<Vec<String>, Error> {\n\n let input = ::std::fs::read_to_string(file)?;\n\n let tokens = Tokenizer::default().parse(&input);\n\n let model = Model::try_from(tokens)?;\n\n let mut generator = RustGenerator::default();\n\n generator.add_model(model.to_rust());\n\n\n\n custom_adjustments(&mut generator);\n\n\n\n let output = generator.to_string().map_err(|_| Error::RustGenerator)?;\n\n\n\n let mut files = Vec::new();\n\n for (file, content) in output {\n\n ::std::fs::write(dir.as_ref().join(&file), content)?;\n\n files.push(file);\n\n }\n\n Ok(files)\n\n}\n\n\n", "file_path": "src/converter.rs", "rank": 45, "score": 146590.70849480492 }, { "content": 
"#[deprecated(note = \"Use the UperReader/-Writer with the Read-/Writable interface instead\")]\n\n#[cfg(feature = \"legacy-uper-codegen\")]\n\npub trait Uper {\n\n fn read_uper(reader: &mut dyn Reader) -> Result<Self, Error>\n\n where\n\n Self: Sized;\n\n\n\n fn write_uper(&self, writer: &mut dyn Writer) -> Result<(), Error>;\n\n}\n\n\n", "file_path": "src/io/uper.rs", "rank": 46, "score": 145394.13348343747 }, { "content": "#[inline]\n\nfn bit_buffer_range<C: octetstring::Constraint>() -> Option<(i64, i64)> {\n\n match (C::MIN, C::MAX) {\n\n (None, None) => None,\n\n (min, max) => Some((\n\n min.unwrap_or(0) as i64,\n\n max.unwrap_or(std::i64::MAX as usize) as i64, // TODO never verified!\n\n )),\n\n }\n\n}\n", "file_path": "src/syn/io/uper.rs", "rank": 47, "score": 144977.3399397639 }, { "content": "#[test]\n\nfn test_crazy_list_println() {\n\n let mut writer = PrintlnWriter::default();\n\n let list = CrazyList {\n\n values: vec![Some(Some(vec![13])), Some(Some(vec![37])), Some(None), None],\n\n };\n\n // Prints something like\n\n //\n\n // Writing sequence CrazyList\n\n // Writing sequence-of (MIN..MAX)\n\n // Writing OPTIONAL\n\n // Some\n\n // Writing OPTIONAL\n\n // Some\n\n // Writing sequence-of (MIN..MAX)\n\n // WRITING Integer 13\n\n // Writing OPTIONAL\n\n // Some\n\n // Writing OPTIONAL\n\n // Some\n\n // Writing sequence-of (MIN..MAX)\n\n // WRITING Integer 37\n\n // Writing OPTIONAL\n\n // Some\n\n // Writing OPTIONAL\n\n // None\n\n // Writing OPTIONAL\n\n // None\n\n list.write(&mut writer).unwrap();\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 48, "score": 142150.37072826055 }, { "content": "#[test]\n\nfn test_flat_list_println() {\n\n // Writing sequence FlatList\n\n // Writing sequence-of (MIN..MAX)\n\n // WRITING Integer 13\n\n // WRITING Integer 37\n\n // WRITING Integer 42\n\n PrintlnWriter::default()\n\n .write(&FlatList(vec![13, 37, 42]))\n\n .unwrap();\n\n}\n\n\n", "file_path": 
"tests/basic_proc_macro_attribute.rs", "rank": 49, "score": 142150.37072826055 }, { "content": "#[test]\n\nfn test_transparent_important_println() {\n\n // Writing sequence FlatList\n\n // Writing sequence-of (MIN..MAX)\n\n // WRITING Integer 13\n\n // WRITING Integer 37\n\n // WRITING Integer 42\n\n PrintlnWriter::default()\n\n .write(&Important(Some(42)))\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 50, "score": 142150.37072826055 }, { "content": "fn bit_string_copy(\n\n src: &[u8],\n\n src_bit_position: usize,\n\n dst: &mut [u8],\n\n dst_bit_position: usize,\n\n len: usize,\n\n) -> Result<(), UperError> {\n\n if dst.len() * BYTE_LEN < dst_bit_position + len {\n\n return Err(Error::InsufficientSpaceInDestinationBuffer);\n\n }\n\n if src.len() * BYTE_LEN < src_bit_position + len {\n\n return Err(Error::InsufficientDataInSourceBuffer);\n\n }\n\n for bit in 0..len {\n\n let dst_byte_pos = (dst_bit_position + bit) / BYTE_LEN;\n\n let dst_bit_pos = (dst_bit_position + bit) % BYTE_LEN;\n\n let dst_bit_pos = BYTE_LEN - dst_bit_pos - 1; // flip\n\n\n\n let bit = {\n\n let src_byte_pos = (src_bit_position + bit) / BYTE_LEN;\n", "file_path": "src/io/buffer.rs", "rank": 51, "score": 141201.7603762057 }, { "content": "pub fn proto_definition_name(name: &str) -> String {\n\n rust_struct_or_enum_name(name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_non_definitions_rust_to_protobuf() {\n\n let mut model_rust = Model::default();\n\n model_rust.name = \"ModelWithOriginOfRust\".into();\n\n model_rust.imports = vec![Import {\n\n what: vec![\"a\".into(), \"b\".into()],\n\n from: \"some_very_specific_module\".into(),\n\n }];\n\n let model_proto = model_rust.to_protobuf();\n\n assert_eq!(model_rust.name, model_proto.name);\n\n assert_eq!(model_rust.imports, model_proto.imports);\n\n assert!(model_proto.definitions.is_empty());\n", "file_path": "asn1rs-model/src/model/protobuf.rs", "rank": 52, 
"score": 138516.3541334816 }, { "content": "fn bit_string_copy_bulked(\n\n src: &[u8],\n\n src_bit_position: usize,\n\n dst: &mut [u8],\n\n dst_bit_position: usize,\n\n len: usize,\n\n) -> Result<(), UperError> {\n\n // chosen by real world tests\n\n if len <= BYTE_LEN * 2 {\n\n return bit_string_copy(src, src_bit_position, dst, dst_bit_position, len);\n\n }\n\n\n\n if dst.len() * BYTE_LEN < dst_bit_position + len {\n\n return Err(Error::InsufficientSpaceInDestinationBuffer);\n\n }\n\n if src.len() * BYTE_LEN < src_bit_position + len {\n\n return Err(Error::InsufficientDataInSourceBuffer);\n\n }\n\n\n\n let bits_till_full_byte_src = (BYTE_LEN - (src_bit_position % BYTE_LEN)) % BYTE_LEN;\n", "file_path": "src/io/buffer.rs", "rank": 53, "score": 137843.84478550337 }, { "content": "pub trait Writer {\n\n fn write_varint(&mut self, value: u64) -> Result<(), Error>;\n\n\n\n fn write_bool(&mut self, value: bool) -> Result<(), Error> {\n\n self.write_varint(if value { 1 } else { 0 })\n\n }\n\n\n\n fn write_bytes(&mut self, value: &[u8]) -> Result<(), Error>;\n\n\n\n fn write_tag(&mut self, field: u32, format: Format) -> Result<(), Error> {\n\n self.write_varint(u64::from(field << 3 | (format as u32)))\n\n }\n\n\n\n fn write_enum_variant(&mut self, variant: u32) -> Result<(), Error> {\n\n self.write_varint(u64::from(variant))\n\n }\n\n\n\n fn write_sfixed32(&mut self, value: i32) -> Result<(), Error>;\n\n\n\n fn write_uint32(&mut self, value: u32) -> Result<(), Error> {\n", "file_path": "src/io/protobuf.rs", "rank": 54, "score": 136244.06783086498 }, { "content": "pub trait Reader {\n\n fn read_varint(&mut self) -> Result<u64, Error>;\n\n\n\n fn read_bool(&mut self) -> Result<bool, Error> {\n\n Ok(self.read_varint()? != 0)\n\n }\n\n\n\n fn read_bytes(&mut self) -> Result<Vec<u8>, Error>;\n\n\n\n fn read_tag(&mut self) -> Result<(u32, Format), Error> {\n\n let mask = 0b0000_0111;\n\n let tag = self.read_varint()? 
as u32;\n\n let format = Format::from(tag & mask)?;\n\n let field = tag >> 3;\n\n Ok((field, format))\n\n }\n\n\n\n fn read_enum_variant(&mut self) -> Result<u32, Error> {\n\n Ok(self.read_varint()? as u32)\n\n }\n", "file_path": "src/io/protobuf.rs", "rank": 55, "score": 136244.06783086498 }, { "content": "pub trait Writer {\n\n type Error;\n\n\n\n #[inline]\n\n fn write<T: Writable>(&mut self, value: &T) -> Result<(), Self::Error>\n\n where\n\n Self: Sized,\n\n {\n\n value.write(self)\n\n }\n\n\n\n fn write_sequence<C: sequence::Constraint, F: Fn(&mut Self) -> Result<(), Self::Error>>(\n\n &mut self,\n\n f: F,\n\n ) -> Result<(), Self::Error>;\n\n\n\n fn write_sequence_of<C: sequenceof::Constraint, T: WritableType>(\n\n &mut self,\n\n slice: &[T::Type],\n\n ) -> Result<(), Self::Error>;\n", "file_path": "src/syn/mod.rs", "rank": 56, "score": 136244.06783086498 }, { "content": "pub trait Reader {\n\n type Error;\n\n\n\n #[inline]\n\n fn read<T: Readable>(&mut self) -> Result<T, Self::Error>\n\n where\n\n Self: Sized,\n\n {\n\n T::read(self)\n\n }\n\n\n\n fn read_sequence<\n\n C: sequence::Constraint,\n\n S: Sized,\n\n F: Fn(&mut Self) -> Result<S, Self::Error>,\n\n >(\n\n &mut self,\n\n f: F,\n\n ) -> Result<S, Self::Error>;\n\n\n", "file_path": "src/syn/mod.rs", "rank": 57, "score": 136244.06783086498 }, { "content": "pub fn expand(definition: Option<Definition<AsnModelType>>) -> Vec<TokenStream> {\n\n let mut additional_impl: Vec<TokenStream> = Vec::default();\n\n let mut model: Model<AsnModelType> = Model {\n\n name: \"__proc_macro\".to_string(),\n\n imports: vec![],\n\n definitions: vec![],\n\n };\n\n\n\n if let Some(definition) = definition {\n\n model.definitions.push(definition);\n\n use crate::gen::rust::walker::AsnDefWriter;\n\n additional_impl\n\n .push(TokenStream::from_str(&AsnDefWriter::stringify(&model.to_rust())).unwrap());\n\n }\n\n\n\n additional_impl\n\n}\n\n\n", "file_path": "asn1rs-model/src/ast/mod.rs", "rank": 58, "score": 134549.2393491696 
}, { "content": "pub fn main() {\n\n let params = cli::parse_parameters();\n\n\n\n for source in &params.source_files {\n\n let result = match params.conversion_target.as_str() {\n\n cli::CONVERSION_TARGET_RUST => {\n\n converter::convert_to_rust(source, &params.destination_dir, |rust| {\n\n rust.set_fields_pub(!params.rust_fields_not_public);\n\n rust.set_fields_have_getter_and_setter(params.rust_getter_and_setter);\n\n })\n\n }\n\n cli::CONVERSION_TARGET_PROTO => {\n\n converter::convert_to_proto(source, &params.destination_dir)\n\n }\n\n cli::CONVERSION_TARGET_SQL => {\n\n converter::convert_to_sql(source, &params.destination_dir)\n\n }\n\n e => panic!(\"Unexpected CONVERSION_TARGET={}\", e),\n\n };\n\n match result {\n", "file_path": "src/main.rs", "rank": 59, "score": 130959.04685891107 }, { "content": "use asn1rs::prelude::*;\n\nuse asn1rs::syn::io::UperWriter as NewUperWriter;\n\n\n\nasn_to_rust!(\n\n r\"BasicInteger DEFINITIONS AUTOMATIC TAGS ::=\n\n BEGIN\n\n \n\n RangedMax ::= Integer (0..MAX)\n\n \n\n NotRanged ::= Integer\n\n \n\n END\"\n\n);\n\n\n\n#[test]\n", "file_path": "tests/basic_integer.rs", "rank": 60, "score": 129152.69982283535 }, { "content": "#[proc_macro]\n\npub fn asn_to_rust(item: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(item as LitStr).value();\n\n let tokens = Tokenizer::default().parse(&input);\n\n let model = Model::try_from(tokens).unwrap();\n\n\n\n let mut generator = RustGenerator::default();\n\n generator.add_model(model.to_rust());\n\n\n\n generator\n\n .to_string()\n\n .unwrap()\n\n .into_iter()\n\n .map(|(_file, content)| content)\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n .parse()\n\n .unwrap()\n\n}\n\n\n", "file_path": "asn1rs-macros/src/lib.rs", "rank": 63, "score": 128449.61006908928 }, { "content": "fn generate_rust_code_with_proc_macro_attributes(definition: &Definition<Rust>) -> String {\n\n let mut scope = Scope::new();\n\n RustCodeGenerator::default().add_definition(&mut scope, 
&definition);\n\n scope.to_string()\n\n}\n\n\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 64, "score": 123974.89330415131 }, { "content": "pub fn parse_parameters() -> Parameters {\n\n let parser = create_argument_parser();\n\n let matches = parser.get_matches();\n\n Parameters {\n\n rust_fields_not_public: matches.is_present(ARG_RUST_FIELDS_NOT_PUBLIC[0]),\n\n rust_getter_and_setter: matches.is_present(ARG_RUST_GETTER_AND_SETTER[0]),\n\n conversion_target: matches\n\n .value_of_lossy(ARG_CONVERSION_TARGET[0])\n\n .expect(\"Missing conversion target\")\n\n .to_string(),\n\n source_files: matches\n\n .values_of_lossy(\"SOURCE_FILES\")\n\n .expect(\"Missing source files\"),\n\n destination_dir: matches\n\n .value_of_lossy(\"DESTINATION_DIR\")\n\n .expect(\"Missing destination directory\")\n\n .to_string(),\n\n }\n\n}\n", "file_path": "src/cli.rs", "rank": 65, "score": 122477.82570800785 }, { "content": "pub fn ident_or_literal_or_punct_or_err<'a>(\n\n stepper: &'a StepCursor<'_, 'a>,\n\n a: Cursor<'a>,\n\n err: &str,\n\n) -> Result<(String, Cursor<'a>), syn::Error> {\n\n ident_or_literal_or_punct(a).ok_or_else(|| stepper.error(err))\n\n}\n\n\n", "file_path": "asn1rs-model/src/ast/range.rs", "rank": 66, "score": 114269.65452872548 }, { "content": "pub fn create_argument_parser<'a, 'b>() -> App<'a, 'b> {\n\n App::new(env!(\"CARGO_PKG_NAME\"))\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(env!(\"CARGO_PKG_AUTHORS\"))\n\n .about(env!(\"CARGO_PKG_DESCRIPTION\"))\n\n .setting(AppSettings::ColoredHelp)\n\n .arg(arg(ARG_RUST_FIELDS_NOT_PUBLIC, None).takes_value(false))\n\n .arg(arg(ARG_RUST_GETTER_AND_SETTER, None).takes_value(false))\n\n .arg(\n\n arg(ARG_CONVERSION_TARGET, Some(CONVERSION_TARGET_RUST))\n\n .possible_values(&CONVERSION_TARGET_POSSIBLE_VALUES)\n\n .next_line_help(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"DESTINATION_DIR\")\n\n .required(true)\n\n .multiple(false)\n\n .value_name(\"DESTINATION_DIR\"),\n\n )\n\n .arg(\n\n 
Arg::with_name(\"SOURCE_FILES\")\n\n .required(true)\n\n .multiple(true)\n\n .value_name(\"SOURCE_FILES\"),\n\n )\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 67, "score": 109972.64376141434 }, { "content": "#[test]\n\nfn test_standard_sequence() {\n\n parse_asn_map_to_rust_map_to_stringify_with_proc_macro_annotation_re_parse_check_equal(\n\n r#\"BasicSchema DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n\n\n\n MyType ::= [5] SEQUENCE {\n\n abc Utf8String,\n\n def [APPLICATION 7] INTEGER,\n\n ghi Utf8String\n\n }\n\n \n\nEND\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 68, "score": 109904.33169458031 }, { "content": "#[test]\n\nfn test_standard_enum() {\n\n parse_asn_map_to_rust_map_to_stringify_with_proc_macro_annotation_re_parse_check_equal(\n\n r#\"BasicSchema DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n\n\n\n MyType ::= [UNIVERSAL 5] ENUMERATED {\n\n implicit,\n\n number(7),\n\n wow\n\n }\n\n \n\nEND\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 69, "score": 109904.33169458031 }, { "content": "#[test]\n\nfn test_standard_sequence_of() {\n\n parse_asn_map_to_rust_map_to_stringify_with_proc_macro_annotation_re_parse_check_equal(\n\n r#\"BasicSchema DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n\n\n\n MyType ::= [1023] SEQUENCE OF INTEGER\n\n \n\nEND\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 70, "score": 109904.33169458031 }, { "content": "#[test]\n\nfn test_extensible_enum() {\n\n parse_asn_map_to_rust_map_to_stringify_with_proc_macro_annotation_re_parse_check_equal(\n\n r#\"BasicSchema DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n\n\n\n MyType ::= [UNIVERSAL 5] ENUMERATED {\n\n implicit,\n\n number(7),\n\n ...,\n\n wow\n\n }\n\n \n\nEND\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 71, "score": 109904.33169458031 }, { "content": "pub trait Constraint {\n\n const MIN: Option<usize> = None;\n\n const MAX: Option<usize> = None;\n\n}\n\n\n\n#[derive(Default)]\n\npub struct 
NoConstraint;\n\nimpl Constraint for NoConstraint {}\n\n\n\nimpl<C: Constraint> WritableType for Utf8String<C> {\n\n type Type = String;\n\n\n\n #[inline]\n\n fn write_value<W: Writer>(writer: &mut W, value: &Self::Type) -> Result<(), W::Error> {\n\n writer.write_utf8string::<C>(value.as_str())\n\n }\n\n}\n\n\n\nimpl<C: Constraint> ReadableType for Utf8String<C> {\n\n type Type = String;\n\n\n\n #[inline]\n\n fn read_value<R: Reader>(reader: &mut R) -> Result<Self::Type, <R as Reader>::Error> {\n\n reader.read_utf8string::<C>()\n\n }\n\n}\n", "file_path": "src/syn/utf8string.rs", "rank": 72, "score": 107186.37727050524 }, { "content": "pub trait Writable {\n\n fn write<W: Writer>(&self, writer: &mut W) -> Result<(), W::Error>;\n\n}\n\n\n", "file_path": "src/syn/mod.rs", "rank": 73, "score": 107163.60093670958 }, { "content": "pub fn proto_variant_name(name: &str) -> String {\n\n rust_variant_name(name)\n\n}\n\n\n", "file_path": "asn1rs-model/src/model/protobuf.rs", "rank": 74, "score": 104976.92760729464 }, { "content": "#[allow(clippy::module_name_repetitions)]\n\npub fn rust_module_name(name: &str) -> String {\n\n let mut out = String::new();\n\n let mut prev_lowered = false;\n\n let mut prev_alphabetic = false;\n\n let mut chars = name.chars().peekable();\n\n while let Some(c) = chars.next() {\n\n let mut lowered = false;\n\n let alphabetic = c.is_alphabetic();\n\n if c.is_uppercase() {\n\n if !out.is_empty() && prev_alphabetic {\n\n if !prev_lowered {\n\n out.push('_');\n\n } else if let Some(next) = chars.peek() {\n\n if next.is_lowercase() {\n\n out.push('_');\n\n }\n\n }\n\n }\n\n lowered = true;\n\n out.push_str(&c.to_lowercase().to_string());\n", "file_path": "asn1rs-model/src/model/rust.rs", "rank": 75, "score": 104976.92760729464 }, { "content": "#[allow(clippy::module_name_repetitions)]\n\npub fn rust_field_name(name: &str) -> String {\n\n rust_module_name(name)\n\n}\n\n\n", "file_path": "asn1rs-model/src/model/rust.rs", "rank": 76, "score": 
104976.92760729464 }, { "content": "#[allow(clippy::module_name_repetitions)]\n\npub fn rust_variant_name(name: &str) -> String {\n\n let mut out = String::new();\n\n let mut next_upper = true;\n\n for c in name.chars() {\n\n if next_upper {\n\n out.push_str(&c.to_uppercase().to_string());\n\n next_upper = false;\n\n } else if c == '-' {\n\n next_upper = true;\n\n } else {\n\n out.push(c);\n\n }\n\n }\n\n out\n\n}\n\n\n", "file_path": "asn1rs-model/src/model/rust.rs", "rank": 77, "score": 104976.92760729464 }, { "content": "pub fn proto_field_name(name: &str) -> String {\n\n rust_module_name(name)\n\n}\n\n\n", "file_path": "asn1rs-model/src/model/protobuf.rs", "rank": 78, "score": 104976.92760729464 }, { "content": "pub trait ReadableType {\n\n type Type: Sized;\n\n\n\n #[inline]\n\n fn read_ref<R: Reader>(&self, reader: &mut R) -> Result<Self::Type, R::Error> {\n\n Self::read_value(reader)\n\n }\n\n\n\n fn read_value<R: Reader>(reader: &mut R) -> Result<Self::Type, R::Error>;\n\n}\n\n\n\nimpl<T: Readable> ReadableType for T {\n\n type Type = T;\n\n\n\n #[inline]\n\n fn read_value<R: Reader>(reader: &mut R) -> Result<T, R::Error> {\n\n T::read(reader)\n\n }\n\n}\n\n\n", "file_path": "src/syn/mod.rs", "rank": 79, "score": 104776.62177353096 }, { "content": "pub trait WritableType {\n\n type Type;\n\n\n\n #[inline]\n\n fn write_ref<W: Writer>(&self, writer: &mut W, value: &Self::Type) -> Result<(), W::Error> {\n\n Self::write_value(writer, value)\n\n }\n\n\n\n fn write_value<W: Writer>(writer: &mut W, value: &Self::Type) -> Result<(), W::Error>;\n\n}\n\n\n\npub struct AsnType<T>(PhantomData<T>);\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::syn::io::PrintlnWriter;\n\n use crate::syn::sequence::Sequence;\n\n use crate::syn::utf8string::Utf8String;\n\n\n", "file_path": "src/syn/mod.rs", "rank": 80, "score": 104776.62177353096 }, { "content": "#[allow(clippy::module_name_repetitions)]\n\npub fn rust_struct_or_enum_name(name: &str) -> String {\n\n 
rust_variant_name(name)\n\n}\n\n\n", "file_path": "asn1rs-model/src/model/rust.rs", "rank": 81, "score": 103361.32022022767 }, { "content": "pub trait Readable: Sized {\n\n fn read<R: Reader>(reader: &mut R) -> Result<Self, R::Error>;\n\n}\n\n\n", "file_path": "src/syn/mod.rs", "rank": 82, "score": 102818.27378597904 }, { "content": "pub trait Constraint: Sized {\n\n const NAME: &'static str;\n\n const VARIANT_COUNT: usize;\n\n const STD_VARIANT_COUNT: usize;\n\n const EXTENSIBLE: bool = false;\n\n\n\n fn to_choice_index(&self) -> usize;\n\n\n\n fn write_content<W: Writer>(&self, writer: &mut W) -> Result<(), W::Error>;\n\n\n\n fn read_content<R: Reader>(index: usize, reader: &mut R) -> Result<Option<Self>, R::Error>;\n\n}\n\n\n\nimpl<C: Constraint> WritableType for Choice<C> {\n\n type Type = C;\n\n\n\n #[inline]\n\n fn write_value<W: Writer>(\n\n writer: &mut W,\n\n value: &Self::Type,\n", "file_path": "src/syn/choice.rs", "rank": 83, "score": 102731.71920380408 }, { "content": "pub trait TagProperty {\n\n fn tag(&self) -> Option<Tag>;\n\n\n\n fn set_tag(&mut self, tag: Tag);\n\n\n\n fn reset_tag(&mut self);\n\n\n\n fn with_tag_opt(self, tag: Option<Tag>) -> Self\n\n where\n\n Self: Sized,\n\n {\n\n if let Some(tag) = tag {\n\n self.with_tag(tag)\n\n } else {\n\n self.without_tag()\n\n }\n\n }\n\n\n\n fn with_tag(mut self, tag: Tag) -> Self\n\n where\n", "file_path": "asn1rs-model/src/model/mod.rs", "rank": 84, "score": 100415.35924609596 }, { "content": "pub fn convert_to_sql_with<F: AsRef<Path>, D: AsRef<Path>>(\n\n file: F,\n\n dir: D,\n\n mut generator: SqlGenerator,\n\n) -> Result<Vec<String>, Error> {\n\n let input = ::std::fs::read_to_string(file)?;\n\n let tokens = Tokenizer::default().parse(&input);\n\n let model = Model::try_from(tokens)?;\n\n\n\n generator.add_model(model.to_rust().to_sql());\n\n let output = generator.to_string()?;\n\n\n\n let mut files = Vec::new();\n\n for (file, content) in output {\n\n ::std::fs::write(dir.as_ref().join(&file), 
content)?;\n\n files.push(file);\n\n }\n\n Ok(files)\n\n}\n", "file_path": "src/converter.rs", "rank": 85, "score": 100333.60704084171 }, { "content": "pub fn convert_to_sql<F: AsRef<Path>, D: AsRef<Path>>(\n\n file: F,\n\n dir: D,\n\n) -> Result<Vec<String>, Error> {\n\n convert_to_sql_with(file, dir, SqlGenerator::default())\n\n}\n\n\n", "file_path": "src/converter.rs", "rank": 86, "score": 100333.60704084171 }, { "content": "pub fn convert_to_proto<F: AsRef<Path>, D: AsRef<Path>>(\n\n file: F,\n\n dir: D,\n\n) -> Result<Vec<String>, Error> {\n\n let input = ::std::fs::read_to_string(file)?;\n\n let tokens = Tokenizer::default().parse(&input);\n\n let model = Model::try_from(tokens)?;\n\n let mut generator = ProtobufGenerator::default();\n\n generator.add_model(model.to_rust().to_protobuf());\n\n let output = generator.to_string()?;\n\n\n\n let mut files = Vec::new();\n\n for (file, content) in output {\n\n ::std::fs::write(dir.as_ref().join(&file), content)?;\n\n files.push(file);\n\n }\n\n Ok(files)\n\n}\n\n\n", "file_path": "src/converter.rs", "rank": 87, "score": 100333.60704084171 }, { "content": "fn parse_choice(\n\n mut enm: syn::ItemEnum,\n\n asn: &AsnAttribute<DefinitionHeader>,\n\n asn_span: proc_macro2::Span,\n\n) -> Result<(Option<Definition<AsnModelType>>, Item), TokenStream> {\n\n enm.variants\n\n .iter()\n\n .find(|v| v.fields.is_empty())\n\n .map(|v| {\n\n compile_err_ts(\n\n v.span(),\n\n \"CHOICE does not allow any variant to not have data attached\",\n\n )\n\n })\n\n .transpose()?;\n\n\n\n let variants = enm\n\n .variants\n\n .iter_mut()\n\n .map(|v| {\n", "file_path": "asn1rs-model/src/ast/mod.rs", "rank": 88, "score": 99854.28273862283 }, { "content": "#[proc_macro_attribute]\n\npub fn asn(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n TokenStream::from(ast::parse(attr.into(), item.into()))\n\n}\n", "file_path": "asn1rs-macros/src/lib.rs", "rank": 89, "score": 97351.82715566375 }, { "content": "fn 
find_and_remove_first_asn_attribute(attributes: &mut Vec<Attribute>) -> Option<Attribute> {\n\n index_of_first_asn_attribute(&attributes[..]).map(|index| attributes.remove(index))\n\n}\n\n\n", "file_path": "asn1rs-model/src/ast/mod.rs", "rank": 90, "score": 96713.11416024991 }, { "content": "pub fn parse(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n if cfg!(feature = \"debug-proc-macro\") {\n\n println!();\n\n println!(\"---------- asn proc_macro_attribute parse call ----------\");\n\n println!(\"Attribute: {}\", attr.to_string());\n\n println!(\"Item: {}\", item.to_string());\n\n println!();\n\n }\n\n\n\n let (definition, item) = match parse_asn_definition(attr, item) {\n\n Ok(v) => v,\n\n Err(e) => return e,\n\n };\n\n\n\n if cfg!(feature = \"debug-proc-macro\") {\n\n println!(\"---------- parsed definition begin ----------\");\n\n println!(\"{:#?}\", definition);\n\n println!(\"---------- parsed definition end ----------\");\n\n println!();\n\n\n", "file_path": "asn1rs-model/src/ast/mod.rs", "rank": 91, "score": 95885.05323370143 }, { "content": "pub fn ident_or_literal_or_punct(a: Cursor<'_>) -> Option<(String, Cursor<'_>)> {\n\n a.ident()\n\n .map(|(a, b)| (a.to_string(), b))\n\n .or_else(|| a.literal().map(|(a, b)| (a.to_string(), b)))\n\n .or_else(|| a.punct().map(|(a, b)| (a.to_string(), b)))\n\n}\n", "file_path": "asn1rs-model/src/ast/range.rs", "rank": 92, "score": 95341.24278065213 }, { "content": "pub fn arg<'a>(values: [&'a str; 5], default: Option<&'a str>) -> Arg<'a, 'a> {\n\n let mut arg = Arg::with_name(values[0])\n\n .env(values[0])\n\n .value_name(values[1])\n\n .short(values[2])\n\n .long(values[3])\n\n //.help(values[4])\n\n .takes_value(true);\n\n\n\n if let Some(default) = default {\n\n arg = arg.default_value(default);\n\n }\n\n\n\n arg\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 93, "score": 94116.70716422479 }, { "content": "fn extract_attribute(attr: &str) -> TokenStream {\n\n const PREFIX: &str = \"#[asn(\";\n\n const 
SUFFIX: &str = \")]\";\n\n\n\n assert!(attr.starts_with(PREFIX));\n\n assert!(attr.ends_with(SUFFIX));\n\n let substr = attr.split_at(PREFIX.len()).1;\n\n let substr = substr.split_at(substr.len() - SUFFIX.len()).0;\n\n let attr: TokenStream = substr.parse().unwrap();\n\n attr\n\n}\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 94, "score": 88556.97199788831 }, { "content": "use asn1rs::prelude::*;\n\nuse asn1rs::syn::io::UperReader as NewUperReader;\n\nuse asn1rs::syn::io::UperWriter as NewUperWriter;\n\n\n\nasn_to_rust!(\n\n r\"BasicEnumerated DEFINITIONS AUTOMATIC TAGS ::=\n\n BEGIN\n\n \n\n Basic ::= ENUMERATED {\n\n abc,\n\n def,\n\n ghi\n\n }\n\n \n\n PredefinedNumbers ::= ENUMERATED {\n\n abc(0),\n\n def(5),\n\n ..., -- whatever reserved blubber comment\n\n ghi(8),\n\n jkl(9)\n\n }\n\n\n\n \n\n END\"\n\n);\n\n\n", "file_path": "tests/basic_enumerated.rs", "rank": 95, "score": 85880.3306096103 }, { "content": "fn impl_insert_fn_content<'a, I: ExactSizeIterator<Item = &'a (String, RustType)>>(\n\n is_tuple_struct: bool,\n\n on_self: bool,\n\n name: &str,\n\n fields: impl Fn() -> I,\n\n container: &mut impl Container,\n\n) {\n\n let mut params = Vec::default();\n\n let mut to_await = Vec::default();\n\n for insert in fields().filter_map(|(field_name, r_type)| {\n\n let field_name = RustCodeGenerator::rust_field_name(field_name, true);\n\n let field_name_as_variable = if field_name\n\n .chars()\n\n .next()\n\n .map(|c| c.is_numeric())\n\n .unwrap_or(false)\n\n {\n\n Some(format!(\"value_{}\", field_name))\n\n } else {\n\n None\n", "file_path": "asn1rs-model/src/gen/rust/async_psql.rs", "rank": 96, "score": 83880.46860734222 }, { "content": "fn parse_asn_map_to_rust_map_to_stringify_with_proc_macro_annotation_re_parse_check_equal(\n\n asn: &str,\n\n) {\n\n let tokens = Tokenizer::default().parse(asn);\n\n let asn_model = Model::try_from(tokens).unwrap();\n\n let rust_model = asn_model.to_rust();\n\n\n\n for definition in rust_model.definitions {\n\n let 
stringified = generate_rust_code_with_proc_macro_attributes(&definition);\n\n let mut lines = stringified.lines().map(str::trim).filter(|s| !s.is_empty());\n\n\n\n let attribute = extract_attribute(lines.next().unwrap());\n\n let body = lines\n\n .map(|l| l.to_string())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n .parse::<TokenStream>()\n\n .unwrap();\n\n\n\n println!(\"---\");\n", "file_path": "tests/proc_macro_reparse.rs", "rank": 97, "score": 82714.66563191378 }, { "content": "fn index_of_first_asn_attribute(attributes: &[Attribute]) -> Option<usize> {\n\n attributes.iter().enumerate().find_map(|(index, attr)| {\n\n attr.path\n\n .segments\n\n .first()\n\n .filter(|s| s.ident.to_string().eq(\"asn\"))\n\n .map(|_| index)\n\n })\n\n}\n\n\n", "file_path": "asn1rs-model/src/ast/mod.rs", "rank": 98, "score": 82564.33665141376 }, { "content": " assert_eq!(0, uper.bits_remaining());\n\n}\n\n\n\n/*\n\nBasicSchema DEFINITIONS AUTOMATIC TAGS ::=\n\nBEGIN\n\n Potato ::= SEQUENCE {\n\n size INTEGER,\n\n size2 INTEGER,\n\n size3 INTEGER(12..128),\n\n string Utf8String\n\n }\n\n\n\n Topping ::= ENUMERATED\n\n {\n\n not_pineapple,\n\n even_less_pineapple,\n\n no_pineapple_at_all\n\n }\n\n\n", "file_path": "tests/basic_proc_macro_attribute.rs", "rank": 99, "score": 80630.37976793275 } ]
Rust
src/lib.rs
lucviala/rusty-vjoy
0767102ac650b357f8677ce4556732711468cbc1
mod ffi; pub type VJDStat = ffi::VjdStat; pub type JoystickPosition = ffi::_JOYSTICK_POSITION_V2; #[repr(u32)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum HidUsage { X = 0x30, Y = 0x31, Z = 0x32, RX = 0x33, RY = 0x34, RZ = 0x35, SL0 = 0x36, SL1 = 0x37, WHL = 0x38, POV = 0x39, } pub fn get_vjoy_version() -> i16 { unsafe { ffi::GetvJoyVersion() } } pub fn vjoy_enabled() -> bool { unsafe { ffi::vJoyEnabled() != 0 } } pub fn get_vjoy_product_string() -> String { unsafe { let ptr = ffi::GetvJoyProductString(); let mut string = String::with_capacity(23); for i in 0..46 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_manufacturer_string() -> String { unsafe { let ptr = ffi::GetvJoyManufacturerString(); let mut string = String::with_capacity(16); for i in 0..32 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_serial_number_string() -> String { unsafe { let ptr = ffi::GetvJoySerialNumberString(); let mut string = String::with_capacity(5); for i in 0..10 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn driver_match() -> (bool, u16, u16) { let mut dll_ver = 0; let mut driver_ver = 0; unsafe { let status = ffi::DriverMatch(&mut dll_ver, &mut driver_ver) != 0; (status, dll_ver, driver_ver) } } pub fn get_vjoy_max_devices() -> i32 { let mut number = 0; unsafe { if ffi::GetvJoyMaxDevices(&mut number) != 0 { number } else { 0 } } } pub fn get_number_existing_vjd() -> i32 { let mut number = 0; unsafe { if ffi::GetNumberExistingVJD(&mut number) != 0 { number } else { 0 } } } pub fn get_vjd_button_number(id: u32) -> i32 { unsafe { ffi::GetVJDButtonNumber(id) } } pub fn get_vjd_disc_pov_number(id: u32) -> i32 { unsafe { ffi::GetVJDDiscPovNumber(id) } } pub fn get_vjd_cont_pov_number(id: u32) 
-> i32 { unsafe { ffi::GetVJDContPovNumber(id) } } pub fn get_vjd_axis_exist(id: u32, usage: HidUsage) -> bool { unsafe { ffi::GetVJDAxisExist(id, usage as u32) != 0 } } pub fn get_vjd_axis_max(id: u32, usage: HidUsage) -> i32 { let mut max = 0; unsafe { if ffi::GetVJDAxisMax(id, usage as u32, &mut max) != 0 { max } else { 0 } } } pub fn get_vjd_axis_min(id: u32, usage: HidUsage) -> i32 { let mut min = 0; unsafe { if ffi::GetVJDAxisMin(id, usage as u32, &mut min) != 0 { min } else { 0 } } } pub fn get_vjd_status(id: u32) -> VJDStat { unsafe { ffi::GetVJDStatus(id) } } pub fn is_vjd_exists(id: u32) -> bool { unsafe { ffi::isVJDExists(id) != 0 } } pub fn get_owner_pid(id: u32) -> i32 { unsafe { ffi::GetOwnerPid(id) } } pub fn acquire_vjd(id: u32) -> bool { unsafe { ffi::AcquireVJD(id) != 0 } } pub fn relinquish_vjd(id: u32) { unsafe { ffi::RelinquishVJD(id); } } pub fn update_vjd(id: u32, data: *mut JoystickPosition) { unsafe { ffi::UpdateVJD(id, data as *mut std::ffi::c_void); } } pub fn reset_vjd(id: u32) { unsafe { ffi::ResetVJD(id); } } pub fn reset_all() { unsafe { ffi::ResetAll(); } } pub fn reset_buttons(id: u32) { unsafe { ffi::ResetButtons(id); } } pub fn reset_povs(id: u32) { unsafe { ffi::ResetPovs(id); } } pub fn set_axis(value: i32, id: u32, axis: HidUsage) { unsafe { ffi::SetAxis(value, id, axis as u32); } } pub fn set_btn(value: i32, id: u32, n_btn: u8) { unsafe { ffi::SetBtn(value, id, n_btn); } } pub fn set_disc_pov(value: i32, id: u32, n_pov: u8) { unsafe { ffi::SetDiscPov(value, id, n_pov); } } pub fn set_cont_pov(value: u32, id: u32, n_pov: u8) { unsafe { ffi::SetContPov(value, id, n_pov); } } pub enum VjoyError { InitializationError, OpenVjoyDeviceError, } pub struct VjoyApi; impl VjoyApi { pub fn new() -> Result<Self, VjoyError> { if vjoy_enabled() && driver_match().0 { Ok(Self {}) } else { Err(VjoyError::InitializationError) } } pub fn acquire_device(id: u32) -> Result<VjoyDevice, VjoyError> { if id > 16 { return 
Err(VjoyError::OpenVjoyDeviceError); } Ok(VjoyDevice::new(id)?) } } pub struct VjoyDevice { pub id: u32, } impl VjoyDevice { fn new(id: u32) -> Result<Self, VjoyError> { match get_vjd_status(id) { VJDStat::VjdStatOwned => return Ok(Self { id }), VJDStat::VjdStatFree => { if acquire_vjd(id) { return Ok(Self { id }); } } _ => {} } Err(VjoyError::OpenVjoyDeviceError) } } impl Drop for VjoyDevice { fn drop(&mut self) { relinquish_vjd(self.id); } }
mod ffi; pub type VJDStat = ffi::VjdStat; pub type JoystickPosition = ffi::_JOYSTICK_POSITION_V2; #[repr(u32)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum HidUsage { X = 0x30, Y = 0x31, Z = 0x32, RX = 0x33, RY = 0x34, RZ = 0x35, SL0 = 0x36, SL1 = 0x37, WHL = 0x38, POV = 0x39, } pub fn get_vjoy_version() -> i16 { unsafe { ffi::GetvJoyVersion() } } pub fn vjoy_enabled() -> bool { unsafe { ffi::vJoyEnabled() != 0 } } pub fn get_vjoy_product_string() -> String { unsafe { let ptr = ffi::GetvJoyProductString(); let mut string = String::with_capacity(23); for i in 0..46 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_manufacturer_string() -> String { unsafe { let ptr = ffi::GetvJoyManufacturerString(); let mut string = String::with_capacity(16); for i in 0..32 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_serial_number_string() -> String { unsafe { let ptr = ffi::GetvJoySerialNumberString(); let mut string = String::with_capacity(5); for i in 0..10 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn driver_match() -> (bool, u16, u16) { let mut dll_ver = 0; let mut driver_ver = 0; unsafe { let status = ffi::DriverMatch(&mut dll_ver, &mut driver_ver) != 0; (status, dll_ver, driver_ver) } } pub fn g
} } } pub fn get_number_existing_vjd() -> i32 { let mut number = 0; unsafe { if ffi::GetNumberExistingVJD(&mut number) != 0 { number } else { 0 } } } pub fn get_vjd_button_number(id: u32) -> i32 { unsafe { ffi::GetVJDButtonNumber(id) } } pub fn get_vjd_disc_pov_number(id: u32) -> i32 { unsafe { ffi::GetVJDDiscPovNumber(id) } } pub fn get_vjd_cont_pov_number(id: u32) -> i32 { unsafe { ffi::GetVJDContPovNumber(id) } } pub fn get_vjd_axis_exist(id: u32, usage: HidUsage) -> bool { unsafe { ffi::GetVJDAxisExist(id, usage as u32) != 0 } } pub fn get_vjd_axis_max(id: u32, usage: HidUsage) -> i32 { let mut max = 0; unsafe { if ffi::GetVJDAxisMax(id, usage as u32, &mut max) != 0 { max } else { 0 } } } pub fn get_vjd_axis_min(id: u32, usage: HidUsage) -> i32 { let mut min = 0; unsafe { if ffi::GetVJDAxisMin(id, usage as u32, &mut min) != 0 { min } else { 0 } } } pub fn get_vjd_status(id: u32) -> VJDStat { unsafe { ffi::GetVJDStatus(id) } } pub fn is_vjd_exists(id: u32) -> bool { unsafe { ffi::isVJDExists(id) != 0 } } pub fn get_owner_pid(id: u32) -> i32 { unsafe { ffi::GetOwnerPid(id) } } pub fn acquire_vjd(id: u32) -> bool { unsafe { ffi::AcquireVJD(id) != 0 } } pub fn relinquish_vjd(id: u32) { unsafe { ffi::RelinquishVJD(id); } } pub fn update_vjd(id: u32, data: *mut JoystickPosition) { unsafe { ffi::UpdateVJD(id, data as *mut std::ffi::c_void); } } pub fn reset_vjd(id: u32) { unsafe { ffi::ResetVJD(id); } } pub fn reset_all() { unsafe { ffi::ResetAll(); } } pub fn reset_buttons(id: u32) { unsafe { ffi::ResetButtons(id); } } pub fn reset_povs(id: u32) { unsafe { ffi::ResetPovs(id); } } pub fn set_axis(value: i32, id: u32, axis: HidUsage) { unsafe { ffi::SetAxis(value, id, axis as u32); } } pub fn set_btn(value: i32, id: u32, n_btn: u8) { unsafe { ffi::SetBtn(value, id, n_btn); } } pub fn set_disc_pov(value: i32, id: u32, n_pov: u8) { unsafe { ffi::SetDiscPov(value, id, n_pov); } } pub fn set_cont_pov(value: u32, id: u32, n_pov: u8) { unsafe { ffi::SetContPov(value, id, 
n_pov); } } pub enum VjoyError { InitializationError, OpenVjoyDeviceError, } pub struct VjoyApi; impl VjoyApi { pub fn new() -> Result<Self, VjoyError> { if vjoy_enabled() && driver_match().0 { Ok(Self {}) } else { Err(VjoyError::InitializationError) } } pub fn acquire_device(id: u32) -> Result<VjoyDevice, VjoyError> { if id > 16 { return Err(VjoyError::OpenVjoyDeviceError); } Ok(VjoyDevice::new(id)?) } } pub struct VjoyDevice { pub id: u32, } impl VjoyDevice { fn new(id: u32) -> Result<Self, VjoyError> { match get_vjd_status(id) { VJDStat::VjdStatOwned => return Ok(Self { id }), VJDStat::VjdStatFree => { if acquire_vjd(id) { return Ok(Self { id }); } } _ => {} } Err(VjoyError::OpenVjoyDeviceError) } } impl Drop for VjoyDevice { fn drop(&mut self) { relinquish_vjd(self.id); } }
et_vjoy_max_devices() -> i32 { let mut number = 0; unsafe { if ffi::GetvJoyMaxDevices(&mut number) != 0 { number } else { 0
function_block-random_span
[ { "content": "fn main() {\n\n //https://doc.rust-lang.org/rustc/command-line-arguments.html\n\n println!(r\"cargo:rustc-link-search=C:\\Program Files\\vJoy\\x64\");\n\n println!(\"cargo:rustc-link-lib=vJoyInterface\");\n\n\n\n //https://rust-lang.github.io/rust-bindgen\n\n let bindings = bindgen::Builder::default()\n\n .header(\"wrapper.h\")\n\n .clang_args(&[\"-x\", \"c++\"])\n\n //\n\n .allowlist_function(\"GetvJoyVersion\")\n\n .allowlist_function(\"vJoyEnabled\")\n\n .allowlist_function(\"GetvJoyProductString\")\n\n .allowlist_function(\"GetvJoyManufacturerString\")\n\n .allowlist_function(\"GetvJoySerialNumberString\")\n\n .allowlist_function(\"DriverMatch\")\n\n .allowlist_function(\"RegisterRemovalCB\")\n\n .allowlist_function(\"vJoyFfbCap\")\n\n .allowlist_function(\"GetvJoyMaxDevices\")\n\n .allowlist_function(\"GetNumberExistingVJD\")\n", "file_path": "build.rs", "rank": 28, "score": 26024.94800977271 }, { "content": " pub fn vJoyEnabled() -> BOOL;\n\n\n\n pub fn GetvJoyProductString() -> *mut std::os::raw::c_char;\n\n\n\n pub fn GetvJoyManufacturerString() -> *mut std::os::raw::c_char;\n\n\n\n pub fn GetvJoySerialNumberString() -> *mut std::os::raw::c_char;\n\n\n\n pub fn DriverMatch(DllVer: *mut WORD, DrvVer: *mut WORD) -> BOOL;\n\n\n\n pub fn GetvJoyMaxDevices(n: *mut ::std::os::raw::c_int) -> BOOL;\n\n\n\n pub fn GetNumberExistingVJD(n: *mut ::std::os::raw::c_int) -> BOOL;\n\n\n\n pub fn GetVJDButtonNumber(rID: UINT) -> ::std::os::raw::c_int;\n\n\n\n pub fn GetVJDDiscPovNumber(rID: UINT) -> ::std::os::raw::c_int;\n\n\n\n pub fn GetVJDContPovNumber(rID: UINT) -> ::std::os::raw::c_int;\n\n\n", "file_path": "src/ffi.rs", "rank": 29, "score": 21479.908696498867 }, { "content": " pub fn ResetAll();\n\n\n\n pub fn ResetButtons(rID: UINT) -> BOOL;\n\n\n\n pub fn ResetPovs(rID: UINT) -> BOOL;\n\n\n\n pub fn SetAxis(Value: LONG, rID: UINT, Axis: UINT) -> BOOL;\n\n\n\n pub fn SetBtn(Value: BOOL, rID: UINT, nBtn: UCHAR) -> BOOL;\n\n\n\n pub fn 
SetDiscPov(Value: ::std::os::raw::c_int, rID: UINT, nPov: UCHAR) -> BOOL;\n\n\n\n pub fn SetContPov(Value: DWORD, rID: UINT, nPov: UCHAR) -> BOOL;\n\n}\n", "file_path": "src/ffi.rs", "rank": 30, "score": 21478.49583392214 }, { "content": " pub bHatsEx3: DWORD,\n\n pub lButtonsEx1: LONG,\n\n pub lButtonsEx2: LONG,\n\n pub lButtonsEx3: LONG,\n\n}\n\n\n\n#[repr(i32)]\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum VjdStat {\n\n VjdStatOwned = 0,\n\n VjdStatFree = 1,\n\n VjdStatBusy = 2,\n\n VjdStatMissing = 3,\n\n VjdStatUnknown = 4,\n\n}\n\n\n\n#[link(name = \"vJoyInterface\")]\n\nextern \"C\" {\n\n pub fn GetvJoyVersion() -> SHORT;\n\n\n", "file_path": "src/ffi.rs", "rank": 31, "score": 21478.252081056085 }, { "content": "#![allow(non_upper_case_globals)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n\n\npub type UCHAR = ::std::os::raw::c_uchar;\n\npub type DWORD = ::std::os::raw::c_ulong;\n\npub type BOOL = ::std::os::raw::c_int;\n\npub type BYTE = ::std::os::raw::c_uchar;\n\npub type WORD = ::std::os::raw::c_ushort;\n\npub type UINT = ::std::os::raw::c_uint;\n\npub type PVOID = *mut ::std::os::raw::c_void;\n\npub type SHORT = ::std::os::raw::c_short;\n\npub type LONG = ::std::os::raw::c_long;\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct _JOYSTICK_POSITION_V2 {\n\n pub bDevice: BYTE,\n\n pub wThrottle: LONG,\n\n pub wRudder: LONG,\n", "file_path": "src/ffi.rs", "rank": 32, "score": 21477.4763197094 }, { "content": " pub fn GetVJDAxisExist(rID: UINT, Axis: UINT) -> BOOL;\n\n\n\n pub fn GetVJDAxisMax(rID: UINT, Axis: UINT, Max: *mut LONG) -> BOOL;\n\n\n\n pub fn GetVJDAxisMin(rID: UINT, Axis: UINT, Min: *mut LONG) -> BOOL;\n\n\n\n pub fn GetVJDStatus(rID: UINT) -> VjdStat;\n\n\n\n pub fn isVJDExists(rID: UINT) -> BOOL;\n\n\n\n pub fn GetOwnerPid(rID: UINT) -> ::std::os::raw::c_int;\n\n\n\n pub fn AcquireVJD(rID: UINT) -> BOOL;\n\n\n\n pub fn RelinquishVJD(rID: UINT);\n\n\n\n pub fn UpdateVJD(rID: UINT, pData: 
PVOID) -> BOOL;\n\n\n\n pub fn ResetVJD(rID: UINT) -> BOOL;\n\n\n", "file_path": "src/ffi.rs", "rank": 33, "score": 21477.257383363765 }, { "content": " pub wAileron: LONG,\n\n pub wAxisX: LONG,\n\n pub wAxisY: LONG,\n\n pub wAxisZ: LONG,\n\n pub wAxisXRot: LONG,\n\n pub wAxisYRot: LONG,\n\n pub wAxisZRot: LONG,\n\n pub wSlider: LONG,\n\n pub wDial: LONG,\n\n pub wWheel: LONG,\n\n pub wAxisVX: LONG,\n\n pub wAxisVY: LONG,\n\n pub wAxisVZ: LONG,\n\n pub wAxisVBRX: LONG,\n\n pub wAxisVBRY: LONG,\n\n pub wAxisVBRZ: LONG,\n\n pub lButtons: LONG,\n\n pub bHats: DWORD,\n\n pub bHatsEx1: DWORD,\n\n pub bHatsEx2: DWORD,\n", "file_path": "src/ffi.rs", "rank": 34, "score": 21476.76233465794 }, { "content": " .allowlist_function(\"SetAxis\")\n\n .allowlist_function(\"SetBtn\")\n\n .allowlist_function(\"SetDiscPov\")\n\n .allowlist_function(\"SetContPov\")\n\n //\n\n .allowlist_var(\"DEVICENAME_STRING\")\n\n .allowlist_var(\"NTDEVICE_NAME_STRING\")\n\n .allowlist_var(\"SYMBOLIC_NAME_STRING\")\n\n .allowlist_var(\"VER_X_\")\n\n .allowlist_var(\"VER_H_\")\n\n .allowlist_var(\"VER_M_\")\n\n .allowlist_var(\"VER_L_\")\n\n .allowlist_var(\"DOS_FILE_NAME\")\n\n .allowlist_var(\"VJOY_INTERFACE\")\n\n .allowlist_var(\"VENDOR_N_ID\")\n\n .allowlist_var(\"PRODUCT_N_ID\")\n\n .allowlist_var(\"VERSION_N\")\n\n .allowlist_var(\"VENDOR_STR_ID\")\n\n .allowlist_var(\"PRODUCT_STR_ID\")\n\n .allowlist_var(\"F_.*\")\n", "file_path": "build.rs", "rank": 36, "score": 3.60364969075392 }, { "content": " .allowlist_var(\"HID_USAGE_.*\")\n\n .allowlist_var(\"HID_ID_.*\")\n\n //\n\n .allowlist_type(\"HID_DEVICE_ATTRIBUTES\")\n\n .allowlist_type(\"JOYSTICK_POSITION\")\n\n .allowlist_type(\"JOYSTICK_POSITION_V2\")\n\n .allowlist_type(\"DEVCTRLS\")\n\n .allowlist_type(\"DeviceStat\")\n\n .allowlist_type(\"DEV_INFO\")\n\n //\n\n .rustified_enum(\"VjdStat\")\n\n //\n\n .generate_comments(false)\n\n .layout_tests(false)\n\n .generate()\n\n .expect(\"Unable to generate bindings\");\n\n\n\n // Write the 
bindings to the $OUT_DIR/bindings.rs file.\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n\n\n bindings\n\n .write_to_file(out_path.join(\"bindings.rs\"))\n\n //.write_to_file(Path::new(\"../src\").join(\"bindings.rs\"))\n\n .expect(\"Couldn't write to file\");\n\n}\n", "file_path": "build.rs", "rank": 37, "score": 3.0994009986045583 }, { "content": " //\n\n .allowlist_function(\"GetVJDButtonNumber\")\n\n .allowlist_function(\"GetVJDDiscPovNumber\")\n\n .allowlist_function(\"GetVJDContPovNumber\")\n\n .allowlist_function(\"GetVJDAxisExist\")\n\n .allowlist_function(\"GetVJDAxisMax\")\n\n .allowlist_function(\"GetVJDAxisMin\")\n\n .allowlist_function(\"GetVJDStatus\")\n\n .allowlist_function(\"isVJDExists\")\n\n .allowlist_function(\"GetOwnerPid\")\n\n //\n\n .allowlist_function(\"AcquireVJD\")\n\n .allowlist_function(\"RelinquishVJD\")\n\n .allowlist_function(\"UpdateVJD\")\n\n //\n\n .allowlist_function(\"ResetVJD\")\n\n .allowlist_function(\"ResetAll\")\n\n .allowlist_function(\"ResetButtons\")\n\n .allowlist_function(\"ResetPovs\")\n\n //\n", "file_path": "build.rs", "rank": 40, "score": 1.583497761451122 } ]
Rust
src/parse/bam.rs
devsebb/ImmunoGenotyper
47be20f747e0e7541209be648e01aecf26433565
use debruijn::dna_string::DnaString; use rust_htslib::{bam, bam::record::Aux, bam::Read, bam::Reader}; pub struct UMIReader { reader: bam::Reader, pub current_umi_group: Vec<DnaString>, pub current_umi: String, pub current_cell_barcode: String, pub next_umi_group: Vec<DnaString>, next_umi: String } impl UMIReader { pub fn new(file_path: &str) -> UMIReader { UMIReader { reader: Reader::from_path(file_path).unwrap(), current_umi_group: Vec::new(), current_umi: String::new(), current_cell_barcode: String::new(), next_umi_group: Vec::new(), next_umi: String::new() } } pub fn next(&mut self) -> bool { let mut final_umi = false; if self.get_umi_from_bam().is_none() { final_umi = true; } final_umi } fn get_umi_from_bam(&mut self) -> Option<bool> { self.current_umi_group = self.next_umi_group.clone(); self.current_umi = self.next_umi.clone(); self.current_cell_barcode.clear(); self.next_umi_group.clear(); self.next_umi.clear(); for r in self.reader.records() { let record = r.unwrap(); let read_umi = if let Ok(Aux::String(s)) = record.aux(b"UR") { s.to_owned() } else { panic!("Error -- Could not read UMI, internal error."); }; let current_cell_barcode = if let Ok(Aux::String(s)) = record.aux(b"CR") { s.to_owned() } else { panic!("Error -- Could not read cell barcode, internal error."); }; if self.current_umi == "" { self.current_umi = read_umi.clone(); } let seq = UMIReader::strip_nonbio_regions(&record.seq().as_bytes()[..]); if self.current_umi == read_umi { self.current_umi_group .push(seq); self.current_cell_barcode = current_cell_barcode.clone(); } else { self.next_umi_group .push(seq); self.next_umi = read_umi.clone(); return Some(true); } } None } fn strip_nonbio_regions(seq: &[u8]) -> DnaString { let seq = String::from_utf8(seq.to_owned()).unwrap(); let mut tso_idx = seq.find("TTTCTTATATGGG"); if tso_idx.is_none() { tso_idx = seq.find("AAAGAATATACCC"); }; let seq = if tso_idx.is_some() { String::from_utf8(seq.as_bytes()[tso_idx.unwrap()+13..].to_vec()).unwrap() } 
else { seq }; let poly_t_tail_idx = seq.find("TTTTTTTTTTTTTTTTTTTTTTTTT"); let seq = if poly_t_tail_idx.is_some() { String::from_utf8(seq.as_bytes()[..poly_t_tail_idx.unwrap()].to_vec()).unwrap() } else { seq }; let mut reverse_primer_idx = seq.find("GTACTCTGCGTTGATACCACTGCTT"); if reverse_primer_idx.is_none() { reverse_primer_idx = seq.find("CATGAGACGCAACTATGGTGACGAA"); }; let seq = if reverse_primer_idx.is_some() { String::from_utf8(seq.as_bytes()[..reverse_primer_idx.unwrap()].to_vec()).unwrap() } else { seq }; DnaString::from_dna_string(&seq) } }
use debruijn::dna_string::DnaString; use rust_htslib::{bam, bam::record::Aux, bam::Read, bam::Reader}; pub struct UMIReader { reader: bam::Reader, pub current_umi_group: Vec<DnaString>, pub current_umi: String, pub current_cell_barcode: String, pub next_umi_group: Vec<DnaString>, next_umi: String } impl UMIReader { pub fn new(file_path: &str) -> UMIReader { UMIReader { reader: Reader::from_path(file_path).unwrap(), current_umi_group: Vec::new(), current_umi: String::new(), current_cell_barcode: String::new(), next_umi_group: Vec::new(), next_umi: String::new() } } pub fn next(&mut self) -> bool { let mut final_umi = false; if self.get_umi_from_bam().is_none() { final_umi = true; } final_umi } fn get_umi_from_bam(&mut self) -> Option<bool> { self.current_umi_group = self.next_umi_group.clone(); self.current_umi = self.next_umi.clone(); self.current_cell_barcode.clear(); self.next_umi_group.clear(); self.next_umi.clear(); for r in self.reader.records() { let record = r.unwrap(); let read_umi = if let Ok(Aux::String(s)) = record.aux(b"UR") { s.to_owned() } else { panic!("Error -- Could not read UMI, internal error."); }; let current_cell_barcode = if let Ok(Aux::String(s)) = record.aux(b"CR") { s.to_owned() } else { panic!("Error -- Could not read cell barcode, internal error."); }; if self.current_umi == "" { self.current_umi = read_umi.clone(); } let seq = UMIReader::strip_nonbio_regions(&record.seq().as_bytes()[..]); if self.current_umi == read_umi { self.current_umi_group .push(seq); self.cur
fn strip_nonbio_regions(seq: &[u8]) -> DnaString { let seq = String::from_utf8(seq.to_owned()).unwrap(); let mut tso_idx = seq.find("TTTCTTATATGGG"); if tso_idx.is_none() { tso_idx = seq.find("AAAGAATATACCC"); }; let seq = if tso_idx.is_some() { String::from_utf8(seq.as_bytes()[tso_idx.unwrap()+13..].to_vec()).unwrap() } else { seq }; let poly_t_tail_idx = seq.find("TTTTTTTTTTTTTTTTTTTTTTTTT"); let seq = if poly_t_tail_idx.is_some() { String::from_utf8(seq.as_bytes()[..poly_t_tail_idx.unwrap()].to_vec()).unwrap() } else { seq }; let mut reverse_primer_idx = seq.find("GTACTCTGCGTTGATACCACTGCTT"); if reverse_primer_idx.is_none() { reverse_primer_idx = seq.find("CATGAGACGCAACTATGGTGACGAA"); }; let seq = if reverse_primer_idx.is_some() { String::from_utf8(seq.as_bytes()[..reverse_primer_idx.unwrap()].to_vec()).unwrap() } else { seq }; DnaString::from_dna_string(&seq) } }
rent_cell_barcode = current_cell_barcode.clone(); } else { self.next_umi_group .push(seq); self.next_umi = read_umi.clone(); return Some(true); } } None }
function_block-function_prefixed
[ { "content": "// Takes a reader and returns a csv reader that wraps it, configures to use tab delimiters\n\npub fn get_tsv_reader<R: Read>(reader: R) -> Reader<R> {\n\n csv::ReaderBuilder::new()\n\n .delimiter(b'\\t')\n\n .from_reader(reader)\n\n}\n\n\n\n/* Takes a reference to the ReferenceMetadata structure.\n\n * Produces 3 vectors of sequence-name pairs. Panics if there is a reference sequence that cannot be read.\n\n * If they can be read, converts the given sequence to a DnaString and get the associated name. */\n", "file_path": "src/utils.rs", "rank": 0, "score": 177479.78604027108 }, { "content": "// Determine if a file is a .bam or a .fasta based on file extension\n\npub fn is_fastq(file: &str) -> bool {\n\n let mut is_fasta = true;\n\n let components = file.split(\".\").skip(1);\n\n\n\n for component in components {\n\n if component == \"bam\" {\n\n is_fasta = false;\n\n }\n\n }\n\n\n\n is_fasta\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 129467.40470360385 }, { "content": "// Write the given vector of scores to a TSV file\n\npub fn write_to_tsv(results: Vec<(Vec<String>, i32)>, group_row: Option<Vec<String>>, write_header: bool, output_path: &str) {\n\n let mut str_rep = String::new();\n\n\n\n // Add the headers to the top of the string representation of the tsv file\n\n if write_header {\n\n str_rep += \"ambiguity class\\tscore\";\n\n\n\n match group_row {\n\n Some(ref _s) => {\n\n str_rep += \"\\t\";\n\n str_rep += \"cell barcode\";\n\n },\n\n None => ()\n\n }\n\n\n\n str_rep += \"\\n\";\n\n }\n\n\n\n let group_row_iter = match group_row {\n\n Some(ref vec) => vec.clone(),\n", "file_path": "src/utils.rs", "rank": 2, "score": 111980.11595647852 }, { "content": "pub fn sort_score_vector(mut scores: Vec<(Vec<String>, i32)>) -> Vec<(Vec<String>, i32)> {\n\n scores.sort_by(|a, b| a.0.cmp(&b.0));\n\n scores\n\n}\n", "file_path": "tests/utils.rs", "rank": 3, "score": 102668.98385963627 }, { "content": "// Take a score vector produced by 
utils::convert_scores_to_percentage() and sort them by name\n\npub fn sort_score_vector(mut scores: Vec<(Vec<String>, i32)>) -> Vec<(Vec<String>, i32)> {\n\n scores.sort_by(|a, b| a.0.cmp(&b.0));\n\n scores\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 4, "score": 102668.98385963627 }, { "content": "// Takes the path to a fastq.gz file and returns an error-checked iterator of the DnaStrings of the file\n\npub fn get_error_checked_fastq_readers(\n\n file_path: &str,\n\n) -> (\n\n Box<dyn Iterator<Item = Result<DnaString, Error>>>,\n\n Box<dyn Iterator<Item = Result<DnaString, Error>>>,\n\n) {\n\n (\n\n get_error_checked_fastq_reader(file_path),\n\n get_error_checked_fastq_reader(file_path),\n\n )\n\n}\n\n\n", "file_path": "src/parse/fastq.rs", "rank": 5, "score": 96292.36924294558 }, { "content": "// Convert a given serde_json value into a string array if possible, and crash otherwise\n\nfn to_string_vec(v: &Value, array_name: &str) -> Vec<String> {\n\n let result: Vec<String> = unwrap!(\n\n v.as_array(),\n\n \"Error -- could not parse {} as array\",\n\n array_name\n\n )\n\n .iter()\n\n .map(|string| {\n\n unwrap!(\n\n string.as_str(),\n\n \"Error -- could not parse {} element \\\"{}\\\" as a string\",\n\n array_name,\n\n string\n\n )\n\n .to_string()\n\n })\n\n .collect();\n\n\n\n result\n\n}\n", "file_path": "src/reference_library.rs", "rank": 6, "score": 89096.68599664315 }, { "content": "// Takes a result vector from the filtration pipeline and returns all results above a given score threshold\n\npub fn threshold_percentage(scores: Vec<(String, f32)>, threshold: f32) -> Vec<(String, f32)> {\n\n let mut results = Vec::new();\n\n\n\n for (name, score) in scores {\n\n if score >= threshold {\n\n results.push((name, score));\n\n }\n\n }\n\n\n\n results\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n // Tests for threshold_percentage\n\n // Case where the threshold is 0.0 -- nothing gets thresholded out\n\n #[test]\n\n fn threshold_percentage_no_threshold() {\n\n let 
scores = vec![\n\n (String::from(\"name1\"), 50.5),\n", "file_path": "src/filter/report.rs", "rank": 7, "score": 79773.18219746376 }, { "content": "// Given a column header, find the index of the corresponding column if it exists\n\nfn get_column_index(headers: &[String], search_header: &str) -> Option<usize> {\n\n for (i, header) in headers.iter().enumerate() {\n\n if header == search_header {\n\n return Some(i);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/reference_library.rs", "rank": 8, "score": 77639.4503380468 }, { "content": "pub fn filter_scores(reference_scores: Vec<(Vec<String>, i32)>, score_filter: &i32) -> Vec<(Vec<String>, i32)> {\n\n // Remove scores below the score threshold\n\n let reference_scores: Vec<(Vec<String>, i32)> = reference_scores\n\n .into_iter()\n\n .filter(|(_, val)| val > score_filter)\n\n .collect();\n\n\n\n reference_scores\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn is_fasta_short() {\n\n let expected_results = true;\n\n let results = super::is_fastq(\"reference.fastq\");\n\n assert_eq!(results, expected_results);\n\n }\n\n\n\n #[test]\n", "file_path": "src/utils.rs", "rank": 9, "score": 73353.08413915109 }, { "content": "pub fn score<'a>(\n\n sequence_iter_pair: (\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n ),\n\n reverse_sequence_iter_pair: Option<(\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n )>,\n\n index_pair: &(PseudoAligner, PseudoAligner),\n\n reference_metadata: &ReferenceMetadata,\n\n config: &AlignFilterConfig,\n\n debug_info: Option<&mut AlignDebugInfo>\n\n) -> Vec<(Vec<String>, i32)> {\n\n let (index_forward, index_backward) = index_pair;\n\n let (sequences, sequences_2) = sequence_iter_pair;\n\n let (reverse_sequences, reverse_sequences_2) = match reverse_sequence_iter_pair {\n\n Some((l, r)) => (Some(l), Some(r)),\n\n None => 
(None, None),\n\n };\n", "file_path": "src/align.rs", "rank": 10, "score": 64853.411956932076 }, { "content": "pub fn score<'a>(\n\n sequences: (\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n ),\n\n reverse_sequences: Option<(\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n )>,\n\n reference_index: &(align::PseudoAligner, align::PseudoAligner),\n\n reference_metadata: &ReferenceMetadata,\n\n align_config: &align::AlignFilterConfig,\n\n debug_info: Option<&mut align::AlignDebugInfo>\n\n) -> Vec<(Vec<String>, i32)> {\n\n // Perform filtered pseudoalignment\n\n let reference_scores = align::score(\n\n sequences,\n\n reverse_sequences,\n\n reference_index,\n\n reference_metadata,\n\n align_config,\n\n debug_info\n\n );\n\n\n\n utils::sort_score_vector(reference_scores)\n\n}\n", "file_path": "src/score.rs", "rank": 11, "score": 64853.411956932076 }, { "content": "pub fn get_data(\n\n seq_filename: &str,\n\n lib_filename: &str,\n\n) -> (\n\n (\n\n Box<dyn Iterator<Item = Result<DnaString, Error>>>,\n\n Box<dyn Iterator<Item = Result<DnaString, Error>>>,\n\n ),\n\n (align::PseudoAligner, align::PseudoAligner),\n\n reference_library::ReferenceMetadata,\n\n align::AlignFilterConfig,\n\n) {\n\n let mut data_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n data_path.push(\"tests/test-sequences\");\n\n\n\n let mut library = data_path.clone();\n\n\n\n library.push(\"libraries/\");\n\n library.push(lib_filename);\n\n\n", "file_path": "tests/utils.rs", "rank": 12, "score": 64420.48152275062 }, { "content": "pub fn process(\n\n input_files: Vec<&str>,\n\n reference_index: &(PseudoAligner, PseudoAligner),\n\n reference_metadata: &ReferenceMetadata,\n\n align_config: &AlignFilterConfig,\n\n output_path: &str,\n\n debug_file: Option<String>\n\n) {\n\n /* Get error-checked iterators to the sequences that will be aligned to 
the reference from the\n\n * sequence genome file(s) */\n\n let sequences = get_error_checked_fastq_readers(input_files[0]);\n\n\n\n // Only get reverse sequences if a reverse sequence file is provided\n\n let reverse_sequences = if input_files.len() > 1 {\n\n println!(\"Reading reverse sequences\");\n\n Some(get_error_checked_fastq_readers(input_files[1]))\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "src/process/fastq.rs", "rank": 13, "score": 64420.48152275062 }, { "content": "pub fn process(\n\n input_files: Vec<&str>,\n\n reference_index: &(PseudoAligner, PseudoAligner),\n\n reference_metadata: &ReferenceMetadata,\n\n align_config: &AlignFilterConfig,\n\n output_path: &str,\n\n debug_file: Option<String>\n\n) {\n\n let mut reader = bam::UMIReader::new(input_files[0]);\n\n let mut score_map: HashMap<Vec<String>, (i32, String)> = HashMap::new();\n\n let mut cell_barcodes: Vec<String> = Vec::new();\n\n\n\n let owned_debug_file = if debug_file.is_some() {\n\n debug_file.unwrap()\n\n } else {\n\n \"\".to_owned()\n\n };\n\n\n\n let mut debug_info: AlignDebugInfo = Default::default();\n\n\n", "file_path": "src/process/bam.rs", "rank": 14, "score": 64420.48152275062 }, { "content": "fn get_error_checked_fastq_reader(\n\n file_path: &str,\n\n) -> Box<dyn Iterator<Item = Result<DnaString, Error>>> {\n\n let (reader, _) = unwrap!(\n\n niffler::from_path(path::Path::new(file_path)),\n\n \"Error -- could not determine compression format for {}\",\n\n file_path\n\n );\n\n\n\n Box::new(\n\n fastq::Reader::new(reader)\n\n .records()\n\n .map(|record| match record {\n\n Ok(rec) => Ok(DnaString::from_acgt_bytes(rec.seq())),\n\n _ => Err(Error::new(\n\n ErrorKind::InvalidData,\n\n \"Unable to read sequence\",\n\n )),\n\n }),\n\n )\n\n}\n", "file_path": "src/parse/fastq.rs", "rank": 15, "score": 63261.763961902936 }, { "content": "// Takes a result from the filtration pipeline and appends match percentages to the score tuples\n\npub fn append_match_percent(\n\n scores: 
Vec<(Vec<String>, i32)>,\n\n total_hits: usize,\n\n) -> Vec<(Vec<String>, i32, f32)> {\n\n scores\n\n .iter()\n\n .map(|(names, score)| {\n\n (\n\n names.clone(),\n\n *score,\n\n (*score as f32 / total_hits as f32) * 100.0,\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 16, "score": 62395.31402104304 }, { "content": "pub fn validate_reference_pairs(\n\n reference: &ReferenceMetadata,\n\n) -> (Vec<DnaString>, Vec<DnaString>, Vec<String>) {\n\n let reference_genome = reference.columns[reference.sequence_idx].iter();\n\n let mut reference_library = reference.columns[reference.sequence_name_idx].iter();\n\n\n\n let mut reference_seqs: Vec<DnaString> = Vec::new();\n\n let mut reference_seqs_rev: Vec<DnaString> = Vec::new();\n\n let mut reference_names: Vec<String> = Vec::new();\n\n\n\n let revcomp = match reference.data_type.as_str() {\n\n \"DNA\" => dna::revcomp,\n\n \"RNA\" => rna::revcomp,\n\n _ => panic!(\n\n \"Error -- cannot determine revcomp method to use -- ensure data_type is a valid type\"\n\n ),\n\n };\n\n\n\n for (i, reference) in reference_genome.enumerate() {\n\n reference_seqs.push(DnaString::from_acgt_bytes(reference.as_bytes()));\n\n reference_seqs_rev.push(DnaString::from_acgt_bytes(&revcomp(\n\n reference.clone().into_bytes(),\n\n )));\n\n reference_names.push(unwrap!(reference_library.next(), \"Error -- could not read library name #{} after JSON parse, corrupted internal state.\", i).clone());\n\n }\n\n\n\n (reference_seqs, reference_seqs_rev, reference_names)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 17, "score": 62395.31402104304 }, { "content": "pub fn filter_alignment_by_metrics(\n\n score: usize,\n\n equiv_class: Vec<u32>,\n\n score_threshold: usize,\n\n discard_multiple_matches: bool,\n\n) -> (Option<(Vec<u32>, usize)>, Option<FilterReason>) {\n\n if score >= score_threshold && !equiv_class.is_empty() {\n\n if discard_multiple_matches && equiv_class.len() > 1 {\n\n (None, 
Some(FilterReason::DiscardedMultipleMatch))\n\n } else {\n\n (Some((equiv_class, score)), None)\n\n }\n\n } else {\n\n (None, Some(FilterReason::ScoreBelowThreshold))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n // Tests for filter_by_alignment_score\n", "file_path": "src/filter/align.rs", "rank": 18, "score": 60550.386129562336 }, { "content": "pub fn write_debug_info(info: AlignDebugInfo) {\n\n println!(\"Writing debug info\");\n\n\n\n let mut str_rep = String::new();\n\n\n\n str_rep += \"Read units aligned: \"; str_rep += &info.read_units_aligned.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to being below score threshold: \"; str_rep += &info.score_below_threshold.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to multiple match: \"; str_rep += &info.discarded_multiple_match.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to non-zero mismatches: \"; str_rep += &info.discarded_nonzero_mismatch.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to not matching the reference library: \"; str_rep += &info.no_match.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to not matching the reference library and having a low score: \"; str_rep += &info.no_match_and_score_below_threshold.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered for different reasons between the forward and reverse read: \"; str_rep += &info.different_filter_reasons.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to non-matching pair alignments: \"; str_rep += &info.not_matching_pair.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to a failed force intersect: \"; str_rep += &info.force_intersect_failure.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to completely disjoint alignments: \"; str_rep += &info.disjoint_pair_intersection.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Units filtered due to mangled empty 
scores: \"; str_rep += &info.best_class_empty.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Forward-running alignments discarded: \"; str_rep += &info.forward_runs_discarded.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Reverse-running alignments discarded: \"; str_rep += &info.backward_runs_discarded.to_string(); str_rep += \"\\n\";\n\n str_rep += \"Reverse-read sets discarded due to mangled paired-end data: \"; str_rep += &info.reverse_read_sets_discarded_noneven.to_string(); str_rep += \"\\n\";\n\n\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .open(info.debug_file)\n\n .expect(\"Error -- could not create debug file\");\n\n\n\n file.write_all(str_rep.as_bytes())\n\n .expect(\"Error -- could not write debug info to file\");\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 19, "score": 52569.35130218371 }, { "content": "// Return matches that match in both seq_score and rev_seq_score; if soft intersection is enabled, fall back to best read if one of the reads is empty\n\nfn get_intersecting_reads(\n\n seq_score: &Option<(Vec<u32>, usize)>,\n\n rev_seq_score: &Option<Option<(Vec<u32>, usize)>>,\n\n fallback_on_intersect_fail: bool,\n\n debug_info: &mut AlignDebugInfo\n\n) -> Vec<u32> {\n\n if let (Some((eqv_class_seq, _)), Some(Some((eqv_class_rev_seq, _)))) =\n\n (&seq_score, &rev_seq_score)\n\n {\n\n let class = eqv_class_seq.intersect(eqv_class_rev_seq.to_vec());\n\n\n\n if class.len() == 0 {\n\n debug_info.update(Some(FilterReason::DisjointPairIntersection))\n\n }\n\n\n\n class\n\n } else if fallback_on_intersect_fail {\n\n let class = get_best_reads(seq_score, rev_seq_score);\n\n \n\n if class.len() == 0 {\n\n debug_info.update(Some(FilterReason::BestClassEmpty));\n\n }\n\n class\n\n } else {\n\n debug_info.update(Some(FilterReason::ForceIntersectFailure));\n\n Vec::new()\n\n }\n\n}\n\n\n", "file_path": "src/align.rs", "rank": 20, "score": 48591.53059717593 }, { "content": "// Return matches from seq_score -- otherwise, 
return matches from rev_seq_score\n\nfn get_best_reads(\n\n seq_score: &Option<(Vec<u32>, usize)>,\n\n rev_seq_score: &Option<Option<(Vec<u32>, usize)>>,\n\n) -> Vec<u32> {\n\n if let Some((eqv_class, _)) = &seq_score {\n\n (*eqv_class).clone()\n\n } else if let Some(Some((eqv_class, _))) = &rev_seq_score {\n\n (*eqv_class).clone()\n\n } else {\n\n Vec::new()\n\n }\n\n}\n\n\n\n/* Takes a equivalence class and returns a list of strings. If we're processing allele-level data, the strings will be\n\n * the nt_sequences of the relevant alleles. Otherwise, if we're doing a group_by, the equivalence class will be\n\n * filtered such that there is only one hit per group_by string (e.g. one hit per lineage) and the corresponding strings\n\n * (e.g. lineage name) will be returned. */\n", "file_path": "src/align.rs", "rank": 21, "score": 48588.91949898307 }, { "content": "// Parses a .json that contains a reference library. Returns a tuple of the library's config information and the library data\n\npub fn get_reference_library(path: &Path) -> (align::AlignFilterConfig, ReferenceMetadata) {\n\n // Parse raw JSON to serde_json value\n\n let raw_json_string = read_to_string(path).expect(\"Error -- could not read reference library\");\n\n\n\n let v: Value = serde_json::from_str(&raw_json_string)\n\n .expect(\"Error -- could not parse reference library JSON\");\n\n\n\n // Get aligner configuration from the first JSON object in the file\n\n let config_obj = &v[0];\n\n let score_threshold = config_obj[\"score_threshold\"]\n\n .as_i64()\n\n .expect(\"Error -- could not parse score_threshold as int64\")\n\n as usize;\n\n let score_filter = config_obj[\"score_filter\"]\n\n .as_i64()\n\n .expect(\"Error -- could not parse percent_threshold as int64\");\n\n let num_mismatches = config_obj[\"num_mismatches\"]\n\n .as_i64()\n\n .expect(\"Error -- could not parse num_mismatches as int64\")\n\n as usize;\n", "file_path": "src/reference_library.rs", "rank": 22, "score": 43452.61516740768 }, 
{ "content": "// Case with zero mismatches\n\nfn mismatch() {\n\n let seq_filename = \"mismatch.fastq\";\n\n let lib_filename = \"mismatch.json\";\n\n let (sequences, reference_index, reference_metadata, mut align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (vec![String::from(\"NKG2E_NM_001104593\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n", "file_path": "tests/mismatch.rs", "rank": 23, "score": 27597.04349592862 }, { "content": "// Align the given sequence against the given reference with a score threshold\n\nfn pseudoalign(\n\n sequence: &DnaString,\n\n reference_index: &PseudoAligner,\n\n config: &AlignFilterConfig,\n\n) -> (Option<(Vec<u32>, usize)>, Option<FilterReason>){\n\n // Perform alignment\n\n match reference_index.map_read_with_mismatch(sequence, config.num_mismatches) {\n\n Some((equiv_class, score, mismatches)) => {\n\n // Filter nonzero mismatch\n\n if config.discard_nonzero_mismatch && mismatches != 0 {\n\n return (None, Some(FilterReason::DiscardedNonzeroMismatch));\n\n }\n\n\n\n // Filter by score and match threshold\n\n filter::align::filter_alignment_by_metrics(\n\n score,\n\n equiv_class,\n\n config.score_threshold,\n\n config.discard_multiple_matches,\n\n )\n\n }\n\n None => (None, Some(FilterReason::NoMatch)),\n\n }\n\n}\n", "file_path": "src/align.rs", "rank": 24, "score": 27597.04349592862 }, { "content": "// Determine whether a given pair of equivalence classes constitute a valid pair\n\nfn filter_pair(\n\n seq_score: &Option<(Vec<u32>, usize)>,\n\n rev_seq_score: &Option<Option<(Vec<u32>, usize)>>,\n\n) -> bool {\n\n // Unpack the data to check if the pairs have the same eq_class. 
If they both contain data, do the comparison\n\n if let (Some(Some((mut rev_eq_class, _))), Some((mut eq_class, _))) =\n\n ((rev_seq_score).clone(), (seq_score).clone())\n\n {\n\n // Sort the vectors and compare them by counting matching elements. If they don't match, don't modify the score for this read\n\n rev_eq_class.sort();\n\n eq_class.sort();\n\n let matching = eq_class\n\n .iter()\n\n .zip(&rev_eq_class)\n\n .filter(|&(classl, classr)| classl == classr)\n\n .count();\n\n\n\n if matching != eq_class.len() || matching != rev_eq_class.len() {\n\n return true;\n\n }\n\n } else {\n\n // Otherwise, require_valid_pair is on and rev_score != seq_score, or they're both None. In either case, don't modify score\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/align.rs", "rank": 25, "score": 26398.7509518879 }, { "content": "fn generate_score<'a>(\n\n sequences: Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>,\n\n mut reverse_sequences: Option<Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a>>,\n\n index: &PseudoAligner,\n\n reference_metadata: &ReferenceMetadata,\n\n config: &AlignFilterConfig,\n\n) -> (Vec<(Vec<String>, i32)>, AlignDebugInfo) {\n\n // HashMap of the alignment results. The keys are either strong hits or equivalence classes of hits\n\n let mut score_map: HashMap<Vec<String>, i32> = HashMap::new();\n\n let mut debug_info: AlignDebugInfo = Default::default();\n\n\n\n // Iterate over every read/reverse read pair and align it, incrementing scores for the matching references/equivalence classes\n\n for read in sequences {\n\n let read = read.expect(\"Error -- could not parse read. 
Input R1 data malformed.\");\n\n /* Generate score and equivalence class for this read by aligning the sequence against\n\n * the current reference, if there is a match.*/\n\n let (seq_score, forward_filter_reason) = pseudoalign(&read, index, &config);\n\n\n\n // If there's a reversed sequence, do the paired-end alignment\n\n let mut rev_seq_score = None;\n", "file_path": "src/align.rs", "rank": 26, "score": 25997.612388917085 }, { "content": "// Case with group_by instead of basic reverse allele-level reporting\n\nfn group_by_rev() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic.json\";\n\n let (sequences, reference_index, reference_metadata, align_config) = get_group_by_data(seq_filename, lib_filename);\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (vec![String::from(\"g1\")], 1),\n\n (vec![String::from(\"g1\"), String::from(\"g2\")], 1),\n\n (vec![String::from(\"g2\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n", "file_path": "tests/basic-cases.rs", "rank": 27, "score": 25317.52519515794 }, { "content": "// Case with group_by instead of basic forward allele-level reporting\n\nfn group_by_forward() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic.json\";\n\n let (sequences, reference_index, reference_metadata, align_config) = get_group_by_data(seq_filename, lib_filename);\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (vec![String::from(\"g1\")], 1),\n\n (vec![String::from(\"g1\"), String::from(\"g2\")], 1),\n\n 
(vec![String::from(\"g2\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 28, "score": 25317.52519515794 }, { "content": "fn get_score<'a>(\n\n current_umi_group: &'a Vec<DnaString>,\n\n reference_index: &(PseudoAligner, PseudoAligner),\n\n reference_metadata: &ReferenceMetadata,\n\n align_config: &AlignFilterConfig,\n\n debug_info: Option<&mut AlignDebugInfo>,\n\n) -> Vec<(Vec<String>, i32)> {\n\n let sequences: Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a> = Box::new(\n\n current_umi_group\n\n .iter()\n\n .step_by(2)\n\n .map(|rec| Ok(rec.to_owned())),\n\n );\n\n\n\n let sequences_clone: Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a> = Box::new(\n\n current_umi_group\n\n .iter()\n\n .step_by(2)\n\n .map(|rec| Ok(rec.to_owned())),\n\n );\n", "file_path": "src/process/bam.rs", "rank": 29, "score": 24916.38663218713 }, { "content": "fn get_group_by_data(\n\n seq_filename: &str,\n\n lib_filename: &str\n\n) -> (\n\n (\n\n Box<dyn Iterator<Item = Result<DnaString, Error>>>,\n\n Box<dyn Iterator<Item = Result<DnaString, Error>>>,\n\n ),\n\n (align::PseudoAligner, align::PseudoAligner),\n\n reference_library::ReferenceMetadata,\n\n align::AlignFilterConfig,\n\n) {\n\n let (sequences, reference_index, mut reference_metadata, align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n reference_metadata.group_on = 4;\n\n reference_metadata.headers.push(\"test_group_on\".to_string());\n\n reference_metadata.columns.push(\n\n vec![\"g1\", \"g2\", \"g2\", \"g1\", \"g1\"]\n\n .into_iter()\n\n .map(|column| column.to_string())\n\n .collect(),\n\n );\n\n\n\n (sequences, reference_index, reference_metadata, align_config)\n\n}\n\n\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 30, "score": 24337.009983859636 }, { "content": "fn get_score_map_key(\n\n equiv_class: Vec<u32>,\n\n 
reference_metadata: &ReferenceMetadata,\n\n config: &AlignFilterConfig,\n\n) -> Vec<String> {\n\n if reference_metadata.headers[reference_metadata.group_on] == \"nt_sequence\" {\n\n equiv_class\n\n .into_iter()\n\n .map(|ref_idx| {\n\n reference_metadata.columns[reference_metadata.group_on][ref_idx as usize].clone()\n\n })\n\n .collect()\n\n } else {\n\n let mut results = Vec::new();\n\n\n\n for ref_idx in equiv_class {\n\n let group = &reference_metadata.columns[reference_metadata.group_on][ref_idx as usize];\n\n if !results.contains(group) {\n\n results.push(group.to_string());\n\n }\n", "file_path": "src/align.rs", "rank": 31, "score": 24337.009983859636 }, { "content": "// Case with zero mismatches\n\nfn basic_single_strand_no_mismatch_forward() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic.json\";\n\n let (sequences, reference_index, reference_metadata, align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (\n\n vec![\n\n String::from(\"A02-0\"),\n\n String::from(\"A02-1\"),\n", "file_path": "tests/basic-cases.rs", "rank": 32, "score": 22626.623072812567 }, { "content": "// Case with zero mismatches\n\nfn basic_single_strand_no_mismatch_reverse() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic-rev.json\";\n\n let (sequences, reference_index, reference_metadata, align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (\n\n vec![\n\n String::from(\"A02-0\"),\n\n String::from(\"A02-1\"),\n", "file_path": 
"tests/basic-cases.rs", "rank": 33, "score": 22626.623072812567 }, { "content": "// Case with one mismatch\n\nfn basic_single_strand_one_mismatch_forward() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic.json\";\n\n let (sequences, reference_index, reference_metadata, mut align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n align_config.num_mismatches = 1;\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (\n\n vec![\n", "file_path": "tests/basic-cases.rs", "rank": 34, "score": 21876.2672236371 }, { "content": "// Case with two mismatches\n\nfn basic_single_strand_two_mismatch_reverse() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic-rev.json\";\n\n let (sequences, reference_index, reference_metadata, mut align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n align_config.num_mismatches = 2;\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (\n\n vec![\n", "file_path": "tests/basic-cases.rs", "rank": 35, "score": 21876.2672236371 }, { "content": "// Case with one mismatch\n\nfn basic_single_strand_one_mismatch_reverse() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic-rev.json\";\n\n let (sequences, reference_index, reference_metadata, mut align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n align_config.num_mismatches = 1;\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n 
(\n\n vec![\n", "file_path": "tests/basic-cases.rs", "rank": 36, "score": 21876.2672236371 }, { "content": "// Case with two mismatches\n\nfn basic_single_strand_two_mismatch_forward() {\n\n let seq_filename = \"basic.fastq\";\n\n let lib_filename = \"basic.json\";\n\n let (sequences, reference_index, reference_metadata, mut align_config) = utils::get_data(seq_filename, lib_filename);\n\n\n\n align_config.num_mismatches = 2;\n\n\n\n let results = nimble::align::score(\n\n sequences,\n\n None,\n\n &reference_index,\n\n &reference_metadata,\n\n &align_config,\n\n None\n\n );\n\n let results = utils::sort_score_vector(results);\n\n\n\n let expected_results = vec![\n\n (\n\n vec![\n", "file_path": "tests/basic-cases.rs", "rank": 37, "score": 21876.2672236371 }, { "content": "use crate::align;\n\nuse serde_json::Value;\n\nuse std::fs::read_to_string;\n\nuse std::path::Path;\n\nuse unwrap::unwrap;\n\n\n\n#[derive(Debug)]\n\npub struct ReferenceMetadata {\n\n pub group_on: usize,\n\n pub headers: Vec<String>,\n\n pub columns: Vec<Vec<String>>,\n\n pub sequence_name_idx: usize,\n\n pub sequence_idx: usize,\n\n pub data_type: String,\n\n}\n\n\n\n// Parses a .json that contains a reference library. 
Returns a tuple of the library's config information and the library data\n", "file_path": "src/reference_library.rs", "rank": 42, "score": 9.881308525265263 }, { "content": "use crate::reference_library::ReferenceMetadata;\n\nuse crate::align::AlignDebugInfo;\n\nuse bio::alphabets::{dna, rna};\n\nuse csv::Reader;\n\nuse debruijn::dna_string::DnaString;\n\nuse std::fs::OpenOptions;\n\nuse std::io::{Read, Write};\n\nuse unwrap::unwrap;\n\n\n\n// Takes a reader and returns a csv reader that wraps it, configures to use tab delimiters\n", "file_path": "src/utils.rs", "rank": 44, "score": 9.620460472343202 }, { "content": " return;\n\n };\n\n\n\n let mut current_umi_group = reader.current_umi_group.clone();\n\n\n\n let extra_read = if current_umi_group.len() % 2 != 0 {\n\n current_umi_group.pop()\n\n } else {\n\n None\n\n };\n\n \n\n\n\n let mut s = if owned_debug_file.clone() != \"\" {\n\n get_score(\n\n &current_umi_group,\n\n reference_index,\n\n reference_metadata,\n\n align_config,\n\n Some(&mut debug_info))\n\n } else {\n", "file_path": "src/process/bam.rs", "rank": 45, "score": 9.239584314842755 }, { "content": "\n\n let mut file = OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .open(output_path)\n\n .expect(\"Error -- could not create results file\");\n\n\n\n file.write_all(str_rep.as_bytes())\n\n .expect(\"Error -- could not write results to file\");\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 46, "score": 9.17976435552109 }, { "content": "use bio::io::fastq;\n\nuse debruijn::dna_string::DnaString;\n\nuse std::io::{Error, ErrorKind};\n\nuse std::path;\n\nuse unwrap::unwrap;\n\n\n\n// Takes the path to a fastq.gz file and returns an error-checked iterator of the DnaStrings of the file\n", "file_path": "src/parse/fastq.rs", "rank": 47, "score": 8.923052520869554 }, { "content": " if owned_debug_file != \"\".to_owned() {\n\n debug_info.debug_file = owned_debug_file.clone();\n\n };\n\n\n\n loop {\n\n let final_umi = reader.next();\n\n\n\n if 
final_umi {\n\n if owned_debug_file != \"\".to_owned() {\n\n write_debug_info(debug_info);\n\n }\n\n\n\n let mut results = Vec::new();\n\n for (key, value) in score_map.into_iter() {\n\n results.push((key, value.0));\n\n cell_barcodes.push(value.1);\n\n }\n\n\n\n write_to_tsv(filter_scores(results, &align_config.score_filter), Some(cell_barcodes), false, output_path);\n\n\n", "file_path": "src/process/bam.rs", "rank": 48, "score": 8.588913647654103 }, { "content": "use crate::filter;\n\nuse crate::reference_library;\n\n\n\nuse std::collections::HashMap;\n\nuse std::io::Error;\n\n\n\nuse array_tool::vec::Intersect;\n\nuse debruijn::dna_string::DnaString;\n\nuse reference_library::ReferenceMetadata;\n\n\n\npub type PseudoAligner = debruijn_mapping::pseudoaligner::Pseudoaligner<\n\n debruijn::kmer::VarIntKmer<u64, debruijn::kmer::K20>,\n\n>;\n\n\n\npub enum IntersectLevel {\n\n NoIntersect,\n\n IntersectWithFallback,\n\n ForceIntersect,\n\n}\n\n\n", "file_path": "src/align.rs", "rank": 49, "score": 8.499519369705999 }, { "content": " break;\n\n }\n\n\n\n group = group.intersect(next.unwrap().0.clone());\n\n score += next.unwrap().1;\n\n }\n\n\n\n if group.len() > 0 {\n\n let accessor = score_map.entry(group).or_insert((0, reader.current_cell_barcode.clone()));\n\n *accessor = (accessor.0 + score, reader.current_cell_barcode.clone());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/process/bam.rs", "rank": 50, "score": 7.763421722374845 }, { "content": " );\n\n\n\n let reference_index = (reference_index_forward, reference_index_reverse);\n\n\n\n let sequences = parse::fastq::get_error_checked_fastq_readers(\n\n &sequences\n\n .into_os_string()\n\n .into_string()\n\n .expect(\"Could not convert unit test sequence to OsStr slice.\"),\n\n );\n\n\n\n (sequences, reference_index, reference_metadata, align_config)\n\n}\n\n\n", "file_path": "tests/utils.rs", "rank": 51, "score": 7.471133971498658 }, { "content": "\n\n // Take the \"best\" alignment. 
The specific behavior is determined by the intersect level set in the aligner config\n\n let match_eqv_class = match config.intersect_level {\n\n IntersectLevel::NoIntersect => get_best_reads(&seq_score, &rev_seq_score),\n\n IntersectLevel::IntersectWithFallback => {\n\n get_intersecting_reads(&seq_score, &rev_seq_score, true, &mut debug_info)\n\n }\n\n IntersectLevel::ForceIntersect => {\n\n get_intersecting_reads(&seq_score, &rev_seq_score, false, &mut debug_info)\n\n }\n\n };\n\n\n\n if !match_eqv_class.is_empty() {\n\n let key = get_score_map_key(match_eqv_class, reference_metadata, &config); // Process the equivalence class into a score key\n\n\n\n // Add the key to the score map and increment the score\n\n let accessor = score_map.entry(key).or_insert(0);\n\n *accessor += 1;\n\n debug_info.read_units_aligned += 1;\n\n }\n", "file_path": "src/align.rs", "rank": 52, "score": 7.324191245784297 }, { "content": "\n\n let reverse_sequences: Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a> = Box::new(\n\n current_umi_group\n\n .iter()\n\n .skip(1)\n\n .step_by(2)\n\n .map(|rec| Ok(rec.to_owned())),\n\n );\n\n\n\n let reverse_sequences_clone: Box<dyn Iterator<Item = Result<DnaString, Error>> + 'a> = Box::new(\n\n current_umi_group\n\n .iter()\n\n .skip(1)\n\n .step_by(2)\n\n .map(|rec| Ok(rec.to_owned())),\n\n );\n\n\n\n let reverse_sequence_pair = Some((reverse_sequences, reverse_sequences_clone));\n\n\n\n // Perform alignment and filtration using the score package\n", "file_path": "src/process/bam.rs", "rank": 53, "score": 7.040446992170241 }, { "content": "extern crate csv;\n\nextern crate debruijn;\n\nextern crate debruijn_mapping;\n\nextern crate nimble;\n\n\n\nuse nimble::align;\n\nuse nimble::parse;\n\nuse nimble::reference_library;\n\nuse nimble::utils;\n\nuse std::collections::HashMap;\n\nuse std::path::PathBuf;\n\n\n\nuse debruijn::dna_string::DnaString;\n\nuse std::io::Error;\n\n\n", "file_path": "tests/utils.rs", "rank": 54, "score": 
7.005567201133443 }, { "content": " None => Vec::new()\n\n };\n\n let mut group_row_iter = group_row_iter.iter();\n\n\n\n // Append the results to the tsv string\n\n for (group, score) in results {\n\n str_rep += &group.join(\",\");\n\n str_rep += \"\\t\";\n\n str_rep += &score.to_string();\n\n\n\n match group_row {\n\n Some(ref _vec) => {\n\n str_rep += \"\\t\";\n\n str_rep += group_row_iter.next().unwrap();\n\n },\n\n None => ()\n\n }\n\n\n\n str_rep += \"\\n\";\n\n }\n", "file_path": "src/utils.rs", "rank": 55, "score": 6.928831198205132 }, { "content": "#![feature(allocator_api)]\n\nextern crate csv;\n\nextern crate debruijn;\n\nextern crate debruijn_mapping;\n\nextern crate nimble;\n\n\n\nuse nimble::align;\n\nuse nimble::reference_library;\n\n\n\nuse debruijn::dna_string::DnaString;\n\nuse std::io::Error;\n\n\n\n#[path = \"./utils.rs\"]\n\nmod utils;\n\n\n", "file_path": "tests/basic-cases.rs", "rank": 56, "score": 6.90818130525483 }, { "content": " pub score_filter: i32,\n\n pub intersect_level: IntersectLevel,\n\n pub require_valid_pair: bool,\n\n pub discard_multi_hits: usize,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct AlignDebugInfo {\n\n pub debug_file: String,\n\n pub read_units_aligned: usize,\n\n pub score_below_threshold: usize,\n\n pub discarded_multiple_match: usize,\n\n pub discarded_nonzero_mismatch: usize,\n\n pub no_match: usize,\n\n pub no_match_and_score_below_threshold: usize,\n\n pub different_filter_reasons: usize,\n\n pub not_matching_pair: usize,\n\n pub force_intersect_failure: usize,\n\n pub disjoint_pair_intersection: usize,\n\n pub best_class_empty: usize,\n", "file_path": "src/align.rs", "rank": 58, "score": 6.793508719415382 }, { "content": "use array_tool::vec::Intersect;\n\nuse debruijn::dna_string::DnaString;\n\nuse std::io::Error;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::align::{AlignFilterConfig, AlignDebugInfo, PseudoAligner};\n\nuse crate::parse::bam;\n\nuse crate::reference_library::ReferenceMetadata;\n\nuse 
crate::score::score;\n\nuse crate::utils::{write_to_tsv, write_debug_info, filter_scores};\n\n\n", "file_path": "src/process/bam.rs", "rank": 59, "score": 6.684824903153279 }, { "content": "use crate::align;\n\nuse crate::reference_library;\n\nuse crate::utils;\n\nuse reference_library::ReferenceMetadata;\n\n\n\nuse debruijn::dna_string::DnaString;\n\nuse std::io::Error;\n\n\n\n/* Takes a list of sequences and optionally reverse sequences, a reference library index, reference library metadata,\n\n * and an aligner configuration object, and returns a vector of scores and relative match percentages generated from an alignment\n\n * of the sequences to the reference library. */\n", "file_path": "src/score.rs", "rank": 60, "score": 6.650353469791778 }, { "content": " let mut rev_filter_reason: Option<FilterReason> = None;\n\n if let Some(itr) = &mut reverse_sequences {\n\n let reverse_read = itr\n\n .next()\n\n .expect(\"Error -- read and reverse read files do not have matching lengths: \")\n\n .expect(\"Error -- could not parse reverse read. 
Input R2 data malformed.\");\n\n let (score, reason) = pseudoalign(&reverse_read, index, &config);\n\n rev_seq_score = Some(score);\n\n rev_filter_reason = reason;\n\n }\n\n\n\n if reverse_sequences.is_some() {\n\n match (forward_filter_reason, rev_filter_reason) {\n\n (Some(fr), Some(rr)) => {\n\n if fr == rr {\n\n debug_info.update(Some(fr));\n\n } else if (fr == FilterReason::NoMatch && rr == FilterReason::ScoreBelowThreshold) ||\n\n (rr == FilterReason::NoMatch && fr == FilterReason::ScoreBelowThreshold) { \n\n\n\n debug_info.update(Some(FilterReason::NoMatchAndScoreBelowThreshold));\n", "file_path": "src/align.rs", "rank": 61, "score": 6.603993580140951 }, { "content": " get_score(\n\n &current_umi_group,\n\n reference_index,\n\n reference_metadata,\n\n align_config,\n\n None)\n\n };\n\n\n\n let mut s_extra = match extra_read {\n\n Some(read) => {\n\n let read_f = vec![Ok(read.clone())];\n\n let read_r = vec![Ok(read.clone())];\n\n score(\n\n (Box::new(read_f.into_iter()), Box::new(read_r.into_iter())),\n\n None,\n\n reference_index,\n\n &reference_metadata,\n\n align_config,\n\n None\n\n )\n", "file_path": "src/process/bam.rs", "rank": 62, "score": 6.463741579569488 }, { "content": "use crate::align::{AlignFilterConfig, AlignDebugInfo, PseudoAligner};\n\nuse crate::parse::fastq::get_error_checked_fastq_readers;\n\nuse crate::reference_library::ReferenceMetadata;\n\nuse crate::score::score;\n\nuse crate::utils::{write_to_tsv, write_debug_info, filter_scores};\n\n\n", "file_path": "src/process/fastq.rs", "rank": 63, "score": 6.390841165458395 }, { "content": " let sequence_idx =\n\n get_column_index(&headers, \"sequence\").expect(\"Error -- could not find sequences column\");\n\n\n\n // Parse columns into a matrix of strings\n\n let columns = columns\n\n .as_array()\n\n .expect(\"Error -- could not parse columns as array\");\n\n let columns: Vec<Vec<String>> = columns\n\n .iter()\n\n .map(|column| to_string_vec(column, \"column\"))\n\n .collect();\n\n let 
data_type = config_obj[\"data_type\"]\n\n .as_str()\n\n .expect(\"Error -- could not parse data_type as string\")\n\n .to_string();\n\n\n\n let align_config = align::AlignFilterConfig {\n\n reference_genome_size: columns[sequence_name_idx].len(),\n\n score_threshold,\n\n num_mismatches,\n", "file_path": "src/reference_library.rs", "rank": 64, "score": 6.063043416751951 }, { "content": " let mut sequences = data_path.clone();\n\n sequences.push(\"reads/\");\n\n sequences.push(seq_filename);\n\n\n\n let (align_config, reference_metadata) =\n\n reference_library::get_reference_library(library.as_path());\n\n\n\n let (reference_seqs, reference_seqs_rev, reference_names) =\n\n utils::validate_reference_pairs(&reference_metadata);\n\n\n\n let reference_index_forward = debruijn_mapping::build_index::build_index::<\n\n debruijn_mapping::config::KmerType,\n\n >(&reference_seqs, &reference_names, &HashMap::new(), 1)\n\n .expect(\"Error -- could not create pseudoaligner index of the unit test reference library\");\n\n\n\n let reference_index_reverse = debruijn_mapping::build_index::build_index::<\n\n debruijn_mapping::config::KmerType,\n\n >(&reference_seqs_rev, &reference_names, &HashMap::new(), 1)\n\n .expect(\n\n \"Error -- could not create reverse pseudoaligner index of the unit test reference library\",\n", "file_path": "tests/utils.rs", "rank": 65, "score": 5.79734424159328 }, { "content": " let group_on = config_obj[\"group_on\"]\n\n .as_str()\n\n .expect(\"Error -- could not parse group_on as string\")\n\n .to_string();\n\n\n\n // Get reference library metadata from the second JSON object in the file\n\n let reference = &v[1];\n\n let headers = to_string_vec(&reference[\"headers\"], \"headers\");\n\n let columns = &reference[\"columns\"];\n\n let sequence_name_idx =\n\n get_column_index(&headers, \"sequence_name\").expect(\"Could not find header sequence_name\");\n\n let group_on = if group_on == \"\" {\n\n sequence_name_idx\n\n } else {\n\n unwrap!(\n\n 
get_column_index(&headers, &group_on),\n\n \"Error -- could not find column for group_on {}\",\n\n &group_on\n\n )\n\n };\n", "file_path": "src/reference_library.rs", "rank": 66, "score": 5.586281059979497 }, { "content": "#[derive(Debug, PartialEq)]\n\npub enum FilterReason {\n\n ScoreBelowThreshold,\n\n DiscardedMultipleMatch,\n\n DiscardedNonzeroMismatch,\n\n NoMatch,\n\n NoMatchAndScoreBelowThreshold,\n\n DifferentFilterReasons,\n\n NotMatchingPair,\n\n ForceIntersectFailure,\n\n DisjointPairIntersection,\n\n BestClassEmpty\n\n}\n\n\n\npub struct AlignFilterConfig {\n\n pub reference_genome_size: usize,\n\n pub score_threshold: usize,\n\n pub num_mismatches: usize,\n\n pub discard_nonzero_mismatch: bool,\n\n pub discard_multiple_matches: bool,\n", "file_path": "src/align.rs", "rank": 67, "score": 5.24638566824978 }, { "content": " pub forward_runs_discarded: usize,\n\n pub backward_runs_discarded: usize,\n\n pub reverse_read_sets_discarded_noneven: usize,\n\n}\n\n\n\nimpl AlignDebugInfo {\n\n fn update(&mut self, reason: Option<FilterReason>) {\n\n match reason {\n\n Some(FilterReason::ScoreBelowThreshold) => self.score_below_threshold += 1,\n\n Some(FilterReason::DiscardedMultipleMatch) => self.discarded_multiple_match += 1,\n\n Some(FilterReason::DiscardedNonzeroMismatch) => self.discarded_nonzero_mismatch += 1,\n\n Some(FilterReason::NoMatch) => self.no_match += 1,\n\n Some(FilterReason::NoMatchAndScoreBelowThreshold) => self.no_match_and_score_below_threshold += 1,\n\n Some(FilterReason::DifferentFilterReasons) => self.different_filter_reasons += 1,\n\n Some(FilterReason::NotMatchingPair) => self.not_matching_pair += 1,\n\n Some(FilterReason::ForceIntersectFailure) => self.force_intersect_failure += 1,\n\n Some(FilterReason::DisjointPairIntersection) => self.disjoint_pair_intersection += 1,\n\n Some(FilterReason::BestClassEmpty) => self.best_class_empty += 1,\n\n None => (),\n\n }\n", "file_path": "src/align.rs", "rank": 68, "score": 4.569483478647097 }, { 
"content": " (String::from(\"name2\"), 17.2),\n\n (String::from(\"name3\"), 98.3),\n\n ];\n\n let results = super::threshold_percentage(scores, 0.0);\n\n\n\n let mut expected_results: Vec<(String, f32)> = Vec::new();\n\n expected_results.push((String::from(\"name1\"), 50.5));\n\n expected_results.push((String::from(\"name2\"), 17.2));\n\n expected_results.push((String::from(\"name3\"), 98.3));\n\n\n\n assert_eq!(results.len(), 3);\n\n assert_eq!(results, expected_results);\n\n }\n\n\n\n // Case where the threshold is 100% -- nothing should get included\n\n #[test]\n\n fn threshold_percentage_max_threshold() {\n\n let scores = vec![\n\n (String::from(\"name1\"), 50.5),\n\n (String::from(\"name2\"), 17.2),\n", "file_path": "src/filter/report.rs", "rank": 69, "score": 4.445920445950125 }, { "content": " println!(\"Pseudo-aligning reads to reference index\");\n\n\n\n let owned_debug_file = if debug_file.is_some() {\n\n debug_file.unwrap()\n\n } else {\n\n \"\".to_owned()\n\n };\n\n\n\n let mut debug_info: AlignDebugInfo = Default::default();\n\n\n\n if owned_debug_file != \"\".to_owned() {\n\n debug_info.debug_file = owned_debug_file.clone();\n\n };\n\n\n\n // Perform alignment and filtration using the score package\n\n let results = if owned_debug_file.clone() != \"\" {\n\n score(\n\n sequences,\n\n reverse_sequences,\n\n reference_index,\n", "file_path": "src/process/fastq.rs", "rank": 70, "score": 4.15753733454925 }, { "content": " let discard_multiple_matches = config_obj[\"discard_multiple_matches\"]\n\n .as_bool()\n\n .expect(\"Error -- could not parse discard_multiple_mismatches as boolean\");\n\n let require_valid_pair = config_obj[\"require_valid_pair\"]\n\n .as_bool()\n\n .expect(\"Error -- could not parse require_valid_pair as boolean\");\n\n let discard_multi_hits = config_obj[\"discard_multi_hits\"]\n\n .as_i64()\n\n .expect(\"Error -- could not parse discard_multi_hits as int64\")\n\n as usize;\n\n let intersect_level = config_obj[\"intersect_level\"]\n\n 
.as_i64()\n\n .expect(\"Error -- could not parse intersect_level as int64\");\n\n let intersect_level = match intersect_level {\n\n 0 => align::IntersectLevel::NoIntersect,\n\n 1 => align::IntersectLevel::IntersectWithFallback,\n\n 2 => align::IntersectLevel::ForceIntersect,\n\n _ => panic!(\"Error -- invalid intersect level in config file. Please choose intersect level 0, 1, or 2.\")\n\n };\n\n\n", "file_path": "src/reference_library.rs", "rank": 71, "score": 4.108167318892217 }, { "content": " (String::from(\"name3\"), 98.2),\n\n ];\n\n let results = super::threshold_percentage(scores, 100.0);\n\n\n\n let expected_results: Vec<(String, f32)> = Vec::new();\n\n\n\n assert_eq!(results.len(), 0);\n\n assert_eq!(results, expected_results);\n\n }\n\n\n\n // Case where the threshold is an arbitrary value -- some should be filtered out\n\n #[test]\n\n fn threshold_percentage_half_threshold() {\n\n let scores = vec![\n\n (String::from(\"name1\"), 50.5),\n\n (String::from(\"name2\"), 17.2),\n\n (String::from(\"name3\"), 98.3),\n\n ];\n\n let results = super::threshold_percentage(scores, 25.0);\n\n\n\n let mut expected_results: Vec<(String, f32)> = Vec::new();\n\n expected_results.push((String::from(\"name1\"), 50.5));\n\n expected_results.push((String::from(\"name3\"), 98.3));\n\n\n\n assert_eq!(results.len(), 2);\n\n assert_eq!(results, expected_results);\n\n }\n\n}\n", "file_path": "src/filter/report.rs", "rank": 72, "score": 4.097393551686279 }, { "content": "pub mod align;\n\npub mod filter;\n\npub mod parse;\n\npub mod process;\n\npub mod reference_library;\n\npub mod score;\n\npub mod utils;\n", "file_path": "src/lib.rs", "rank": 73, "score": 3.7595263717473637 }, { "content": "pub mod bam;\n\npub mod fastq;\n", "file_path": "src/parse/mod.rs", "rank": 74, "score": 3.45325939747327 }, { "content": "pub mod align;\n\npub mod report;\n", "file_path": "src/filter/mod.rs", "rank": 75, "score": 3.45325939747327 }, { "content": "pub mod bam;\n\npub mod fastq;\n", 
"file_path": "src/process/mod.rs", "rank": 76, "score": 3.45325939747327 }, { "content": " String::from(\"A02-0\"),\n\n String::from(\"A02-1\"),\n\n String::from(\"A02-2\"),\n\n String::from(\"A02-LC\"),\n\n ],\n\n 1,\n\n ),\n\n (vec![String::from(\"A02-0\"), String::from(\"A02-LC\")], 1),\n\n (vec![String::from(\"A02-1\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 77, "score": 3.3790508622871265 }, { "content": " String::from(\"A02-0\"),\n\n String::from(\"A02-1\"),\n\n String::from(\"A02-2\"),\n\n String::from(\"A02-LC\"),\n\n ],\n\n 1,\n\n ),\n\n (vec![String::from(\"A02-0\"), String::from(\"A02-LC\")], 1),\n\n (vec![String::from(\"A02-1\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 78, "score": 3.3790508622871265 }, { "content": " String::from(\"A02-0\"),\n\n String::from(\"A02-1\"),\n\n String::from(\"A02-2\"),\n\n String::from(\"A02-LC\"),\n\n ],\n\n 1,\n\n ),\n\n (vec![String::from(\"A02-0\"), String::from(\"A02-LC\")], 1),\n\n (vec![String::from(\"A02-1\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 79, "score": 3.3790508622871265 }, { "content": " String::from(\"A02-0\"),\n\n String::from(\"A02-1\"),\n\n String::from(\"A02-2\"),\n\n String::from(\"A02-LC\"),\n\n ],\n\n 1,\n\n ),\n\n (vec![String::from(\"A02-0\"), String::from(\"A02-LC\")], 1),\n\n (vec![String::from(\"A02-1\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 80, "score": 
3.3790508622871265 }, { "content": " String::from(\"A02-2\"),\n\n String::from(\"A02-LC\"),\n\n ],\n\n 1,\n\n ),\n\n (vec![String::from(\"A02-0\"), String::from(\"A02-LC\")], 1),\n\n (vec![String::from(\"A02-1\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 81, "score": 3.236600960046916 }, { "content": " String::from(\"A02-2\"),\n\n String::from(\"A02-LC\"),\n\n ],\n\n 1,\n\n ),\n\n (vec![String::from(\"A02-0\"), String::from(\"A02-LC\")], 1),\n\n (vec![String::from(\"A02-1\")], 2),\n\n ];\n\n let expected_results = utils::sort_score_vector(expected_results);\n\n\n\n assert_eq!(results, expected_results);\n\n}\n\n\n\n#[test]\n", "file_path": "tests/basic-cases.rs", "rank": 82, "score": 3.236600960046916 }, { "content": " },\n\n None => Vec::new()\n\n };\n\n\n\n s.append(&mut s_extra);\n\n\n\n if s.len() == 0 {\n\n continue;\n\n }\n\n\n\n let mut scores = s.iter();\n\n\n\n let first_score = scores.next().unwrap();\n\n let mut group = first_score.0.clone();\n\n let mut score = first_score.1;\n\n\n\n loop {\n\n let next = scores.next();\n\n\n\n if next.is_none() {\n", "file_path": "src/process/bam.rs", "rank": 83, "score": 3.1389698116100453 }, { "content": " reference_metadata,\n\n align_config,\n\n Some(&mut debug_info)\n\n )\n\n } else {\n\n score(\n\n sequences,\n\n reverse_sequences,\n\n reference_index,\n\n reference_metadata,\n\n align_config,\n\n None\n\n )\n\n };\n\n\n\n println!(\"Writing results to file\");\n\n\n\n write_to_tsv(filter_scores(results, &align_config.score_filter), None, true, output_path);\n\n\n\n if owned_debug_file != \"\".to_owned() {\n\n write_debug_info(debug_info);\n\n }\n\n\n\n print!(\"Output results written to output path\");\n\n}\n", "file_path": "src/process/fastq.rs", "rank": 84, "score": 2.8319332486809543 }, { "content": " }\n\n\n\n fn merge(&mut self, info: AlignDebugInfo) 
{\n\n self.read_units_aligned += info.read_units_aligned;\n\n self.read_units_aligned += info.read_units_aligned;\n\n self.score_below_threshold += info.score_below_threshold;\n\n self.discarded_multiple_match += info.discarded_multiple_match;\n\n self.discarded_nonzero_mismatch += info.discarded_nonzero_mismatch;\n\n self.no_match += info.no_match;\n\n self.no_match_and_score_below_threshold += info.no_match_and_score_below_threshold;\n\n self.different_filter_reasons += info.different_filter_reasons;\n\n self.not_matching_pair += info.not_matching_pair;\n\n self.force_intersect_failure += info.force_intersect_failure;\n\n self.disjoint_pair_intersection += info.disjoint_pair_intersection;\n\n self.best_class_empty += info.best_class_empty;\n\n }\n\n}\n\n\n\n/* Takes a set of sequences and optionally, reverse sequences, a debrujin map index of the reference\n\n * genome, the reference library metadata object, and the aligner configuration, and performs a\n\n * debrujin-graph based pseduoalignment, returning a score for each readable reference in the reference\n\n * genome.\n\n * This function does some alignment-time filtration based on the provided configuration. 
*/\n", "file_path": "src/align.rs", "rank": 85, "score": 2.8312663515622454 }, { "content": "\n\n assert_eq!(results, expected_results);\n\n }\n\n\n\n /* Case where the score is higher than the threshold, but matches more than one\n\n * reference and has discard_multiple_matches = true -- should filter the alignment */\n\n #[test]\n\n fn filter_multiple_matches() {\n\n let score = 100;\n\n let equiv_class = vec![1, 2];\n\n\n\n let (results, _) = super::filter_alignment_by_metrics(score, equiv_class, 50, true);\n\n let expected_results = None;\n\n\n\n assert_eq!(results, expected_results);\n\n }\n\n}\n", "file_path": "src/filter/align.rs", "rank": 86, "score": 2.310125883321894 }, { "content": "use crate::align::FilterReason;\n\n\n\n/* Takes a given alignment and returns it if it's above the given match threshold and\n\n * has an equivalence class. Can be configured to discard alignments with more than one match. */\n", "file_path": "src/filter/align.rs", "rank": 87, "score": 2.221190910957631 }, { "content": "# nimble-aligner\n\n**nimble-aligner should be installed and used via its [Python cli package](https://github.com/BimberLab/nimble).**\n\n\n\nnimble-aligner is the backend for nimble, a tool that executes lightweight, flexible alignments to generate supplemental alignment data. nimble-aligner uses pseudoalignment in order to rapidly generate supplemental calls via [rust-pseudoaligner](https://github.com/10XGenomics/rust-pseudoaligner/tree/master/src).\n\n \n\nThis project uses Rust-Bio: [Köster, J. (2016). Rust-Bio: a fast and safe bioinformatics library. 
Bioinformatics, 32(3), 444-446.](http://bioinformatics.oxfordjournals.org/content/early/2015/10/06/bioinformatics.btv573.short?rss=1)\n", "file_path": "README.md", "rank": 88, "score": 2.1852148439880725 }, { "content": " }\n\n\n\n // Update the results map\n\n let mut results = Vec::new();\n\n for (key, value) in score_map.into_iter() {\n\n results.push((key, value));\n\n }\n\n\n\n (results, debug_info)\n\n}\n\n\n", "file_path": "src/align.rs", "rank": 89, "score": 2.15261773478114 }, { "content": " } else {\n\n debug_info.update(Some(FilterReason::DifferentFilterReasons));\n\n }\n\n },\n\n (None, Some(rr)) => debug_info.update(Some(rr)),\n\n (Some(fr), None) => debug_info.update(Some(fr)),\n\n (None, None) => (),\n\n };\n\n } else {\n\n debug_info.update(forward_filter_reason);\n\n }\n\n\n\n // If there are no reverse sequences, ignore the require_valid_pair filter\n\n if reverse_sequences.is_some()\n\n && config.require_valid_pair\n\n && filter_pair(&seq_score, &rev_seq_score)\n\n {\n\n debug_info.update(Some(FilterReason::NotMatchingPair));\n\n continue;\n\n }\n", "file_path": "src/align.rs", "rank": 90, "score": 2.052322264968543 }, { "content": " fn is_fasta_long() {\n\n let expected_results = true;\n\n let results = super::is_fastq(\"reference.bin.fastq.gz\");\n\n assert_eq!(results, expected_results);\n\n }\n\n\n\n #[test]\n\n fn is_bam_short() {\n\n let expected_results = false;\n\n let results = super::is_fastq(\"reference.bam.gz\");\n\n assert_eq!(results, expected_results);\n\n }\n\n\n\n #[test]\n\n fn is_bam_long() {\n\n let expected_results = false;\n\n let results = super::is_fastq(\"reference.bin.bam.zip\");\n\n assert_eq!(results, expected_results);\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 91, "score": 1.4105158132223732 } ]
Rust
loadstone_config/src/codegen/memory_map.rs
arron-speake-bluefruit/loadstone
c65cb32809dc069b17830dd4a39d95c913994200
use anyhow::Result; use quote::{format_ident, quote}; use std::{fs::OpenOptions, io::Write, path::Path}; use crate::{ memory::{ExternalMemoryMap, InternalMemoryMap, MemoryConfiguration}, port::{Port, Subfamily}, }; use super::prettify_file; pub fn generate<P: AsRef<Path>>( autogenerated_folder_path: P, memory_configuration: &MemoryConfiguration, port: &Port, ) -> Result<()> { let filename = autogenerated_folder_path.as_ref().join("memory_map.rs"); let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&filename)?; let base_index = 1usize; let imports = generate_imports(&memory_configuration, port)?; let mcu_banks = generate_mcu_banks( base_index, &memory_configuration.internal_memory_map, memory_configuration.golden_index, )?; let external_banks = generate_external_banks( memory_configuration.internal_memory_map.banks.len() + base_index, &memory_configuration.external_memory_map, memory_configuration.golden_index, )?; file.write_all(imports.as_bytes())?; file.write_all(mcu_banks.as_bytes())?; file.write_all(external_banks.as_bytes())?; prettify_file(filename).ok(); Ok(()) } fn generate_imports(memory_configuration: &MemoryConfiguration, port: &Port) -> Result<String> { let external_address: Vec<_> = match &memory_configuration.external_flash { Some(external_flash) if external_flash.name.to_lowercase().contains("n25q128a") => { ["blue_hal", "drivers", "micron", "n25q128a_flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect() } None if *port == Port::Stm32F412 => ["blue_hal", "hal", "null", "NullAddress"] .iter() .map(|f| format_ident!("{}", f)) .collect(), _ => ["usize"].iter().map(|f| format_ident!("{}", f)).collect(), }; let mcu_address: Vec<_> = match port.subfamily() { Subfamily::Stm32f4 => ["blue_hal", "drivers", "stm32f4", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect(), Subfamily::Efm32Gg11 => ["blue_hal", "drivers", "efm32gg11b", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) 
.collect(), }; let code = quote! { use crate::devices::image as image; #[allow(unused_imports)] use super::pin_configuration::ExternalFlash; use #(#mcu_address)::* as McuAddress; use #(#external_address)::* as ExternalAddress; }; Ok(format!("{}", code)) } fn generate_external_banks( base_index: usize, map: &ExternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_external_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable = vec![false; number_of_external_banks]; let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_external_banks).map(|i| Some((i + base_index).saturating_sub(1)) == golden_index).collect(); let code = quote! { const NUMBER_OF_EXTERNAL_BANKS: usize = #number_of_external_banks; pub static EXTERNAL_BANKS: [image::Bank<ExternalAddress>; NUMBER_OF_EXTERNAL_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: ExternalAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) } fn generate_mcu_banks( base_index: usize, map: &InternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_mcu_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == map.bootable_index).collect(); let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == golden_index).collect(); let code = quote! 
{ const NUMBER_OF_MCU_BANKS: usize = #number_of_mcu_banks; pub static MCU_BANKS: [image::Bank<McuAddress>; NUMBER_OF_MCU_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: McuAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) }
use anyhow::Result; use quote::{format_ident, quote}; use std::{fs::OpenOptions, io::Write, path::Path}; use crate::{ memory::{ExternalMemoryMap, InternalMemoryMap, MemoryConfiguration}, port::{Port, Subfamily}, }; use super::prettify_file; pub fn generate<P: AsRef<Path>>( autogenerated_folder_path: P, memory_configuration: &MemoryConfiguration, port: &Port, ) -> Result<()> { let filename = autogenerated_folder_path.as_ref().join("memory_map.rs"); let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&filename)?; let base_index = 1usize; let imports = generate_imports(&memory_configuration, port)?; let mcu_banks = generate_mcu_banks( base_index, &memory_configuration.internal_memory_map, memory_configuration.golden_index, )?; let external_banks = generate_external_banks( memory_configuration.internal_memory_map.banks.len() + base_index, &memory_configuration.external_memory_map, memory_configuration.golden_index, )?; file.write_all(imports.as_bytes())?; file.write_all(mcu_banks.as_bytes())?; file.write_all(external_banks.as_bytes())?; prettify_file(filename).ok(); Ok(()) }
fn generate_external_banks( base_index: usize, map: &ExternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_external_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable = vec![false; number_of_external_banks]; let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_external_banks).map(|i| Some((i + base_index).saturating_sub(1)) == golden_index).collect(); let code = quote! { const NUMBER_OF_EXTERNAL_BANKS: usize = #number_of_external_banks; pub static EXTERNAL_BANKS: [image::Bank<ExternalAddress>; NUMBER_OF_EXTERNAL_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: ExternalAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) } fn generate_mcu_banks( base_index: usize, map: &InternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_mcu_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == map.bootable_index).collect(); let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == golden_index).collect(); let code = quote! { const NUMBER_OF_MCU_BANKS: usize = #number_of_mcu_banks; pub static MCU_BANKS: [image::Bank<McuAddress>; NUMBER_OF_MCU_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: McuAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) }
fn generate_imports(memory_configuration: &MemoryConfiguration, port: &Port) -> Result<String> { let external_address: Vec<_> = match &memory_configuration.external_flash { Some(external_flash) if external_flash.name.to_lowercase().contains("n25q128a") => { ["blue_hal", "drivers", "micron", "n25q128a_flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect() } None if *port == Port::Stm32F412 => ["blue_hal", "hal", "null", "NullAddress"] .iter() .map(|f| format_ident!("{}", f)) .collect(), _ => ["usize"].iter().map(|f| format_ident!("{}", f)).collect(), }; let mcu_address: Vec<_> = match port.subfamily() { Subfamily::Stm32f4 => ["blue_hal", "drivers", "stm32f4", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect(), Subfamily::Efm32Gg11 => ["blue_hal", "drivers", "efm32gg11b", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect(), }; let code = quote! { use crate::devices::image as image; #[allow(unused_imports)] use super::pin_configuration::ExternalFlash; use #(#mcu_address)::* as McuAddress; use #(#external_address)::* as ExternalAddress; }; Ok(format!("{}", code)) }
function_block-full_function
[ { "content": "pub fn read_key(mut file: File) -> Result<SigningKey, Error> {\n\n let mut string = String::new();\n\n file.read_to_string(&mut string).map_err(|_| Error::KeyParseFailed)?;\n\n SigningKey::from_str(string.as_str()).map_err(|_| Error::KeyParseFailed)\n\n}\n\n\n", "file_path": "tools/signing_tool/src/signing.rs", "rank": 0, "score": 196527.14849511793 }, { "content": "pub fn generate_stm32f4_pins(configuration: &Configuration, file: &mut File) -> Result<()> {\n\n let mut code = quote! {\n\n use blue_hal::{enable_gpio, gpio, gpio_inner, alternate_functions, enable_qspi, enable_spi, enable_serial, pin_rows};\n\n use blue_hal::drivers::stm32f4::gpio::*;\n\n };\n\n\n\n generate_imports_and_types(configuration, &mut code);\n\n generate_gpio_macros(configuration, &mut code);\n\n generate_pin_constructor(configuration, &mut code);\n\n\n\n file.write_all(format!(\"{}\", code).as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "loadstone_config/src/codegen/pins/stm32.rs", "rank": 1, "score": 193359.53535266945 }, { "content": "/// Renders the dropdown menu to select one of the supported\n\n/// hardware ports.\n\npub fn select_port(ui: &mut egui::Ui, port: &mut Port) {\n\n ui.horizontal_wrapped(|ui| {\n\n egui::ComboBox::from_label(format!(\n\n \"Family [{}] - Subfamily [{}]\",\n\n port.family(),\n\n port.subfamily()\n\n ))\n\n .selected_text(port.to_string())\n\n .show_ui(ui, |ui| {\n\n for port_choice in Port::into_enum_iter() {\n\n ui.selectable_value(port, port_choice, port_choice.to_string());\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/mod.rs", "rank": 2, "score": 175915.0369806171 }, { "content": "fn read_file(file: &mut File) -> Result<Vec<u8>, Error> {\n\n let mut contents = Vec::new();\n\n match file.read_to_end(&mut contents) {\n\n Ok(_) => Ok(contents),\n\n Err(_) => Err(Error::FileReadFailed(error::File::Image)),\n\n }\n\n}\n\n\n", "file_path": "tools/signing_tool/src/signing.rs", "rank": 3, "score": 
168971.4242692728 }, { "content": "fn generate_efm32gg(_configuration: &Configuration, file: &mut File) -> Result<()> {\n\n let code = quote! {\n\n pub use blue_hal::hal::null::NullFlash as ExternalFlash;\n\n };\n\n file.write_all(format!(\"{}\", code).as_bytes())?;\n\n Ok(())\n\n}\n", "file_path": "loadstone_config/src/codegen/pins/mod.rs", "rank": 4, "score": 167364.83598733955 }, { "content": "/// Renders the menu that configures serial communication features, including\n\n/// whether serial communication is available at all, whether it allows for image\n\n/// recovery, and what pins and peripherals it uses in a particular port.\n\npub fn configure_serial(ui: &mut egui::Ui, serial: &mut Serial, port: &Port) {\n\n let mut available_peripherals =\n\n pins::serial_tx(port).chain(pins::serial_rx(port)).map(|p| p.peripheral).collect_vec();\n\n available_peripherals.sort();\n\n available_peripherals.dedup();\n\n\n\n let first_valid_tx_pin = || {\n\n pins::serial_tx(port)\n\n .find_map(|p| (p.peripheral == available_peripherals[0]).then_some(p))\n\n .unwrap()\n\n };\n\n\n\n let first_valid_rx_pin = || {\n\n pins::serial_rx(port)\n\n .find_map(|p| (p.peripheral == available_peripherals[0]).then_some(p))\n\n .unwrap()\n\n };\n\n\n\n let mut serial_box = matches!(serial, Serial::Enabled { .. });\n\n ui.horizontal_wrapped(|ui| {\n", "file_path": "loadstone_front/src/app/menus/serial.rs", "rank": 5, "score": 159478.34240861714 }, { "content": "/// Renders the menu to configure the boot metrics feature (information relayed from the bootloader\n\n/// to the running application, including an optional boot timing report.\n\npub fn configure_boot_metrics(ui: &mut egui::Ui, boot_metrics: &mut BootMetrics, port: &Port) {\n\n let mut metrics_box = matches!(boot_metrics, BootMetrics::Enabled { .. 
});\n\n ui.horizontal_wrapped(|ui| {\n\n ui.checkbox(&mut metrics_box, \"Boot Metrics\");\n\n match (metrics_box, &boot_metrics) {\n\n (true, BootMetrics::Disabled) => *boot_metrics = BootMetrics::Enabled { timing: false },\n\n (false, BootMetrics::Enabled { .. }) => *boot_metrics = BootMetrics::Disabled,\n\n _ => {}\n\n }\n\n ui.label(\"Relay information about the boot process through RAM memory.\");\n\n });\n\n ui.horizontal_wrapped(|ui| {\n\n let mut dummy = false;\n\n let timing_box =\n\n if let BootMetrics::Enabled { timing } = boot_metrics { timing } else { &mut dummy };\n\n ui.separator();\n\n ui.set_enabled(BootMetrics::timing_supported(port) && metrics_box);\n\n ui.checkbox(timing_box, \"Timing Metrics\");\n\n ui.label(\"Include boot timing as part of the boot metrics.\");\n\n });\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/mod.rs", "rank": 6, "score": 153104.15863979876 }, { "content": "pub fn decorate_file(image_filename: &str, is_golden: bool) -> Result<(), Error> {\n\n let file = open_image(image_filename)?;\n\n if file\n\n .bytes()\n\n .map(|b| b.unwrap())\n\n .until_sequence(magic_string_inverted().as_slice())\n\n .contains_sequence()\n\n {\n\n return Err(Error::FileAlreadySigned(error::File::Image));\n\n }\n\n let mut file = open_image(image_filename)?;\n\n if is_golden {\n\n file.write(GOLDEN_STRING.as_bytes())\n\n .map_err(|_| Error::FileWriteFailed(error::File::Image))?;\n\n println!(\"Successfully appended golden string.\");\n\n }\n\n file.write(magic_string_inverted().as_slice())\n\n .map_err(|_| Error::FileWriteFailed(error::File::Image))?;\n\n println!(\"Successfully appended magic string.\");\n\n Ok(())\n\n}\n", "file_path": "tools/signing_tool/src/decorating.rs", "rank": 7, "score": 149007.05466623663 }, { "content": "fn prettify_file<P: AsRef<Path>>(path: P) -> io::Result<()> {\n\n Command::new(\"rustfmt\").arg(path.as_ref()).spawn()?.wait()?;\n\n Ok(())\n\n}\n", "file_path": "loadstone_config/src/codegen/mod.rs", "rank": 8, 
"score": 148669.89851917647 }, { "content": "fn open_image(filename: &str) -> Result<File, Error> {\n\n OpenOptions::new()\n\n .read(true)\n\n .append(true)\n\n .open(filename)\n\n .map_err(|_| Error::FileOpenFailed(e::File::Image))\n\n}\n\n\n", "file_path": "tools/signing_tool/src/main.rs", "rank": 10, "score": 143043.30539137602 }, { "content": "/// The MCU flash available for a port. All ports must have exactly one\n\n/// main MCU flash for Loadstone to correctly function.\n\npub fn internal_flash(port: &Port) -> FlashChip {\n\n match port {\n\n Port::Stm32F412 => FlashChip {\n\n name: \"STM32F412 MCU Flash\".to_owned(),\n\n internal: true,\n\n start: 0x0800_0000,\n\n end: 0x0810_0000,\n\n region_size: KB!(16),\n\n },\n\n Port::Wgm160P => FlashChip {\n\n name: \"EFM32GG11 MCU Flash\".to_owned(),\n\n internal: true,\n\n start: 0x0000_0000,\n\n end: 512 * KB!(4),\n\n region_size: KB!(4),\n\n },\n\n }\n\n}\n\n\n", "file_path": "loadstone_config/src/memory.rs", "rank": 11, "score": 140685.1354325661 }, { "content": "/// Reads the contents of `file` and signs it using P256 ECDSA/SHA256 with the key in `key_file`.\n\npub fn sign_file(image_filename: &str, key: SigningKey) -> Result<usize, Error> {\n\n let mut file = open_image(image_filename)?;\n\n let plaintext = read_file(&mut file)?;\n\n let signature = key.sign(&plaintext);\n\n let bytes_written =\n\n file.write(signature.as_bytes()).map_err(|_| Error::FileWriteFailed(error::File::Image))?;\n\n\n\n if bytes_written == signature.as_bytes().len() {\n\n Ok(bytes_written)\n\n } else {\n\n Err(Error::FileWriteFailed(error::File::Image))\n\n }\n\n}\n\n\n", "file_path": "tools/signing_tool/src/signing.rs", "rank": 12, "score": 140534.0616502769 }, { "content": "fn process_configuration_file() -> Result<()> {\n\n println!(\"cargo:rerun-if-env-changed=LOADSTONE_CONFIG\");\n\n\n\n let configuration: Configuration = if let Ok(config) = std::env::var(\"LOADSTONE_CONFIG\") {\n\n if config.is_empty() {\n\n return Ok(()); // 
Assuming tests\n\n } else {\n\n ron::from_str(&config)?\n\n }\n\n } else {\n\n panic!(\n\n \"\\r\\n\\r\\nBuilding Loadstone requires you supply a configuration file, \\\n\n embedded in the `LOADSTONE_CONFIG` environment variable. \\r\\nTry again with \\\n\n 'LOADSTONE_CONFIG=`cat my_config.ron` cargo... \\r\\nIf you're just looking \\\n\n to run unit tests, or to build a port that does not require any code \\\n\n generation (manual port), supply an empty string:\n\n 'LOADSTONE_CONFIG=\\\"\\\" cargo...`\\r\\n\\r\\n\"\n\n )\n\n };\n\n\n\n validate_feature_flags_against_configuration(&configuration);\n\n generate_modules(env!(\"CARGO_MANIFEST_DIR\"), &configuration)?;\n\n configure_runner(&configuration.port.to_string());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 13, "score": 134466.2953073016 }, { "content": "pub fn calculate_and_append_crc(image_filename: &str) -> Result<usize, Error> {\n\n let mut file = open_image(image_filename)?;\n\n let plaintext = read_file(&mut file)?;\n\n\n\n let mut digest = crc32::Digest::new(crc32::IEEE);\n\n digest.write(&plaintext);\n\n\n\n let bytes_written =\n\n file.write(&digest.sum32().to_le_bytes()).map_err(|_| Error::FileWriteFailed(error::File::Image))?;\n\n\n\n if bytes_written == core::mem::size_of::<u32>() {\n\n Ok(bytes_written)\n\n } else {\n\n Err(Error::FileWriteFailed(error::File::Image))\n\n }\n\n}\n", "file_path": "tools/signing_tool/src/signing.rs", "rank": 14, "score": 129760.44411337885 }, { "content": "fn select_recovery_mode(ui: &mut egui::Ui, recovery_enabled: &mut bool, port: &Port) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.set_enabled(features::Serial::supported(port));\n\n ui.separator();\n\n ui.checkbox(recovery_enabled, \"Serial Recovery\");\n\n ui.label(\"Allow recovering a device by sending a new image via XModem.\");\n\n });\n\n}\n", "file_path": "loadstone_front/src/app/menus/serial.rs", "rank": 15, "score": 128272.67337028033 }, { "content": "/// Triggers a text file download 
prompt.\n\npub fn download_file(name: &str, data: &str) -> Result<(), JsValue> {\n\n use web_sys::{Blob, BlobPropertyBag, HtmlElement, Url};\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n\n\n let mut props = BlobPropertyBag::new();\n\n props.type_(\"text/plain\");\n\n\n\n let blob =\n\n Blob::new_with_str_sequence_and_options(&JsValue::from_serde(&[data]).unwrap(), &props)?;\n\n let link = document.create_element(\"a\")?.dyn_into::<HtmlElement>()?;\n\n link.set_attribute(\"href\", Url::create_object_url_with_blob(&blob)?.as_str())?;\n\n link.set_attribute(\"download\", name)?;\n\n\n\n let body = document.body().unwrap();\n\n body.append_child(&link)?;\n\n link.click();\n\n body.remove_child(&link)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "loadstone_front/src/app/utilities.rs", "rank": 16, "score": 126880.85343009408 }, { "content": "fn main() -> Result<()> { process_configuration_file() }\n\n\n", "file_path": "build.rs", "rank": 17, "score": 126845.01221771518 }, { "content": "fn select_tx_pins(ui: &mut egui::Ui, tx_pin: &mut PeripheralPin, port: &Port) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.separator();\n\n ui.label(\"\\u{27A1}\");\n\n egui::ComboBox::from_label(\"Serial console output pin (TX)\")\n\n .selected_text(tx_pin.to_string())\n\n .show_ui(ui, |ui| {\n\n let peripheral = tx_pin.peripheral.clone();\n\n for pin in pins::serial_tx(port).filter(|pin| pin.peripheral == peripheral) {\n\n ui.selectable_value(tx_pin, pin.clone(), pin);\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/serial.rs", "rank": 18, "score": 126653.65799219304 }, { "content": "fn select_rx_pins(ui: &mut egui::Ui, rx_pin: &mut PeripheralPin, port: &Port) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.separator();\n\n ui.label(\"\\u{2B05}\");\n\n egui::ComboBox::from_label(\"Serial console input pin (RX)\")\n\n .selected_text(rx_pin.to_string())\n\n .show_ui(ui, |ui| {\n\n let peripheral = rx_pin.peripheral.clone();\n\n for pin in 
pins::serial_rx(port).filter(|pin| pin.peripheral == peripheral) {\n\n ui.selectable_value(rx_pin, pin.clone(), pin);\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/serial.rs", "rank": 19, "score": 126653.65799219304 }, { "content": "/// Generates the `devices.rs` module, which contains type definitions and\n\n/// initialisation functions for bootloader features such as serial and external\n\n/// flash.\n\npub fn generate<P: AsRef<Path>>(\n\n autogenerated_folder_path: P,\n\n configuration: &Configuration,\n\n) -> Result<()> {\n\n let filename = autogenerated_folder_path.as_ref().join(\"devices.rs\");\n\n let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&filename)?;\n\n let mut code = quote! {};\n\n\n\n match configuration.port {\n\n crate::port::Port::Stm32F412 => {\n\n generate_serial_stm32(configuration, &mut code)?;\n\n generate_flash_stm32(configuration, &mut code)?;\n\n }\n\n crate::port::Port::Wgm160P => {}\n\n }\n\n\n\n file.write_all(format!(\"{}\", code).as_bytes())?;\n\n prettify_file(filename).ok();\n\n Ok(())\n\n}\n\n\n", "file_path": "loadstone_config/src/codegen/devices.rs", "rank": 20, "score": 126185.0271461753 }, { "content": "/// Returns an iterator over all the flash chips compatible with the current\n\n/// port (a driver exists for them).\n\npub fn external_flash(port: &Port) -> impl Iterator<Item = FlashChip> {\n\n match port {\n\n Port::Stm32F412 => Some(FlashChip {\n\n name: \"Micron n25q128a\".to_owned(),\n\n internal: false,\n\n start: 0x0000_0000,\n\n end: 0x00FF_FFFF,\n\n region_size: KB!(4),\n\n })\n\n .into_iter(),\n\n Port::Wgm160P => None.into_iter(),\n\n }\n\n}\n", "file_path": "loadstone_config/src/memory.rs", "rank": 21, "score": 124288.61181163637 }, { "content": "/// Transforms a `Configuration` struct into a set of source code files\n\n/// that will be compiled into `Loadstone`. 
The resulting source is written\n\n/// to src/ports/<port>/autogenerated.\n\npub fn generate_modules<P: AsRef<Path>>(\n\n loadstone_path: P,\n\n configuration: &Configuration,\n\n) -> Result<()> {\n\n let autogenerated_folder_path = loadstone_path.as_ref().join(\n\n format!(\"src/ports/{}/autogenerated\", configuration.port)\n\n );\n\n fs::create_dir(&autogenerated_folder_path).ok();\n\n generate_linker_script(&configuration)?;\n\n generate_top_level_module(&autogenerated_folder_path, configuration)?;\n\n\n\n if std::env::var(\"CARGO_FEATURE_ECDSA_VERIFY\").is_ok() {\n\n generate_key(loadstone_path, configuration)?;\n\n }\n\n memory_map::generate(\n\n &autogenerated_folder_path,\n\n &configuration.memory_configuration,\n\n &configuration.port,\n\n )?;\n\n pins::generate(&autogenerated_folder_path, &configuration)?;\n\n devices::generate(&autogenerated_folder_path, &configuration)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "loadstone_config/src/codegen/mod.rs", "rank": 22, "score": 123629.34033343144 }, { "content": "/// Generates the `pin_configuration.rs` module, which contains pin definitions\n\n/// alternate function assignments for a particular loadstone build.\n\npub fn generate<P: AsRef<Path>>(\n\n autogenerated_folder_path: P,\n\n configuration: &Configuration,\n\n) -> Result<()> {\n\n let filename = autogenerated_folder_path.as_ref().join(\"pin_configuration.rs\");\n\n let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&filename)?;\n\n\n\n match configuration.port.subfamily() {\n\n port::Subfamily::Stm32f4 => stm32::generate_stm32f4_pins(configuration, &mut file)?,\n\n port::Subfamily::Efm32Gg11 => generate_efm32gg(configuration, &mut file)?,\n\n };\n\n prettify_file(filename).ok();\n\n Ok(())\n\n}\n\n\n", "file_path": "loadstone_config/src/codegen/pins/mod.rs", "rank": 24, "score": 123618.79203267433 }, { "content": "/// Initializes the backup domain registers of the realtime clock, required for the update signal\n\n/// to 
function.\n\npub fn initialize_rtc_backup_domain(rcc: &mut blue_hal::stm32pac::RCC, pwr: &mut blue_hal::stm32pac::PWR) {\n\n rcc.apb1enr.modify(|_, w| { w.pwren().set_bit() });\n\n pwr.csr.modify(|_, w| { w.bre().set_bit() });\n\n pwr.cr.modify(|_, w| { w.dbp().set_bit() });\n\n rcc.bdcr.modify(|_, w| {\n\n w.rtcen().set_bit()\n\n .rtcsel().bits(0b10)\n\n });\n\n}\n", "file_path": "src/ports/stm32f412/update_signal.rs", "rank": 25, "score": 120099.38933975156 }, { "content": "/// Returns an iterator over the possible serial transmission pins for this port.\n\npub fn serial_tx(port: &Port) -> Box<dyn Iterator<Item = PeripheralPin>> {\n\n match port {\n\n Port::Stm32F412 => Box::new(IntoIter::new([\n\n PeripheralPin::new(Cow::from(\"USART1\"), Cow::from(\"a\"), 9, 7),\n\n PeripheralPin::new(Cow::from(\"USART1\"), Cow::from(\"b\"), 6, 7),\n\n PeripheralPin::new(Cow::from(\"USART2\"), Cow::from(\"a\"), 2, 7),\n\n PeripheralPin::new(Cow::from(\"USART2\"), Cow::from(\"d\"), 5, 7),\n\n PeripheralPin::new(Cow::from(\"USART1\"), Cow::from(\"a\"), 15, 6),\n\n PeripheralPin::new(Cow::from(\"USART6\"), Cow::from(\"c\"), 6, 8),\n\n PeripheralPin::new(Cow::from(\"USART6\"), Cow::from(\"a\"), 11, 8),\n\n PeripheralPin::new(Cow::from(\"USART6\"), Cow::from(\"g\"), 14, 8),\n\n ])),\n\n Port::Wgm160P => Box::new(None.into_iter()),\n\n }\n\n}\n\n\n", "file_path": "loadstone_config/src/pins.rs", "rank": 26, "score": 119760.35866906663 }, { "content": "/// Returns an iterator over the possible serial reception pins for this port.\n\npub fn serial_rx(port: &Port) -> Box<dyn Iterator<Item = PeripheralPin>> {\n\n match port {\n\n Port::Stm32F412 => Box::new(IntoIter::new([\n\n PeripheralPin::new(Cow::from(\"USART1\"), Cow::from(\"b\"), 3, 7),\n\n PeripheralPin::new(Cow::from(\"USART1\"), Cow::from(\"b\"), 7, 7),\n\n PeripheralPin::new(Cow::from(\"USART1\"), Cow::from(\"a\"), 10, 7),\n\n PeripheralPin::new(Cow::from(\"USART2\"), Cow::from(\"a\"), 3, 7),\n\n 
PeripheralPin::new(Cow::from(\"USART2\"), Cow::from(\"d\"), 6, 7),\n\n PeripheralPin::new(Cow::from(\"USART6\"), Cow::from(\"c\"), 7, 8),\n\n PeripheralPin::new(Cow::from(\"USART6\"), Cow::from(\"a\"), 12, 8),\n\n PeripheralPin::new(Cow::from(\"USART6\"), Cow::from(\"g\"), 9, 8),\n\n ])),\n\n Port::Wgm160P => Box::new(None.into_iter()),\n\n }\n\n}\n", "file_path": "loadstone_config/src/pins.rs", "rank": 27, "score": 119760.35866906663 }, { "content": "/// Generates a public key file under the `src/devices/assets/` folder.\n\nfn generate_key<P: AsRef<Path>>(loadstone_path: P, configuration: &Configuration) -> Result<()> {\n\n assert!(configuration.security_configuration.security_mode == SecurityMode::P256ECDSA,\n\n \"Configuration mismatch: Config file requires ECDSA verification, but feature is disabled\");\n\n\n\n fs::create_dir(loadstone_path.as_ref().join(\"src/devices/assets/\")).ok();\n\n let key_path = loadstone_path.as_ref().join(\n\n \"src/devices/assets/key.sec1\"\n\n );\n\n\n\n let key = VerifyingKey::from_str(&configuration.security_configuration.verifying_key_raw)\n\n .expect(\"Supplied public key is not valid\");\n\n\n\n let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&key_path)?;\n\n file.write_all(key.to_encoded_point(false).as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "loadstone_config/src/codegen/mod.rs", "rank": 28, "score": 119532.4376362763 }, { "content": "/// Generates the linker script `memory.x`, which describes the amount and location\n\n/// of flash and RAM memory available to a particular Loadstone instance.\n\npub fn generate_linker_script(configuration: &Configuration) -> Result<()> {\n\n let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(\"memory.x\")?;\n\n\n\n #[allow(unused_mut)]\n\n let mut constants = configuration\n\n .port\n\n .linker_script_constants()\n\n .ok_or(anyhow!(\"Current board doesn't have linker script constants defined.\"))?;\n\n\n\n if 
std::env::var(\"CARGO_FEATURE_RELOCATE_TO_BOOTABLE_BANK\").is_ok() {\n\n relocate_to_bootable_bank(&mut constants, configuration)?;\n\n }\n\n\n\n write!(\n\n file,\n\n \"MEMORY\\n\\\n\n {{\\n\\\n\n FLASH : ORIGIN = 0x{:08X}, LENGTH = {}K\\n\\\n\n RAM : ORIGIN = 0x{:08X}, LENGTH = {}K\\n\\\n\n }}\\n\",\n\n constants.flash.origin,\n\n constants.flash.size / 1024,\n\n constants.ram.origin,\n\n constants.ram.size / 1024,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "loadstone_config/src/codegen/linker_script.rs", "rank": 29, "score": 118395.35672367614 }, { "content": "/// Configures the custom greetings feature; optional strings that will be printed via\n\n/// serial by both Loadstone and the companion demo app. When enabled, they default to\n\n/// a version string containing Git and Cargo information.\n\npub fn configure_custom_greetings(ui: &mut egui::Ui, greetings: &mut Greetings) {\n\n let mut greetings_box = matches!(greetings, Greetings::Custom { .. });\n\n let loadstone_with_version = || {\n\n format!(\n\n \"-- Loadstone [{}-{}] --\",\n\n env!(\"CARGO_PKG_VERSION\"),\n\n git_version::git_version!()\n\n )\n\n };\n\n let demo_with_version = || {\n\n format!(\n\n \"-- Loadstone Demo App [{}-{}] --\",\n\n env!(\"CARGO_PKG_VERSION\"),\n\n git_version::git_version!()\n\n )\n\n };\n\n ui.horizontal_wrapped(|ui| {\n\n ui.checkbox(&mut greetings_box, \"Custom Greetings\");\n\n match (greetings_box, &greetings) {\n\n (true, Greetings::Default) => {\n", "file_path": "loadstone_front/src/app/menus/mod.rs", "rank": 30, "score": 116022.2550080763 }, { "content": "pub fn configure_update_signal(ui: &mut egui::Ui, update_signal: &mut UpdateSignal) {\n\n let mut enabled = matches!(update_signal, UpdateSignal::Enabled);\n\n\n\n ui.horizontal_wrapped(|ui| {\n\n ui.checkbox(&mut enabled, \"Update Signal\");\n\n ui.label(\"Enable update signal to control when image updates happen.\");\n\n if enabled {\n\n *update_signal = UpdateSignal::Enabled;\n\n } else {\n\n *update_signal = 
UpdateSignal::Disabled;\n\n }\n\n });\n\n}\n", "file_path": "loadstone_front/src/app/menus/update_signal.rs", "rank": 31, "score": 111112.29219157518 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\n#[wasm_bindgen]\n\npub fn start(canvas_id: &str) -> Result<(), eframe::wasm_bindgen::JsValue> {\n\n let app = LoadstoneApp::default();\n\n eframe::start_web(canvas_id, Box::new(app))\n\n}\n", "file_path": "loadstone_front/src/lib.rs", "rank": 32, "score": 104925.33849412092 }, { "content": "fn enforce_internal_banks_are_contiguous(internal_memory_map: &mut InternalMemoryMap) {\n\n if internal_memory_map.banks.len() > 1 {\n\n for i in 0..internal_memory_map.banks.len().saturating_sub(1) {\n\n let pair = &mut internal_memory_map.banks[i..=(i + 1)];\n\n pair[1].start_address = pair[0].end_address();\n\n }\n\n }\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/normalize.rs", "rank": 33, "score": 94080.46057327572 }, { "content": "/// Renders the menu to configure security options (at the moment,\n\n/// `CRC` and `ECDSA` image verification.\n\npub fn configure_security(\n\n ui: &mut egui::Ui,\n\n security_mode: &mut SecurityMode,\n\n verifying_key_raw: &mut String,\n\n verifying_key_text_field: &mut String,\n\n) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.radio_value(security_mode, SecurityMode::P256ECDSA, \"Enable P256 ECDSA mode.\")\n\n .on_hover_text(\"Enable P256 ECDSA signature verification.\");\n\n ui.radio_value(security_mode, SecurityMode::Crc, \"Enable CRC32 mode.\")\n\n .on_hover_text(\"Disable ECDSA verification in favor of IEEE CRC32\");\n\n });\n\n\n\n match security_mode {\n\n SecurityMode::Crc => {\n\n ui.colored_label(\n\n Color32::YELLOW,\n\n \"WARNING: Disabling ECDSA Image Verification replaces cryptographic \\\n\n signatures with insecure CRC. 
This removes the guarantee of image authenticity.\",\n\n );\n", "file_path": "loadstone_front/src/app/menus/security.rs", "rank": 34, "score": 92775.22329025283 }, { "content": "fn main() -> Result<(), String> {\n\n let matches = clap_app!(app =>\n\n (name: env!(\"CARGO_PKG_NAME\"))\n\n (version: env!(\"CARGO_PKG_VERSION\"))\n\n (author: env!(\"CARGO_PKG_AUTHORS\"))\n\n (about: env!(\"CARGO_PKG_DESCRIPTION\"))\n\n (@arg image: +required \"The firmware image to be signed.\")\n\n (@arg golden: -g --golden \"Label the image as golden (Loadstone firmware fallback)\")\n\n (@arg private_key: \"The PKCS8 private key used to sign the image. \\\n\n If absent, an IEEE CRC32 code will be appended instead of a signature.\")\n\n )\n\n .get_matches();\n\n\n\n let image_filename = matches.value_of(\"image\").unwrap().to_owned();\n\n let private_key_filename = matches.value_of(\"private_key\").map(str::to_owned);\n\n\n\n match process_image_file(\n\n image_filename,\n\n private_key_filename.clone(),\n\n matches.occurrences_of(\"golden\") > 0,\n", "file_path": "tools/signing_tool/src/main.rs", "rank": 35, "score": 92504.674829127 }, { "content": "/// Renders the image generation menu.\n\npub fn generate<'a>(\n\n ui: &mut Ui,\n\n frame: &mut epi::Frame<'_>,\n\n personal_access_token_field: &mut String,\n\n git_ref_field: &mut String,\n\n git_fork_field: &mut String,\n\n last_request_response: &mut Arc<Mutex<Option<Result<Response, reqwest_wasm::Error>>>>,\n\n configuration: &Configuration,\n\n) {\n\n if configuration.complete() {\n\n if frame.is_web() {\n\n ui.group(|ui| {\n\n generate_in_ci(\n\n ui,\n\n personal_access_token_field,\n\n git_ref_field,\n\n git_fork_field,\n\n configuration,\n\n last_request_response,\n\n );\n", "file_path": "loadstone_front/src/app/menus/generate.rs", "rank": 36, "score": 90971.87467343848 }, { "content": "/// Ensures internal consistency of the memory maps is maintained. 
Rules like banks\n\n/// staying contiguous, single boot banks, etc are enforced here. This is called\n\n/// after the GUI drives any modification of the memory map, as the invariants can't\n\n/// be easily upheld by the types alone, and having this additional step makes writing\n\n/// the GUI code a lot simpler.\n\npub fn normalize(\n\n internal_memory_map: &mut InternalMemoryMap,\n\n external_memory_map: &mut ExternalMemoryMap,\n\n internal_flash: &memory::FlashChip,\n\n external_flash: &mut Option<memory::FlashChip>,\n\n golden_index: &mut Option<usize>,\n\n port: &Port,\n\n) {\n\n enforce_bootable_bank_not_golden(golden_index, internal_memory_map);\n\n enforce_internal_banks_follow_bootloader(internal_memory_map, internal_flash);\n\n enforce_internal_banks_are_contiguous(internal_memory_map);\n\n enforce_internal_bank_ranges_are_maintained(internal_memory_map, internal_flash);\n\n\n\n if let Some(chip) = external_flash {\n\n if memory::external_flash(port).any(|c| c.name == chip.name) {\n\n enforce_external_banks_are_contiguous(external_memory_map, chip);\n\n } else {\n\n *external_flash = None;\n\n }\n\n } else {\n\n external_memory_map.banks.clear();\n\n }\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/normalize.rs", "rank": 37, "score": 90959.19847751067 }, { "content": "fn generate_gpio_macros(configuration: &Configuration, code: &mut quote::__private::TokenStream) {\n\n for bank in 'a'..='h' {\n\n let input_tokens = input_tokens(configuration).filter(|t| t.bank == bank).collect_vec();\n\n let serial_tokens = serial_tokens(configuration).filter(|t| t.bank == bank).collect_vec();\n\n let qspi_flash_pin_tokens =\n\n qspi_flash_pin_tokens(configuration).filter(|t| t.bank == bank).collect_vec();\n\n\n\n let input_index = input_tokens.iter().map(|t| &t.index);\n\n let input_mode = input_tokens.iter().map(|t| &t.mode);\n\n\n\n let serial_index = serial_tokens.iter().map(|t| &t.index);\n\n let serial_mode = serial_tokens.iter().map(|t| 
&t.mode);\n\n let serial_direction = serial_tokens.iter().map(|t| &t.direction);\n\n let serial_peripheral = serial_tokens.iter().map(|t| &t.peripheral);\n\n\n\n let qspi_flash_index = qspi_flash_pin_tokens.iter().map(|t| &t.index);\n\n let qspi_flash_mode = qspi_flash_pin_tokens.iter().map(|t| &t.mode);\n\n let qspi_flash_earmark = qspi_flash_pin_tokens.iter().map(|t| &t.earmark);\n\n\n\n let bank = format_ident!(\"{}\", bank);\n", "file_path": "loadstone_config/src/codegen/pins/stm32.rs", "rank": 38, "score": 87692.62657946085 }, { "content": "/// Renders the menu to configure the entire memory map, consisting of a mandatory internal\n\n/// flash (and its bank distribution, which must contain a bootable bank) and an optional\n\n/// external flash.\n\npub fn configure_memory_map(\n\n ui: &mut egui::Ui,\n\n internal_memory_map: &mut InternalMemoryMap,\n\n external_memory_map: &mut ExternalMemoryMap,\n\n external_flash: &mut Option<FlashChip>,\n\n golden_index: &mut Option<usize>,\n\n port: &Port,\n\n) {\n\n let internal_flash = memory::internal_flash(port);\n\n\n\n normalize(\n\n internal_memory_map,\n\n external_memory_map,\n\n &internal_flash,\n\n external_flash,\n\n golden_index,\n\n port,\n\n );\n\n\n\n ui.group(|ui| {\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 39, "score": 87681.67115295056 }, { "content": "/// Writes the top level autogenerated module, which includes a few boolean feature flags and\n\n/// the module definitions of every autogenerated submodule.\n\nfn generate_top_level_module<P: AsRef<Path>>(\n\n autogenerated_folder_path: P,\n\n configuration: &Configuration,\n\n) -> Result<()> {\n\n let filename = autogenerated_folder_path.as_ref().join(\"mod.rs\");\n\n let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&filename)?;\n\n\n\n let (serial_enabled, recovery_enabled) = if let Serial::Enabled { recovery_enabled, .. 
} =\n\n configuration.feature_configuration.serial\n\n {\n\n if !Serial::supported(&configuration.port) {\n\n panic!(\n\n \"Serial features enabled for a port that doesn't support them: {:?}\",\n\n configuration.port\n\n );\n\n }\n\n (true, recovery_enabled)\n\n } else {\n\n (false, false)\n\n };\n", "file_path": "loadstone_config/src/codegen/mod.rs", "rank": 40, "score": 82814.28324454396 }, { "content": "/// utility function to invert the [`MAGIC_STRING`].\n\npub fn magic_string_inverted() -> [u8; MAGIC_STRING.len()] {\n\n let mut inverted = [0u8; MAGIC_STRING.len()];\n\n let mut bytes = MAGIC_STRING.as_bytes().iter().map(|b| !b);\n\n bytes.collect_slice(&mut inverted);\n\n inverted\n\n}\n\n\n\n/// Image bank descriptor.\n\n///\n\n/// A bank represents a section of flash memory that may contain a single signed/crc'd\n\n/// firmware image, for the purposes of booting, backup, update or recovery.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Bank<A: Address> {\n\n /// Numeric identifier of the bank, unique even across multiple flash chips.\n\n pub index: u8,\n\n /// Size in bytes of the flash range occupied by this bank.\n\n pub size: usize,\n\n /// Address of the start of the image bank.\n\n pub location: A,\n\n /// Whether Loadstone is allowed to boot an image residing in this bank.\n", "file_path": "src/devices/image/mod.rs", "rank": 41, "score": 80807.1802455907 }, { "content": "/// Generic file transfer iterator trait, returning an iterator over byte blocks.\n\npub trait FileTransfer: TimeoutRead + Write {\n\n fn blocks(&mut self, max_retries: Option<u32>) -> BlockIterator<Self> {\n\n BlockIterator {\n\n serial: self,\n\n received_block: false,\n\n finished: false,\n\n block_number: 0,\n\n max_retries,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: TimeoutRead + Write> FileTransfer for T {}\n\n\n\n/// Generic iterator over byte blocks.\n\npub struct BlockIterator<'a, S: TimeoutRead + Write + ?Sized> {\n\n serial: &'a mut S,\n\n received_block: bool,\n\n finished: bool,\n\n 
block_number: u8,\n", "file_path": "src/devices/cli/file_transfer.rs", "rank": 42, "score": 78195.09769129244 }, { "content": "/// Renders a link to download the finished .ron file.\n\nfn generate_download(ui: &mut Ui, configuration: &Configuration) {\n\n ui.heading(\"Option 2: Local\");\n\n ui.horizontal_wrapped(|ui| {\n\n if ui.button(\"Download\").clicked() {\n\n download_file(\n\n \"loadstone_config.ron\",\n\n &ron::ser::to_string_pretty(&configuration, PrettyConfig::default()).unwrap(),\n\n )\n\n .unwrap();\n\n }\n\n ui.label(\"Download the .ron file to build Loadstone locally.\");\n\n });\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/generate.rs", "rank": 43, "score": 75674.39756993073 }, { "content": "/// Generates a .ron file and saves it to the current directory. This is the\n\n/// only available approach when running loadstone_front natively.\n\nfn generate_native(ui: &mut Ui, configuration: &Configuration) {\n\n ui.group(|ui| {\n\n ui.heading(\"Local generation\");\n\n ui.horizontal_wrapped(|ui| {\n\n if ui.button(\"Generate\").clicked() {\n\n // TODO clean up unwraps\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .truncate(true)\n\n .open(LOCAL_OUTPUT_FILENAME)\n\n .unwrap();\n\n file.write_all(\n\n ron::ser::to_string_pretty(&configuration, PrettyConfig::default())\n\n .unwrap()\n\n .as_bytes(),\n\n )\n\n .unwrap();\n\n }\n\n ui.label(\"Generate a\");\n\n ui.colored_label(Color32::LIGHT_BLUE, LOCAL_OUTPUT_FILENAME);\n\n ui.label(\"file to be used locally to build Loadstone.\");\n\n });\n\n });\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/generate.rs", "rank": 44, "score": 75674.07105401896 }, { "content": "fn process_image_file(\n\n image_filename: String,\n\n private_key_filename: Option<String>,\n\n image_is_golden: bool,\n\n) -> Result<usize, Error> {\n\n decorate_file(&image_filename, image_is_golden)?;\n\n\n\n if let Some(private_key_filename) = private_key_filename {\n\n let key_file =\n\n 
File::open(private_key_filename).map_err(|_| Error::FileOpenFailed(e::File::Key))?;\n\n let key = signing::read_key(key_file)?;\n\n sign_file(&image_filename, key)\n\n } else {\n\n calculate_and_append_crc(&image_filename)\n\n }\n\n}\n\n\n", "file_path": "tools/signing_tool/src/main.rs", "rank": 45, "score": 74441.4477320472 }, { "content": "fn generate_imports_and_types(\n\n configuration: &Configuration,\n\n code: &mut quote::__private::TokenStream,\n\n) {\n\n if let Serial::Enabled { tx_pin, rx_pin, .. } = &configuration.feature_configuration.serial {\n\n let peripheral = format_ident!(\"{}\", tx_pin.peripheral);\n\n let tx_af = format_ident!(\"AF{}\", tx_pin.af_index);\n\n let tx_pin = format_ident!(\"P{}{}\", tx_pin.bank, tx_pin.index);\n\n let rx_af = format_ident!(\"AF{}\", rx_pin.af_index);\n\n let rx_pin = format_ident!(\"P{}{}\", rx_pin.bank, rx_pin.index);\n\n\n\n code.append_all(quote! {\n\n use blue_hal::drivers::stm32f4::serial::{TxPin, RxPin};\n\n #[allow(unused_imports)]\n\n use blue_hal::stm32pac::{self, USART1, USART2, USART6};\n\n pub type UsartPins = (#tx_pin<#tx_af>, #rx_pin<#rx_af>);\n\n pub type Serial = blue_hal::drivers::stm32f4::serial::Serial<#peripheral, UsartPins>;\n\n });\n\n } else {\n\n code.append_all(quote! 
{\n", "file_path": "loadstone_config/src/codegen/pins/stm32.rs", "rank": 46, "score": 72725.72776405925 }, { "content": "pub fn magic_string_inverted() -> Vec<u8> { MAGIC_STRING.as_bytes().iter().map(|b| !b).collect() }\n\n\n", "file_path": "tools/signing_tool/src/decorating.rs", "rank": 47, "score": 62858.97841482473 }, { "content": "pub trait Convertible {\n\n fn into(self) -> Error;\n\n}\n\nimpl<T: Convertible> From<T> for Error {\n\n fn from(t: T) -> Self { t.into() }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 48, "score": 54849.671109622446 }, { "content": " #[marker_blanket]\n\n pub trait Serial:\n\n serial::Read<Error: error::Convertible>\n\n + serial::Write\n\n + serial::TimeoutRead<Error: error::Convertible>\n\n {\n\n }\n\n}\n", "file_path": "src/devices/mod.rs", "rank": 49, "score": 53491.35246207332 }, { "content": "pub trait Reader {\n\n fn image_at<A, F>(flash: &mut F, bank: Bank<A>) -> Result<Image<A>, error::Error>\n\n where\n\n A: Address,\n\n F: flash::ReadWrite<Address = A>,\n\n error::Error: From<F::Error>;\n\n}\n\n\n\nimpl<A: Address> Image<A> {\n\n /// Address of the start of the firmware image. 
Will generally coincide\n\n /// with the start of its associated image bank.\n\n pub fn location(&self) -> A { self.location }\n\n /// Size of the firmware image, excluding decoration and signature/crc.\n\n pub fn size(&self) -> usize { self.size }\n\n /// Size of the firmware image, including decoration and signature.\n\n #[cfg(feature = \"ecdsa-verify\")]\n\n pub fn total_size(&self) -> usize {\n\n self.size()\n\n + image_ecdsa::SignatureSize::<image_ecdsa::NistP256>::to_usize()\n\n + MAGIC_STRING.len()\n", "file_path": "src/devices/image/mod.rs", "rank": 50, "score": 52242.86467886265 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nfn main() {\n\n let app = loadstone_front::LoadstoneApp::default();\n\n eframe::run_native(Box::new(app), NativeOptions::default());\n\n}\n", "file_path": "loadstone_front/src/main.rs", "rank": 51, "score": 50119.4256419282 }, { "content": "pub trait WriteUpdateSignal {\n\n fn write_update_plan(&mut self, plan: UpdatePlan);\n\n}\n", "file_path": "src/devices/update_signal.rs", "rank": 52, "score": 50026.083652375586 }, { "content": "pub trait ReadUpdateSignal {\n\n fn read_update_plan(&self) -> UpdatePlan;\n\n}\n\n\n", "file_path": "src/devices/update_signal.rs", "rank": 53, "score": 50026.083652375586 }, { "content": "/// Generates a loadstone image when loadstone_front is ran as a web application. 
Offers\n\n/// both a download link and an automated Github Actions CI trigger.\n\nfn generate_web(\n\n configuration: &Configuration,\n\n token: &str,\n\n git_ref: &str,\n\n git_fork: &str,\n\n ron: &str,\n\n last_request_response: &mut Arc<Mutex<Option<Result<Response, reqwest_wasm::Error>>>>,\n\n) -> Result<()> {\n\n let client = reqwest_wasm::Client::new();\n\n let cloned_response = last_request_response.clone();\n\n\n\n let mut auth_bytes = b\"Basic \".to_vec();\n\n let mut encoder = Base64Encoder::new(&mut auth_bytes, base64::STANDARD);\n\n write!(encoder, \"{}:\", token).unwrap();\n\n drop(encoder);\n\n\n\n let formatted_body =format!(\n\n \"{{\\\"ref\\\":\\\"{}\\\", \\\"inputs\\\": {{\\\"loadstone_configuration\\\":\\\"{}\\\",\\\"loadstone_features\\\":\\\"{}\\\"}}}}\",\n\n git_ref,\n\n ron.replace(\"\\\"\", \"\\\\\\\"\").replace(\"\\n\",\"\"),\n", "file_path": "loadstone_front/src/app/menus/generate.rs", "rank": 54, "score": 46742.20746780362 }, { "content": "/// Automatically triggers a Loadstone build in Github Actions. By default, this requires a\n\n/// personal access token with write access to the main Loadstone repository, but it can\n\n/// be pointed at different forks.\n\nfn generate_in_ci(\n\n ui: &mut Ui,\n\n personal_access_token_field: &mut String,\n\n git_ref_field: &mut String,\n\n git_fork_field: &mut String,\n\n configuration: &Configuration,\n\n last_request_response: &mut Arc<Mutex<Option<Result<Response, reqwest_wasm::Error>>>>,\n\n) {\n\n ui.heading(\"Option 1: Github CI\");\n\n ui.horizontal_wrapped(|ui| {\n\n ui.label(\n\n \"Paste your Github PAT to trigger a Github Actions build. \\\n\n You must have sufficient permissions on the chosen Loadstone fork \\\n\n to trigger workflow dispatches. 
\\\n\n For instructions on how to generate a Github Personal Access Token,\",\n\n );\n\n ui.hyperlink_to(\"visit this link.\", GITHUB_TOKEN_INSTRUCTIONS);\n\n });\n\n ui.horizontal_wrapped(|ui| {\n\n ui.text_edit_singleline(personal_access_token_field);\n", "file_path": "loadstone_front/src/app/menus/generate.rs", "rank": 55, "score": 46742.20746780362 }, { "content": "fn select_peripheral(\n\n ui: &mut egui::Ui,\n\n port: &Port,\n\n tx_pin: &mut PeripheralPin,\n\n rx_pin: &mut PeripheralPin,\n\n available_peripherals: impl Iterator<Item = Peripheral>,\n\n) {\n\n let mut inferred_peripheral = tx_pin.peripheral.clone();\n\n\n\n ui.horizontal_wrapped(|ui| {\n\n egui::ComboBox::from_label(\"Serial Peripheral\")\n\n .selected_text(&inferred_peripheral)\n\n .show_ui(ui, |ui| {\n\n for peripheral in available_peripherals {\n\n ui.selectable_value(&mut inferred_peripheral, peripheral.clone(), peripheral);\n\n }\n\n });\n\n });\n\n\n\n let first_valid_tx = |peripheral| {\n", "file_path": "loadstone_front/src/app/menus/serial.rs", "rank": 56, "score": 46742.20746780362 }, { "content": "fn generate_flash_stm32(\n\n configuration: &Configuration,\n\n code: &mut quote::__private::TokenStream,\n\n) -> Result<()> {\n\n if configuration.memory_configuration.external_flash.is_some() {\n\n code.append_all(quote!{\n\n use blue_hal::hal::time;\n\n use super::pin_configuration::*;\n\n pub fn construct_flash(qspi_pins: QspiPins, qspi: stm32pac::QUADSPI) -> Option<ExternalFlash> {\n\n let qspi_config = qspi::Config::<mode::Single>::default().with_flash_size(24).unwrap();\n\n let qspi = Qspi::from_config(qspi, qspi_pins, qspi_config).unwrap();\n\n let external_flash = ExternalFlash::with_timeout(qspi, time::Milliseconds(5000)).unwrap();\n\n Some(external_flash)\n\n }\n\n })\n\n } else {\n\n code.append_all(quote!{\n\n use blue_hal::hal::time;\n\n use super::pin_configuration::*;\n\n #[allow(unused)]\n\n pub fn construct_flash(qspi_pins: QspiPins, qspi: stm32pac::QUADSPI) -> 
Option<ExternalFlash> { None }\n\n })\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "loadstone_config/src/codegen/devices.rs", "rank": 57, "score": 46742.20746780362 }, { "content": "fn generate_serial_stm32(\n\n configuration: &Configuration,\n\n code: &mut quote::__private::TokenStream,\n\n) -> Result<()> {\n\n if let Serial::Enabled { tx_pin, .. } = &configuration.feature_configuration.serial {\n\n let peripheral = format_ident!(\"{}\", tx_pin.peripheral.to_lowercase());\n\n code.append_all(quote! {\n\n use super::pin_configuration::{UsartPins, Serial};\n\n use blue_hal::stm32pac;\n\n use blue_hal::drivers::stm32f4::rcc::Clocks;\n\n use blue_hal::drivers::stm32f4::serial::{self, UsartExt};\n\n #[allow(unused)]\n\n pub fn construct_serial(\n\n serial_pins: UsartPins,\n\n clocks: Clocks,\n\n usart1: stm32pac::USART1,\n\n usart2: stm32pac::USART2,\n\n usart6: stm32pac::USART6\n\n ) -> Option<Serial> {\n\n let serial_config = serial::config::Config::default().baudrate(time::Bps(115200));\n", "file_path": "loadstone_config/src/codegen/devices.rs", "rank": 58, "score": 46742.20746780362 }, { "content": "fn configure_runner(target: &str) {\n\n println!(\"cargo:rerun-if-changed={}\", RUNNER_TARGET_FILE);\n\n\n\n const RUNNER_TARGET_FILE: &str = \".cargo/.runner-target\";\n\n fs::write(RUNNER_TARGET_FILE, target).unwrap();\n\n}\n\n\n", "file_path": "build.rs", "rank": 59, "score": 46414.714502768125 }, { "content": "#[allow(unused)]\n\nfn relocate_to_bootable_bank(\n\n constants: &mut LinkerScriptConstants,\n\n configuration: &Configuration,\n\n) -> Result<()> {\n\n let bootable_address = configuration.memory_configuration.bootable_address().ok_or(anyhow!(\n\n \"Impossible to relocate: bootable bank is undefined in configuration file.\"\n\n ))?;\n\n let offset = bootable_address - constants.flash.origin;\n\n constants.flash.size = constants.flash.size.saturating_sub(offset as usize);\n\n constants.flash.origin = bootable_address;\n\n Ok(())\n\n}\n", "file_path": 
"loadstone_config/src/codegen/linker_script.rs", "rank": 61, "score": 45778.8261306365 }, { "content": "fn generate_pin_constructor(\n\n configuration: &Configuration,\n\n code: &mut quote::__private::TokenStream,\n\n) -> () {\n\n let banks = 'a'..='h';\n\n let gpio_fields = banks.clone().map(|b| format_ident!(\"gpio{}\", b)).collect_vec();\n\n let pac_gpio_fields = banks.map(|b| format_ident!(\"GPIO{}\", b.to_uppercase().next().unwrap()));\n\n\n\n let serial_pin_structs: Box<dyn Iterator<Item = Ident>> =\n\n if let Serial::Enabled { tx_pin, rx_pin, .. } = &configuration.feature_configuration.serial\n\n {\n\n Box::new(IntoIter::new([\n\n format_ident!(\"gpio{}\", tx_pin.bank),\n\n format_ident!(\"gpio{}\", rx_pin.bank),\n\n ]))\n\n } else {\n\n Box::new(None.into_iter())\n\n };\n\n\n\n let serial_pin_fields: Box<dyn Iterator<Item = Ident>> =\n", "file_path": "loadstone_config/src/codegen/pins/stm32.rs", "rank": 63, "score": 45778.8261306365 }, { "content": "fn define_serial_options(\n\n ui: &mut egui::Ui,\n\n port: &Port,\n\n recovery_enabled: &mut bool,\n\n tx_pin: &mut PeripheralPin,\n\n rx_pin: &mut PeripheralPin,\n\n available_peripherals: impl Iterator<Item = Peripheral>,\n\n) {\n\n ui.vertical(|ui| {\n\n select_peripheral(ui, port, tx_pin, rx_pin, available_peripherals);\n\n select_tx_pins(ui, tx_pin, port);\n\n select_rx_pins(ui, rx_pin, port);\n\n select_recovery_mode(ui, recovery_enabled, port);\n\n });\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/serial.rs", "rank": 64, "score": 45778.8261306365 }, { "content": "fn qspi_flash_pin_tokens(\n\n configuration: &Configuration,\n\n) -> Box<dyn Iterator<Item = QspiFlashPinTokens>> {\n\n // TODO parse these from config file. 
They're currently hardcoded here\n\n if let Some(_) = &configuration.memory_configuration.external_flash {\n\n Box::new(IntoIter::new([\n\n QspiFlashPinTokens {\n\n bank: 'b',\n\n index: 2.into(),\n\n mode: format_ident!(\"AF9\"),\n\n earmark: format_ident!(\"QspiClk\"),\n\n },\n\n QspiFlashPinTokens {\n\n bank: 'f',\n\n index: 6.into(),\n\n mode: format_ident!(\"AF9\"),\n\n earmark: format_ident!(\"QspiSecondaryInput\"),\n\n },\n\n QspiFlashPinTokens {\n\n bank: 'f',\n", "file_path": "loadstone_config/src/codegen/pins/stm32.rs", "rank": 65, "score": 44882.49822869756 }, { "content": "/// Exposes a report_unwrap() method that behaves like\n\n/// unwrap(), but also reports any errors via serial before panicking.\n\npub trait ReportOnUnwrap<T, S: Write> {\n\n fn report_unwrap(self, serial: &mut S) -> T;\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 66, "score": 44730.72560604688 }, { "content": "fn validate_feature_flags_against_configuration(configuration: &Configuration) {\n\n let supplied_flags: Vec<_> = std::env::vars()\n\n .filter_map(|(k, _)| {\n\n k.starts_with(\"CARGO_FEATURE_\")\n\n .then_some(k.strip_prefix(\"CARGO_FEATURE_\")?.to_owned().to_lowercase())\n\n })\n\n .collect();\n\n\n\n let missing_flags: Vec<_> = configuration\n\n .required_feature_flags()\n\n .map(|s| s.replace(\"-\", \"_\"))\n\n .filter(|f| !&supplied_flags.contains(&(*f).to_owned()))\n\n .collect();\n\n\n\n if configuration.security_configuration.security_mode != SecurityMode::P256ECDSA\n\n && supplied_flags.contains(&\"ecdsa_verify\".to_owned())\n\n {\n\n panic!(\"Configuration mismatch. Configuration file does not specify ECDSA security mode, \\\n\n but the `ecdsa-verify` flag was supplied. Try again without `ecdsa-verify` for CRC mode.\");\n\n }\n\n\n\n if !missing_flags.is_empty() {\n\n panic!(\n\n \"\\r\\n\\r\\nThe configuration file requires flags that haven't been supplied. 
\\\n\n Please build again with `--features={}`\\r\\n\\r\\n\",\n\n missing_flags.join(\",\"),\n\n );\n\n }\n\n}\n", "file_path": "build.rs", "rank": 67, "score": 44075.747785677624 }, { "content": "fn select_bootloader_length(\n\n ui: &mut egui::Ui,\n\n internal_memory_map: &mut InternalMemoryMap,\n\n internal_flash: &memory::FlashChip,\n\n) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.add(\n\n Slider::new(\n\n &mut internal_memory_map.bootloader_length_kb,\n\n 1..=cmp::min(\n\n BOOTLOADER_MAX_LENGTH_KB,\n\n (internal_flash.end - internal_memory_map.bootloader_location) / KB!(1),\n\n ),\n\n )\n\n .clamp_to_range(true)\n\n .suffix(\"KB\"),\n\n );\n\n ui.label(\"Bootloader allocated length\");\n\n });\n\n if internal_memory_map.bootloader_length_kb < 64 {\n\n ui.colored_label(\n\n Color32::YELLOW,\n\n \"You must manually ensure you've allocated enough \\\n\n bootloader space to hold the final compiled binary.\",\n\n );\n\n }\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 68, "score": 44046.458599711776 }, { "content": "fn configure_external_banks(\n\n ui: &mut egui::Ui,\n\n external_memory_map: &mut ExternalMemoryMap,\n\n internal_memory_map: &InternalMemoryMap,\n\n external_flash: &memory::FlashChip,\n\n golden_index: &mut Option<usize>,\n\n) {\n\n let ExternalMemoryMap { banks: external_banks } = external_memory_map;\n\n let InternalMemoryMap { banks: internal_banks, .. 
} = internal_memory_map;\n\n\n\n let mut to_delete: Option<usize> = None;\n\n for (i, bank) in external_banks.iter_mut().enumerate() {\n\n configure_external_bank(\n\n i,\n\n internal_banks,\n\n ui,\n\n bank,\n\n external_flash,\n\n golden_index,\n\n &mut to_delete,\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 69, "score": 44046.458599711776 }, { "content": "fn configure_external_bank(\n\n i: usize,\n\n internal_banks: &Vec<Bank>,\n\n ui: &mut egui::Ui,\n\n bank: &mut Bank,\n\n external_flash: &FlashChip,\n\n golden_index: &mut Option<usize>,\n\n to_delete: &mut Option<usize>,\n\n) {\n\n let global_index = i + internal_banks.len();\n\n ui.horizontal_wrapped(|ui| {\n\n ui.add(\n\n Slider::new(\n\n &mut bank.size_kb,\n\n 1..=external_flash.end.saturating_sub(bank.start_address + 1) / KB!(1),\n\n )\n\n .clamp_to_range(true)\n\n .suffix(\"KB\"),\n\n );\n\n ui.label(format!(\"Bank {}\", global_index + 1));\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 70, "score": 44046.458599711776 }, { "content": "fn configure_internal_banks(\n\n ui: &mut egui::Ui,\n\n internal_memory_map: &mut InternalMemoryMap,\n\n internal_flash: &memory::FlashChip,\n\n golden_index: &mut Option<usize>,\n\n) {\n\n let InternalMemoryMap { banks, bootable_index, .. 
} = internal_memory_map;\n\n let mut to_delete: Option<usize> = None;\n\n for (i, bank) in banks.iter_mut().enumerate() {\n\n configure_internal_bank(\n\n ui,\n\n bank,\n\n internal_flash,\n\n bootable_index,\n\n i,\n\n golden_index,\n\n &mut to_delete,\n\n );\n\n }\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 71, "score": 44046.458599711776 }, { "content": "fn add_internal_bank(\n\n ui: &mut egui::Ui,\n\n golden_index: &mut Option<usize>,\n\n internal_memory_map: &mut InternalMemoryMap,\n\n bank_start_address: u32,\n\n internal_flash: &FlashChip,\n\n) {\n\n if ui.button(\"Add bank\").clicked() {\n\n // Bump the golden index if we added a bank under the golden one\n\n match golden_index {\n\n Some(index) if *index >= internal_memory_map.banks.len() => *index = *index + 1,\n\n _ => (),\n\n };\n\n internal_memory_map.banks.push(Bank {\n\n start_address: bank_start_address,\n\n size_kb: internal_flash.region_size / KB!(1),\n\n });\n\n };\n\n ui.label(format!(\n\n \"({}KB available space)\",\n\n internal_flash.end.saturating_sub(bank_start_address) / KB!(1)\n\n ));\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 72, "score": 44046.458599711776 }, { "content": "fn configure_internal_bank(\n\n ui: &mut egui::Ui,\n\n bank: &mut Bank,\n\n internal_flash: &FlashChip,\n\n bootable_index: &mut Option<usize>,\n\n i: usize,\n\n golden_index: &mut Option<usize>,\n\n to_delete: &mut Option<usize>,\n\n) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.add(\n\n Slider::new(\n\n &mut bank.size_kb,\n\n 1..=internal_flash.end.saturating_sub(bank.start_address + 1) / KB!(1),\n\n )\n\n .clamp_to_range(true)\n\n .suffix(\"KB\"),\n\n );\n\n ui.label(format!(\"Bank {}\", i + 1));\n\n ui.add(\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 73, "score": 44046.458599711776 }, { "content": "fn select_bootloader_location(\n\n ui: &mut egui::Ui,\n\n internal_memory_map: &mut InternalMemoryMap,\n\n 
internal_flash: &memory::FlashChip,\n\n) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.add(\n\n Slider::new(\n\n &mut internal_memory_map.bootloader_location,\n\n internal_flash.start\n\n ..=(internal_flash.end.saturating_sub(KB!(BOOTLOADER_MAX_LENGTH_KB))),\n\n )\n\n .clamp_to_range(true),\n\n );\n\n ui.label(\"Bootloader location\");\n\n ui.add(\n\n Label::new(format!(\n\n \"(0x{:x} - 0x{:x})\",\n\n internal_memory_map.bootloader_location,\n\n internal_memory_map.bootloader_location\n\n + KB!(internal_memory_map.bootloader_length_kb)\n\n ))\n\n .text_color(Color32::LIGHT_BLUE),\n\n );\n\n });\n\n}\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 74, "score": 44046.458599711776 }, { "content": "fn add_external_bank(\n\n ui: &mut egui::Ui,\n\n external_memory_map: &mut ExternalMemoryMap,\n\n bank_start_address: u32,\n\n external_flash: &FlashChip,\n\n) {\n\n if ui.button(\"Add bank\").clicked() {\n\n external_memory_map.banks.push(Bank {\n\n start_address: bank_start_address,\n\n size_kb: external_flash.region_size / KB!(1),\n\n });\n\n };\n\n ui.label(format!(\n\n \"({}KB available space)\",\n\n external_flash.end.saturating_sub(bank_start_address) / KB!(1)\n\n ));\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/mod.rs", "rank": 75, "score": 44046.458599711776 }, { "content": "/// Exposes a report_unwrap() method that behaves like\n\n/// unwrap(), but also reports any errors via serial before panicking.\n\npub trait ReportOnUnwrapWithPrefix<T, S: Write> {\n\n fn report_unwrap(self, prefix: &'static str, serial: &mut S) -> T;\n\n}\n\n\n\nimpl<T, S: Write> ReportOnUnwrap<T, S> for Result<T, Error> {\n\n fn report_unwrap(self, serial: &mut S) -> T {\n\n match self {\n\n Ok(value) => value,\n\n Err(error) => {\n\n error.report(serial);\n\n panic!();\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T, S: Write> ReportOnUnwrapWithPrefix<T, S> for Result<T, Error> {\n\n fn report_unwrap(self, prefix: &'static str, serial: &mut S) -> T {\n\n 
match self {\n\n Ok(value) => value,\n", "file_path": "src/error.rs", "rank": 76, "score": 43742.227046756896 }, { "content": "fn retrieve_key() -> VerifyingKey {\n\n #[allow(unused)]\n\n use core::str::FromStr;\n\n\n\n #[cfg(test)]\n\n return VerifyingKey::from_str(include_str!(\"../assets/test_key.pem\"))\n\n .expect(\"Invalic public key supplied on compilation\");\n\n\n\n #[cfg(not(test))]\n\n return VerifyingKey::from_encoded_point(\n\n &EncodedPoint::from_bytes(include_bytes!(\"../assets/key.sec1\"))\n\n .expect(\"Invalic public key supplied on compilation\"),\n\n )\n\n .expect(\"Invalic public key supplied on compilation\");\n\n}\n\n\n\npub struct EcdsaImageReader;\n\n\n\nimpl Reader for EcdsaImageReader {\n\n fn image_at<A, F>(flash: &mut F, bank: Bank<A>) -> Result<Image<A>, error::Error>\n", "file_path": "src/devices/image/image_ecdsa.rs", "rank": 77, "score": 43737.63819716722 }, { "content": "fn enforce_external_banks_are_contiguous(\n\n external_memory_map: &mut ExternalMemoryMap,\n\n chip: &mut FlashChip,\n\n) {\n\n if external_memory_map.banks.len() > 0 {\n\n external_memory_map.banks[0].start_address = chip.start;\n\n }\n\n if external_memory_map.banks.len() > 1 {\n\n for i in 0..external_memory_map.banks.len().saturating_sub(1) {\n\n let pair = &mut external_memory_map.banks[i..=(i + 1)];\n\n pair[1].start_address = pair[0].end_address();\n\n }\n\n }\n\n external_memory_map.banks.retain(|b| b.end_address() < chip.end);\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/normalize.rs", "rank": 78, "score": 43264.82253960021 }, { "content": "fn enforce_bootable_bank_not_golden(\n\n golden_index: &mut Option<usize>,\n\n internal_memory_map: &mut InternalMemoryMap,\n\n) {\n\n if *golden_index == internal_memory_map.bootable_index {\n\n *golden_index = None;\n\n }\n\n}\n", "file_path": "loadstone_front/src/app/menus/memory_map/normalize.rs", "rank": 79, "score": 43264.82253960021 }, { "content": "#[cfg(target_arch = 
\"arm\")]\n\n#[alloc_error_handler]\n\nfn oom(_: core::alloc::Layout) -> ! {\n\n defmt::error!(\"Out of heap memory!\");\n\n loop {}\n\n}\n\n\n\n#[cfg(target_arch = \"arm\")]\n\nuse panic_semihosting as _;\n\n\n\n#[cfg(target_arch = \"arm\")]\n\nuse defmt_rtt as _; // global logger\n\n\n\npub mod devices;\n\npub mod error;\n\n\n\n#[cfg(feature = \"cortex_m_any\")]\n\npub mod ports;\n\n\n\n#[cfg(all(target_arch = \"arm\", not(feature = \"cortex_m_any\")))]\n\ncompile_error!(\n\n \"Loadstone can't be built for `arm` without further target specification \\\n\n Either run tests with `cargo test` natively, or define a target through the \\\n\n appropriate configuration and/or feature flags.\"\n\n);\n", "file_path": "src/lib.rs", "rank": 80, "score": 42650.100461248374 }, { "content": "fn enforce_internal_bank_ranges_are_maintained(\n\n internal_memory_map: &mut InternalMemoryMap,\n\n internal_flash: &FlashChip,\n\n) {\n\n internal_memory_map.banks.retain(|b| b.end_address() < internal_flash.end);\n\n if let Some(index) = internal_memory_map.bootable_index {\n\n if index >= internal_memory_map.banks.len() {\n\n internal_memory_map.bootable_index = None;\n\n }\n\n }\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/normalize.rs", "rank": 81, "score": 42532.44708094916 }, { "content": "fn enforce_internal_banks_follow_bootloader(\n\n internal_memory_map: &mut InternalMemoryMap,\n\n internal_flash: &FlashChip,\n\n) {\n\n if internal_memory_map.banks.len() > 0 {\n\n // The start of the first bank must be aligned to the chip's erase granularity\n\n internal_memory_map.bootloader_location =\n\n internal_memory_map.bootloader_location.clamp(internal_flash.start, internal_flash.end);\n\n\n\n let bootloader_end = internal_memory_map.bootloader_location\n\n + KB!(1) * internal_memory_map.bootloader_length_kb;\n\n\n\n let bootloader_end_offset_from_start_of_flash =\n\n bootloader_end.saturating_sub(internal_flash.start);\n\n let aligned_offset =\n\n match 
bootloader_end_offset_from_start_of_flash % internal_flash.region_size {\n\n 0 => bootloader_end_offset_from_start_of_flash,\n\n modulo => {\n\n bootloader_end_offset_from_start_of_flash\n\n + (internal_flash.region_size.saturating_sub(modulo))\n\n }\n\n };\n\n assert!(aligned_offset % internal_flash.region_size == 0);\n\n let start_of_banks = internal_flash.start + aligned_offset;\n\n internal_memory_map.banks[0].start_address = start_of_banks;\n\n }\n\n}\n\n\n", "file_path": "loadstone_front/src/app/menus/memory_map/normalize.rs", "rank": 82, "score": 42532.44708094916 }, { "content": " #[marker_blanket]\n\n pub trait Flash: flash::ReadWrite<Error: error::Convertible> {}\n\n\n\n /// A supported serial must be able to read, write, read with a timeout,\n\n /// and report errors to the bootloader or boot manager.\n", "file_path": "src/devices/mod.rs", "rank": 83, "score": 39626.33586518553 }, { "content": "//! Full project ports for specific targets. They mainly\n\n//! provide a method to construct a generic bootloader from\n\n//! 
specific parts.\n\n\n\n#[allow(unused)]\n\nuse blue_hal::port;\n\n\n\n#[cfg(feature = \"stm32f412\")]\n\nport!(stm32f412: [bootloader, boot_manager, autogenerated, update_signal,]);\n\n\n\n#[cfg(feature = \"wgm160p\")]\n\nport!(wgm160p: [bootloader, autogenerated, update_signal,]);\n", "file_path": "src/ports/mod.rs", "rank": 84, "score": 33808.498806563046 }, { "content": "/// Supported hardware families.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Family {\n\n Stm32,\n\n Efm32,\n\n}\n\n\n\n/// Supported hardware subfamilies.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Subfamily {\n\n Stm32f4,\n\n Efm32Gg11,\n\n}\n\n\n\nimpl Display for Port {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(match self {\n\n Port::Stm32F412 => \"stm32f412\",\n\n Port::Wgm160P => \"wgm160p\",\n\n })\n", "file_path": "loadstone_config/src/port.rs", "rank": 85, "score": 32331.240004321728 }, { "content": "\n\nimpl Port {\n\n /// Hardware family of this port.\n\n pub fn family(&self) -> Family {\n\n match self {\n\n Port::Stm32F412 => Family::Stm32,\n\n Port::Wgm160P => Family::Efm32,\n\n }\n\n }\n\n\n\n /// Hardware subfamily of this port.\n\n pub fn subfamily(&self) -> Subfamily {\n\n match self {\n\n Port::Stm32F412 => Subfamily::Stm32f4,\n\n Port::Wgm160P => Subfamily::Efm32Gg11,\n\n }\n\n }\n\n\n\n /// Constants to be propagated to the linker script for this port. This mainly\n\n /// defines the sections of ram and flash memory.\n", "file_path": "loadstone_config/src/port.rs", "rank": 86, "score": 32330.490786553455 }, { "content": "use std::fmt::Display;\n\n\n\nuse crate::KB;\n\nuse enum_iterator::IntoEnumIterator;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Top level description of the hardware target. 
Typically a chip subfamily, but it\n\n/// may be more or less concrete depending on the available drivers.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize, IntoEnumIterator)]\n\npub enum Port {\n\n Stm32F412,\n\n Wgm160P,\n\n}\n\n\n\nimpl Default for Port {\n\n // Arbitrary default port for the purposes of seeding\n\n // the defaults in the web application\n\n fn default() -> Self { Self::Stm32F412 }\n\n}\n\n\n", "file_path": "loadstone_config/src/port.rs", "rank": 87, "score": 32330.39670630416 }, { "content": " }\n\n}\n\n\n\nimpl Display for Family {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(match self {\n\n Family::Stm32 => \"stm32\",\n\n Family::Efm32 => \"efm32\",\n\n })\n\n }\n\n}\n\n\n\nimpl Display for Subfamily {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(match self {\n\n Subfamily::Stm32f4 => \"f4\",\n\n Subfamily::Efm32Gg11 => \"gg11\",\n\n })\n\n }\n\n}\n", "file_path": "loadstone_config/src/port.rs", "rank": 88, "score": 32325.709447666814 }, { "content": "//! 
Concrete bootloader construction and flash bank layout for the wgm160p\n\n\n\nuse blue_hal::{drivers::efm32gg11b::{clocks, flash::{self, Flash}}, efm32pac, hal::null::{NullError, NullFlash, NullSerial, NullSystick}};\n\nuse crate::{devices::{bootloader::Bootloader}, error::{self, Error}};\n\nuse super::autogenerated;\n\nuse super::autogenerated::memory_map::{EXTERNAL_BANKS, MCU_BANKS};\n\n\n\n#[cfg(feature=\"ecdsa-verify\")]\n\nuse crate::devices::image::EcdsaImageReader as ImageReader;\n\n#[cfg(not(feature=\"ecdsa-verify\"))]\n\nuse crate::devices::image::CrcImageReader as ImageReader;\n\nuse super::update_signal::NullUpdateSignal;\n\n\n\nimpl Bootloader<NullFlash, Flash, NullSerial, NullSystick, ImageReader, NullUpdateSignal> {\n\n pub fn new() -> Self {\n\n let mut peripherals = efm32pac::Peripherals::take().unwrap();\n\n let clocks = clocks::Clocks::new(peripherals.CMU, &mut peripherals.MSC);\n\n let mcu_flash = flash::Flash::new(peripherals.MSC, &clocks);\n\n Bootloader {\n\n mcu_flash,\n", "file_path": "src/ports/wgm160p/bootloader.rs", "rank": 89, "score": 32324.900957868274 }, { "content": " // We might consider making these configurable later, but the need hasn't come up yet.\n\n pub fn linker_script_constants(&self) -> Option<LinkerScriptConstants> {\n\n match self {\n\n Port::Stm32F412 => Some(LinkerScriptConstants {\n\n flash: LinkerArea { origin: 0x08000000, size: KB!(896) },\n\n ram: LinkerArea { origin: 0x20000000, size: KB!(256) },\n\n }),\n\n Port::Wgm160P => Some(LinkerScriptConstants {\n\n flash: LinkerArea { origin: 0x00000000, size: KB!(1024) },\n\n ram: LinkerArea { origin: 0x20000000, size: KB!(128) },\n\n }),\n\n }\n\n }\n\n}\n\n\n\n/// Constants to be propagated to the linker script for this port.\n\npub struct LinkerScriptConstants {\n\n /// Available flash memory as defined in the linker script.\n\n pub flash: LinkerArea,\n\n /// Available ram memory as defined in the linker script.\n\n pub ram: LinkerArea,\n\n}\n\n\n\n/// A section of 
memory as defined in the linker script.\n\npub struct LinkerArea {\n\n pub origin: u32,\n\n pub size: usize,\n\n}\n", "file_path": "loadstone_config/src/port.rs", "rank": 90, "score": 32323.420517725226 }, { "content": "//! Concrete bootloader construction and flash bank layout for stm32f412\n\nuse crate::{devices::bootloader::Bootloader, error};\n\nuse crate::error::Error;\n\nuse blue_hal::hal::null::NullError;\n\nuse blue_hal::hal::time::Now;\n\nuse blue_hal::{drivers::{micron::n25q128a_flash,\n\n stm32f4::{flash, rcc::Clocks, serial, systick::SysTick}}, hal::time, stm32pac\n\n};\n\nuse super::autogenerated::{\n\n self,\n\n BOOT_TIME_METRICS_ENABLED,\n\n UPDATE_SIGNAL_ENABLED,\n\n RECOVERY_ENABLED, devices,\n\n memory_map::{EXTERNAL_BANKS, MCU_BANKS},\n\n pin_configuration::{self, *},\n\n};\n\n#[cfg(feature=\"ecdsa-verify\")]\n\nuse crate::devices::image::EcdsaImageReader as ImageReader;\n\n#[cfg(not(feature=\"ecdsa-verify\"))]\n\nuse crate::devices::image::CrcImageReader as ImageReader;\n", "file_path": "src/ports/stm32f412/bootloader.rs", "rank": 91, "score": 32322.52465199993 }, { "content": "use super::update_signal::{UpdateSignal, initialize_rtc_backup_domain};\n\n\n\nimpl Default for Bootloader<ExternalFlash, flash::McuFlash, Serial, SysTick, ImageReader, UpdateSignal> {\n\n fn default() -> Self { Self::new() }\n\n}\n\n\n\nimpl Bootloader<ExternalFlash, flash::McuFlash, Serial, SysTick, ImageReader, UpdateSignal> {\n\n pub fn new() -> Self {\n\n let mut peripherals = stm32pac::Peripherals::take().unwrap();\n\n let cortex_peripherals = cortex_m::Peripherals::take().unwrap();\n\n let mcu_flash = flash::McuFlash::new(peripherals.FLASH).unwrap();\n\n\n\n initialize_rtc_backup_domain(&mut peripherals.RCC, &mut peripherals.PWR);\n\n\n\n let (serial_pins, qspi_pins) = pin_configuration::pins(\n\n peripherals.GPIOA,\n\n peripherals.GPIOB,\n\n peripherals.GPIOC,\n\n peripherals.GPIOD,\n\n peripherals.GPIOE,\n", "file_path": "src/ports/stm32f412/bootloader.rs", 
"rank": 92, "score": 32321.09791352861 }, { "content": " peripherals.GPIOF,\n\n peripherals.GPIOG,\n\n peripherals.GPIOH,\n\n &mut peripherals.RCC,\n\n );\n\n let clocks = Clocks::hardcoded(peripherals.RCC);\n\n SysTick::init(cortex_peripherals.SYST, clocks);\n\n SysTick::wait(time::Seconds(1)); // Gives time for the flash chip to stabilize after powerup\n\n let optional_external_flash = devices::construct_flash(qspi_pins, peripherals.QUADSPI);\n\n let optional_serial = devices::construct_serial(serial_pins, clocks, peripherals.USART1, peripherals.USART2, peripherals.USART6);\n\n\n\n let start_time = if BOOT_TIME_METRICS_ENABLED {\n\n Some(SysTick::now())\n\n } else {\n\n None\n\n };\n\n\n\n let update_signal = if UPDATE_SIGNAL_ENABLED {\n\n let rtc = peripherals.RTC;\n\n Some(UpdateSignal::new(rtc))\n", "file_path": "src/ports/stm32f412/bootloader.rs", "rank": 93, "score": 32316.82800504439 }, { "content": "impl error::Convertible for flash::Error {\n\n fn into(self) -> Error {\n\n match self {\n\n flash::Error::MemoryNotReachable => Error::DriverError(\"[MCU Flash] Memory not reachable\"),\n\n flash::Error::MisalignedAccess => Error::DriverError(\"[MCU Flash] Misaligned memory access\"),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Convertible for n25q128a_flash::Error {\n\n fn into(self) -> Error {\n\n match self {\n\n n25q128a_flash::Error::TimeOut => Error::DriverError(\"[External Flash] Operation timed out\"),\n\n n25q128a_flash::Error::QspiError => Error::DriverError(\"[External Flash] Qspi error\"),\n\n n25q128a_flash::Error::WrongManufacturerId => Error::DriverError(\"[External Flash] Wrong manufacturer ID\"),\n\n n25q128a_flash::Error::MisalignedAccess => Error::DriverError(\"[External Flash] Misaligned memory access\"),\n\n n25q128a_flash::Error::AddressOutOfRange => Error::DriverError(\"[External Flash] Address out of range\"),\n\n }\n\n }\n\n}\n", "file_path": "src/ports/stm32f412/bootloader.rs", "rank": 94, "score": 32315.10904555808 }, { "content": " 
external_banks: &EXTERNAL_BANKS,\n\n mcu_banks: &MCU_BANKS,\n\n external_flash: None,\n\n serial: None,\n\n boot_metrics: Default::default(),\n\n start_time: None,\n\n recovery_enabled: false,\n\n greeting: autogenerated::LOADSTONE_GREETING,\n\n _marker: Default::default(),\n\n update_signal: None,\n\n }\n\n }\n\n}\n\n\n\nimpl error::Convertible for flash::Error {\n\n fn into(self) -> Error {\n\n match self {\n\n flash::Error::MemoryNotReachable => Error::DriverError(\"[MCU Flash] Memory not reachable\"),\n\n flash::Error::MisalignedAccess => Error::DriverError(\"[MCU Flash] Misaligned memory access\"),\n\n flash::Error::MemoryIsLocked => Error::DriverError(\"[MCU Flash] Memory is locked\"),\n\n flash::Error::InvalidAddress => Error::DriverError(\"[MCU Flash] Address is invalid\"),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Convertible for NullError {\n\n fn into(self) -> Error { panic!(\"This error should never happen!\") }\n\n}\n", "file_path": "src/ports/wgm160p/bootloader.rs", "rank": 95, "score": 32315.10904555808 }, { "content": " } else {\n\n None\n\n };\n\n\n\n Bootloader {\n\n mcu_flash,\n\n external_banks: &EXTERNAL_BANKS,\n\n mcu_banks: &MCU_BANKS,\n\n external_flash: optional_external_flash,\n\n serial: optional_serial,\n\n boot_metrics: Default::default(),\n\n start_time,\n\n recovery_enabled: RECOVERY_ENABLED,\n\n greeting: autogenerated::LOADSTONE_GREETING,\n\n _marker: Default::default(),\n\n update_signal,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ports/stm32f412/bootloader.rs", "rank": 96, "score": 32315.10904555808 }, { "content": "\n\nimpl error::Convertible for NullError {\n\n fn into(self) -> Error { panic!(\"This error should never happen!\") }\n\n}\n\n\n\nimpl error::Convertible for serial::Error {\n\n fn into(self) -> Error {\n\n match self {\n\n serial::Error::Framing => Error::DriverError(\"[Serial] Framing error\"),\n\n serial::Error::Noise => Error::DriverError(\"[Serial] Noise error\"),\n\n serial::Error::Overrun => 
Error::DriverError(\"[Serial] Overrun error\"),\n\n serial::Error::Parity => Error::DriverError(\"[Serial] Parity error\"),\n\n serial::Error::Timeout => Error::DriverError(\"[Serial] Timeout error\"),\n\n _ => Error::DriverError(\"[Serial] Unexpected serial error\"),\n\n }\n\n }\n\n}\n", "file_path": "src/ports/stm32f412/bootloader.rs", "rank": 97, "score": 32315.10904555808 }, { "content": " fn end_transmission(&mut self) {\n\n self.finished = true;\n\n if self.serial.write_char(xmodem::ACK as char).is_err() {\n\n return;\n\n }\n\n if let Ok(xmodem::ETB) = self.serial.read(xmodem::DEFAULT_TIMEOUT) {\n\n // We don't care about this being received, as there's no\n\n // recovering from a failure here.\n\n let _ = self.serial.write_char(xmodem::ACK as char);\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, S: TimeoutRead + Write + ?Sized> Drop for BlockIterator<'a, S> {\n\n // Must fully consume the iterator on drop\n\n // to close the xmodem communication cleanly\n\n fn drop(&mut self) { self.for_each(drop); }\n\n}\n", "file_path": "src/devices/cli/file_transfer.rs", "rank": 98, "score": 31205.60383535363 }, { "content": " return None;\n\n }\n\n }\n\n buffer_index += 1;\n\n if buffer_index == xmodem::MAX_PACKET_SIZE {\n\n continue 'block_loop;\n\n }\n\n }\n\n }\n\n\n\n // Fully timed out\n\n self.finished = true;\n\n None\n\n }\n\n}\n\n\n\nimpl<'a, S: TimeoutRead + Write + ?Sized> BlockIterator<'a, S> {\n\n fn process_message(&mut self, buffer: &[u8]) -> Option<[u8; BLOCK_SIZE]> {\n\n match xmodem::parse_message(&buffer) {\n\n Ok((_, xmodem::Message::EndOfTransmission)) => {\n", "file_path": "src/devices/cli/file_transfer.rs", "rank": 99, "score": 31204.963052751176 } ]
Rust
src/influxdb_ioxd/rpc/management.rs
mitch292/influxdb_iox
7e8f3aaafbd497c0c6e8f993b4ce0330e6d43ccb
use std::convert::TryInto; use std::fmt::Debug; use std::sync::Arc; use tonic::{Request, Response, Status}; use tracing::error; use data_types::database_rules::DatabaseRules; use data_types::DatabaseName; use generated_types::google::{ AlreadyExists, FieldViolation, FieldViolationExt, InternalError, NotFound, PreconditionViolation, }; use generated_types::influxdata::iox::management::v1::*; use query::DatabaseStore; use server::{ConnectionManager, Error, Server}; struct ManagementService<M: ConnectionManager> { server: Arc<Server<M>>, } fn default_error_handler(error: Error) -> tonic::Status { match error { Error::IdNotSet => PreconditionViolation { category: "Writer ID".to_string(), subject: "influxdata.com/iox".to_string(), description: "Writer ID must be set".to_string(), } .into(), error => { error!(?error, "Unexpected error"); InternalError {}.into() } } } #[tonic::async_trait] impl<M> management_service_server::ManagementService for ManagementService<M> where M: ConnectionManager + Send + Sync + Debug + 'static, { async fn get_writer_id( &self, _: Request<GetWriterIdRequest>, ) -> Result<Response<GetWriterIdResponse>, Status> { match self.server.require_id().ok() { Some(id) => Ok(Response::new(GetWriterIdResponse { id })), None => return Err(NotFound::default().into()), } } async fn update_writer_id( &self, request: Request<UpdateWriterIdRequest>, ) -> Result<Response<UpdateWriterIdResponse>, Status> { self.server.set_id(request.get_ref().id); Ok(Response::new(UpdateWriterIdResponse {})) } async fn list_databases( &self, _: Request<ListDatabasesRequest>, ) -> Result<Response<ListDatabasesResponse>, Status> { let names = self.server.db_names_sorted().await; Ok(Response::new(ListDatabasesResponse { names })) } async fn get_database( &self, request: Request<GetDatabaseRequest>, ) -> Result<Response<GetDatabaseResponse>, Status> { let name = DatabaseName::new(request.into_inner().name).field("name")?; match self.server.db_rules(&name).await { Some(rules) => 
Ok(Response::new(GetDatabaseResponse { rules: Some(rules.into()), })), None => { return Err(NotFound { resource_type: "database".to_string(), resource_name: name.to_string(), ..Default::default() } .into()) } } } async fn create_database( &self, request: Request<CreateDatabaseRequest>, ) -> Result<Response<CreateDatabaseResponse>, Status> { let rules: DatabaseRules = request .into_inner() .rules .ok_or_else(|| FieldViolation::required("")) .and_then(TryInto::try_into) .map_err(|e| e.scope("rules"))?; let name = DatabaseName::new(rules.name.clone()).expect("protobuf mapping didn't validate name"); match self.server.create_database(name, rules).await { Ok(_) => Ok(Response::new(CreateDatabaseResponse {})), Err(Error::DatabaseAlreadyExists { db_name }) => { return Err(AlreadyExists { resource_type: "database".to_string(), resource_name: db_name, ..Default::default() } .into()) } Err(e) => Err(default_error_handler(e)), } } } pub fn make_server<M>( server: Arc<Server<M>>, ) -> management_service_server::ManagementServiceServer< impl management_service_server::ManagementService, > where M: ConnectionManager + Send + Sync + Debug + 'static, { management_service_server::ManagementServiceServer::new(ManagementService { server }) }
use std::convert::TryInto; use std::fmt::Debug; use std::sync::Arc; use tonic::{Request, Response, Status}; use tracing::error; use data_types::database_rules::DatabaseRules; use data_types::DatabaseName; use generated_types::google::{ AlreadyExists, FieldViolation, FieldViolationExt, InternalError, NotFound, PreconditionViolation, }; use generated_types::influxdata::iox::management::v1::*; use query::DatabaseStore; use server::{ConnectionManager, Error, Server}; struct ManagementService<M: ConnectionManager> { server: Arc<Server<M>>, } fn default_error_handler(error: Error) -> tonic::Status { match error { Error::IdNotSet => PreconditionViolation { category: "Writer ID".to_string(), subject: "influxdata.com/iox".to_string(), description: "Writer ID must be set".to_string(), } .into(), error => { error!(?error, "Unexpected error"); InternalError {}.into() } } } #[tonic::async_trait] impl<M> management_service_server::ManagementService for ManagementService<M> where M: ConnectionManager + Send + Sync + Debug + 'static, { async fn get_writer_id( &self, _: Request<GetWriterIdRequest>, ) -> Result<Response<GetWriterIdResponse>, Status> { match self.server.require_id().ok() { Some(id) => Ok(Response::new(GetWriterIdResponse { id })), None => return Err(NotFound::default().into()), } } async fn update_writer_id( &self, request: Request<UpdateWriterIdRequest>, ) -> Result<Response<UpdateWriterIdResponse>, Status> { self.server.set_id(request.get_ref().id); Ok(Response::new(UpdateWriterIdResponse {})) } async fn list_databases( &sel
async fn get_database( &self, request: Request<GetDatabaseRequest>, ) -> Result<Response<GetDatabaseResponse>, Status> { let name = DatabaseName::new(request.into_inner().name).field("name")?; match self.server.db_rules(&name).await { Some(rules) => Ok(Response::new(GetDatabaseResponse { rules: Some(rules.into()), })), None => { return Err(NotFound { resource_type: "database".to_string(), resource_name: name.to_string(), ..Default::default() } .into()) } } } async fn create_database( &self, request: Request<CreateDatabaseRequest>, ) -> Result<Response<CreateDatabaseResponse>, Status> { let rules: DatabaseRules = request .into_inner() .rules .ok_or_else(|| FieldViolation::required("")) .and_then(TryInto::try_into) .map_err(|e| e.scope("rules"))?; let name = DatabaseName::new(rules.name.clone()).expect("protobuf mapping didn't validate name"); match self.server.create_database(name, rules).await { Ok(_) => Ok(Response::new(CreateDatabaseResponse {})), Err(Error::DatabaseAlreadyExists { db_name }) => { return Err(AlreadyExists { resource_type: "database".to_string(), resource_name: db_name, ..Default::default() } .into()) } Err(e) => Err(default_error_handler(e)), } } } pub fn make_server<M>( server: Arc<Server<M>>, ) -> management_service_server::ManagementServiceServer< impl management_service_server::ManagementService, > where M: ConnectionManager + Send + Sync + Debug + 'static, { management_service_server::ManagementServiceServer::new(ManagementService { server }) }
f, _: Request<ListDatabasesRequest>, ) -> Result<Response<ListDatabasesResponse>, Status> { let names = self.server.db_names_sorted().await; Ok(Response::new(ListDatabasesResponse { names })) }
function_block-function_prefixed
[ { "content": "type DatabaseError = Box<dyn std::error::Error + Send + Sync + 'static>;\n\n\n\n/// A server ID of 0 is reserved and indicates no ID has been configured.\n\nconst SERVER_ID_NOT_SET: u32 = 0;\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\"Server error: {}\", source))]\n\n ServerError { source: std::io::Error },\n\n #[snafu(display(\"database not found: {}\", db_name))]\n\n DatabaseNotFound { db_name: String },\n\n #[snafu(display(\"invalid database: {}\", source))]\n\n InvalidDatabaseName { source: DatabaseNameError },\n\n #[snafu(display(\"database error: {}\", source))]\n\n UnknownDatabaseError { source: DatabaseError },\n\n #[snafu(display(\"no local buffer for database: {}\", db))]\n\n NoLocalBuffer { db: String },\n\n #[snafu(display(\"host group not found: {}\", id))]\n\n HostGroupNotFound { id: HostGroupId },\n\n #[snafu(display(\"no hosts in group: {}\", id))]\n", "file_path": "server/src/lib.rs", "rank": 0, "score": 334181.82957129396 }, { "content": "pub fn router_service<M: ConnectionManager + Send + Sync + Debug + 'static>(\n\n server: Arc<AppServer<M>>,\n\n) -> RouterService<Body, ApplicationError> {\n\n let router = router(server);\n\n RouterService::new(router).unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::net::{IpAddr, Ipv4Addr, SocketAddr};\n\n\n\n use arrow_deps::{arrow::record_batch::RecordBatch, assert_table_eq};\n\n use query::exec::Executor;\n\n use reqwest::{Client, Response};\n\n\n\n use hyper::Server;\n\n\n\n use data_types::{\n\n database_rules::{DatabaseRules, WalBufferConfig, WalBufferRollover},\n", "file_path": "src/influxdb_ioxd/http.rs", "rank": 1, "score": 323612.97257319186 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync + 'static>;\n", "file_path": "tests/end-to-end.rs", "rank": 2, "score": 304904.49254685536 }, { "content": "type TestError = Box<dyn std::error::Error + Send + Sync + 'static>;\n", "file_path": 
"wal/tests/no_concurrency.rs", "rank": 3, "score": 294671.69486609736 }, { "content": "type TestError = Box<dyn std::error::Error + Send + Sync + 'static>;\n\npub type Result<T = (), E = TestError> = std::result::Result<T, E>;\n\n\n", "file_path": "wal/tests/helpers/mod.rs", "rank": 4, "score": 290593.829636973 }, { "content": "type PanicFunctionPtr = Arc<Box<dyn Fn(&PanicInfo<'_>) + Sync + Send + 'static>>;\n\n\n\n/// RAII guard that installs a custom panic hook to send panic\n\n/// information to tracing.\n\n///\n\n/// Upon construction registers a custom panic\n\n/// hook which sends the panic to tracing first, before calling any\n\n/// prior panic hook.\n\n///\n\n/// Upon drop, restores the pre-existing panic hook\n\npub struct SendPanicsToTracing {\n\n /// The previously installed panic hook -- Note it is wrapped in an\n\n /// `Option` so we can `.take` it during the call to `drop()`;\n\n old_panic_hook: Option<PanicFunctionPtr>,\n\n}\n\n\n\nimpl SendPanicsToTracing {\n\n pub fn new() -> Self {\n\n let current_panic_hook: PanicFunctionPtr = Arc::new(panic::take_hook());\n\n let old_panic_hook = Some(Arc::clone(&current_panic_hook));\n", "file_path": "panic_logging/src/lib.rs", "rank": 5, "score": 265163.7690260489 }, { "content": "#[async_trait]\n\npub trait Database: Debug + Send + Sync {\n\n type Error: std::error::Error + Send + Sync + 'static;\n\n type Chunk: PartitionChunk;\n\n\n\n /// Stores the replicated write into the database.\n\n async fn store_replicated_write(&self, write: &ReplicatedWrite) -> Result<(), Self::Error>;\n\n\n\n /// Return the partition keys for data in this DB\n\n fn partition_keys(&self) -> Result<Vec<String>, Self::Error>;\n\n\n\n /// Returns a covering set of chunks in the specified partition. 
A\n\n /// covering set means that together the chunks make up a single\n\n /// complete copy of the data being queried.\n\n fn chunks(&self, partition_key: &str) -> Vec<Arc<Self::Chunk>>;\n\n}\n\n\n\n/// Collection of data that shares the same partition key\n", "file_path": "query/src/lib.rs", "rank": 6, "score": 233013.06090683682 }, { "content": "#[async_trait]\n\npub trait PartitionChunk: Debug + Send + Sync {\n\n type Error: std::error::Error + Send + Sync + 'static;\n\n\n\n /// returns the Id of this chunk. Ids are unique within a\n\n /// particular partition.\n\n fn id(&self) -> u32;\n\n\n\n /// returns the partition metadata stats for every table in the partition\n\n fn table_stats(&self) -> Result<Vec<TableSummary>, Self::Error>;\n\n\n\n /// Returns true if this chunk *might* have data that passes the\n\n /// predicate. If false is returned, this chunk can be\n\n /// skipped entirely. If true is returned, there still may not be\n\n /// rows that match.\n\n ///\n\n /// This is used during query planning to skip including entire chunks\n\n fn could_pass_predicate(&self, _predicate: &Predicate) -> Result<bool, Self::Error> {\n\n Ok(true)\n\n }\n\n\n", "file_path": "query/src/lib.rs", "rank": 7, "score": 228746.9505202389 }, { "content": "/// Storage for `Databases` which can be retrieved by name\n\npub trait DatabaseStore: Debug + Send + Sync {\n\n /// The type of database that is stored by this DatabaseStore\n\n type Database: Database;\n\n\n\n /// The type of error this DataBase store generates\n\n type Error: std::error::Error + Send + Sync + 'static;\n\n\n\n /// List the database names.\n\n async fn db_names_sorted(&self) -> Vec<String>;\n\n\n\n /// Retrieve the database specified by `name` returning None if no\n\n /// such database exists\n\n async fn db(&self, name: &str) -> Option<Arc<Self::Database>>;\n\n\n\n /// Retrieve the database specified by `name`, creating it if it\n\n /// doesn't exist.\n\n async fn db_or_create(&self, name: &str) -> 
Result<Arc<Self::Database>, Self::Error>;\n\n\n\n /// Provide a query executor to use for running queries on\n\n /// databases in this `DatabaseStore`\n", "file_path": "query/src/lib.rs", "rank": 8, "score": 228740.7511309476 }, { "content": "/// Implements the logic of the specific selector function (this is a\n\n/// cutdown version of the Accumulator DataFusion trait, to allow\n\n/// sharing between implementations)\n\ntrait Selector: Debug + Default + Send + Sync {\n\n /// What type of values does this selector function work with (time is\n\n /// always I64)\n\n fn value_data_type() -> DataType;\n\n\n\n /// return state in a form that DataFusion can store during execution\n\n fn datafusion_state(&self) -> DataFusionResult<Vec<ScalarValue>>;\n\n\n\n /// produces the final value of this selector for the specified output type\n\n fn evaluate(&self, output: &SelectorOutput) -> DataFusionResult<ScalarValue>;\n\n\n\n /// Update this selector's state based on values in value_arr and time_arr\n\n fn update_batch(&mut self, value_arr: &ArrayRef, time_arr: &ArrayRef) -> DataFusionResult<()>;\n\n}\n\n\n\n// Describes which part of the selector to return: the timestamp or\n\n// the value (when https://issues.apache.org/jira/browse/ARROW-10945\n\n// is fixed, this enum should be removed)\n\n#[derive(Debug, Clone, Copy)]\n\npub enum SelectorOutput {\n", "file_path": "query/src/func/selectors.rs", "rank": 9, "score": 228740.7511309476 }, { "content": "#[async_trait]\n\npub trait ObjectStoreApi: Send + Sync + 'static {\n\n /// The type of the locations used in interacting with this object store.\n\n type Path: path::ObjectStorePath;\n\n\n\n /// The error returned from fallible methods\n\n type Error: std::error::Error + Send + Sync + 'static;\n\n\n\n /// Return a new location path appropriate for this object storage\n\n fn new_path(&self) -> Self::Path;\n\n\n\n /// Save the provided bytes to the specified location.\n\n async fn put<S>(\n\n &self,\n\n location: &Self::Path,\n\n 
bytes: S,\n\n length: Option<usize>,\n\n ) -> Result<(), Self::Error>\n\n where\n\n S: Stream<Item = io::Result<Bytes>> + Send + Sync + 'static;\n\n\n", "file_path": "object_store/src/lib.rs", "rank": 10, "score": 220872.04044326983 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct MemWriter {\n\n mem: Arc<Mutex<Cursor<Vec<u8>>>>,\n\n}\n\n\n\nimpl MemWriter {\n\n /// Returns the inner buffer as long as there are no other references to the\n\n /// Arc.\n\n pub fn into_inner(self) -> Option<Vec<u8>> {\n\n Arc::try_unwrap(self.mem)\n\n .ok()\n\n .map(|mutex| mutex.into_inner().into_inner())\n\n }\n\n}\n\n\n\nimpl Write for MemWriter {\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n let mut inner = self.mem.lock();\n\n inner.write(buf)\n\n }\n\n\n", "file_path": "server/src/snapshot.rs", "rank": 11, "score": 192953.1750219579 }, { "content": "pub trait NumericEncoding: Send + Sync + std::fmt::Display + std::fmt::Debug {\n\n type Item;\n\n\n\n fn size(&self) -> usize;\n\n fn value(&self, row_id: usize) -> Option<Self::Item>;\n\n fn values(&self, row_ids: &[usize]) -> Vec<Option<Self::Item>>;\n\n\n\n fn encoded_values(&self, row_ids: &[usize]) -> Vec<Self::Item>;\n\n fn all_encoded_values(&self) -> Vec<Self::Item>;\n\n\n\n fn scan_from(&self, row_id: usize) -> &[Option<Self::Item>];\n\n\n\n fn sum_by_ids(&self, row_ids: &mut croaring::Bitmap) -> Option<Self::Item>;\n\n fn sum_by_id_range(&self, from_row_id: usize, to_row_id: usize) -> Option<Self::Item>;\n\n\n\n fn count_by_id_range(&self, from_row_id: usize, to_row_id: usize) -> u64;\n\n fn count_by_ids(&self, row_ids: &croaring::Bitmap) -> u64;\n\n\n\n // Returns the index of the first value equal to `v`\n\n fn row_id_eq_value(&self, v: Self::Item) -> Option<usize>;\n", "file_path": "mem_qe/src/encoding.rs", "rank": 12, "score": 186354.41179618385 }, { "content": "fn router<M>(server: Arc<AppServer<M>>) -> Router<Body, ApplicationError>\n\nwhere\n\n M: ConnectionManager + Send + Sync + 
Debug + 'static,\n\n{\n\n // Create a router and specify the the handlers.\n\n Router::builder()\n\n .data(server)\n\n .middleware(Middleware::pre(|req| async move {\n\n info!(request = ?req, \"Processing request\");\n\n Ok(req)\n\n }))\n\n .middleware(Middleware::post(|res| async move {\n\n info!(response = ?res, \"Successfully processed request\");\n\n Ok(res)\n\n })) // this endpoint is for API backward compatibility with InfluxDB 2.x\n\n .post(\"/api/v2/write\", write::<M>)\n\n .get(\"/ping\", ping)\n\n .get(\"/health\", health)\n\n .get(\"/iox/api/v1/databases\", list_databases::<M>)\n\n .put(\"/iox/api/v1/databases/:name\", create_database::<M>)\n", "file_path": "src/influxdb_ioxd/http.rs", "rank": 13, "score": 175075.45094433965 }, { "content": "pub fn type_description(value: wb::ColumnValue) -> &'static str {\n\n use wb::ColumnValue::*;\n\n\n\n match value {\n\n NONE => \"none\",\n\n TagValue => \"tag\",\n\n I64Value => \"i64\",\n\n U64Value => \"u64\",\n\n F64Value => \"f64\",\n\n BoolValue => \"bool\",\n\n StringValue => \"String\",\n\n }\n\n}\n\n\n\n/// A friendlier wrapper to help deal with the Flatbuffers write data\n\n#[derive(Debug, Default, Clone, PartialEq)]\n\npub struct ReplicatedWrite {\n\n pub data: Vec<u8>,\n\n}\n\n\n", "file_path": "data_types/src/data.rs", "rank": 15, "score": 171439.9308664534 }, { "content": "type TonicStream<T> = Pin<Box<dyn Stream<Item = Result<T, tonic::Status>> + Send + Sync + 'static>>;\n\n\n\n#[derive(Deserialize, Debug)]\n", "file_path": "src/influxdb_ioxd/rpc/flight.rs", "rank": 16, "score": 162033.90754526635 }, { "content": "pub fn make_server<T: DatabaseStore + 'static>(db_store: Arc<T>) -> FlightServer<impl Flight> {\n\n FlightServer::new(FlightService { db_store })\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl<T> Flight for FlightService<T>\n\nwhere\n\n T: DatabaseStore + 'static,\n\n{\n\n type HandshakeStream = TonicStream<HandshakeResponse>;\n\n type ListFlightsStream = TonicStream<FlightInfo>;\n\n type 
DoGetStream = TonicStream<FlightData>;\n\n type DoPutStream = TonicStream<PutResult>;\n\n type DoActionStream = TonicStream<arrow_flight::Result>;\n\n type ListActionsStream = TonicStream<ActionType>;\n\n type DoExchangeStream = TonicStream<FlightData>;\n\n\n\n async fn get_schema(\n\n &self,\n\n _request: Request<FlightDescriptor>,\n", "file_path": "src/influxdb_ioxd/rpc/flight.rs", "rank": 17, "score": 157850.02131487042 }, { "content": "pub fn make_server<T: DatabaseStore + 'static>(db_store: Arc<T>) -> StorageServer<impl Storage> {\n\n StorageServer::new(StorageService { db_store })\n\n}\n", "file_path": "src/influxdb_ioxd/rpc/storage.rs", "rank": 18, "score": 157850.02131487042 }, { "content": "fn escape_or_fallback_inner<'a, Error>(\n\n normal: impl Fn(&'a str) -> IResult<&'a str, &'a str, Error>,\n\n escape_char: &'static str,\n\n escaped: impl Fn(&'a str) -> IResult<&'a str, &'a str, Error>,\n\n) -> impl Fn(&'a str) -> IResult<&'a str, EscapedStr<'a>, Error>\n\nwhere\n\n Error: nom::error::ParseError<&'a str>,\n\n{\n\n move |i| {\n\n let mut result = SmallVec::<[&str; 4]>::new();\n\n let mut head = i;\n\n\n\n loop {\n\n match normal(head) {\n\n Ok((remaining, parsed)) => {\n\n result.push(parsed);\n\n head = remaining;\n\n }\n\n Err(nom::Err::Error(_)) => {\n\n // FUTURE: https://doc.rust-lang.org/std/primitive.str.html#method.strip_prefix\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 19, "score": 155054.49309946183 }, { "content": "// Converts from line protocol `Schema` to the equivalent parquet schema `Type`.\n\nfn convert_to_parquet_schema(schema: &Schema) -> Result<Arc<parquet::schema::types::Type>, Error> {\n\n let mut parquet_columns = Vec::new();\n\n\n\n for (i, (influxdb_column_type, field)) in schema.iter().enumerate() {\n\n debug!(\n\n \"Determining parquet schema for column[{}] {:?} -> {:?}\",\n\n i, influxdb_column_type, field\n\n );\n\n let (physical_type, logical_type) = match influxdb_column_type {\n\n 
Some(InfluxColumnType::Tag) => (PhysicalType::BYTE_ARRAY, Some(LogicalType::UTF8)),\n\n Some(InfluxColumnType::Field(InfluxFieldType::Boolean)) => {\n\n (PhysicalType::BOOLEAN, None)\n\n }\n\n Some(InfluxColumnType::Field(InfluxFieldType::Float)) => (PhysicalType::DOUBLE, None),\n\n Some(InfluxColumnType::Field(InfluxFieldType::Integer)) => {\n\n (PhysicalType::INT64, Some(LogicalType::UINT_64))\n\n }\n\n Some(InfluxColumnType::Field(InfluxFieldType::UInteger)) => {\n\n (PhysicalType::INT64, Some(LogicalType::UINT_64))\n\n }\n", "file_path": "ingest/src/parquet/writer.rs", "rank": 20, "score": 155023.33712526798 }, { "content": "fn main() -> Result<(), std::io::Error> {\n\n // load all environment variables from .env before doing anything\n\n load_dotenv();\n\n\n\n let config = Config::from_args();\n\n\n\n // Logging level is determined via:\n\n // 1. If RUST_LOG environment variable is set, use that value\n\n // 2. if `-vv` (multiple instances of verbose), use DEFAULT_DEBUG_LOG_LEVEL\n\n // 2. if `-v` (single instances of verbose), use DEFAULT_VERBOSE_LOG_LEVEL\n\n // 3. 
Otherwise use DEFAULT_LOG_LEVEL\n\n let logging_level = LoggingLevel::new(config.verbose);\n\n\n\n let tokio_runtime = get_runtime(config.num_threads)?;\n\n tokio_runtime.block_on(async move {\n\n let host = config.host;\n\n\n\n match config.command {\n\n Some(Command::Convert {\n\n input,\n", "file_path": "src/main.rs", "rank": 21, "score": 153178.15169092297 }, { "content": "#[derive(Debug)]\n\nstruct TestDebugStruct {\n\n b: bool,\n\n s: String,\n\n}\n\nimpl TestDebugStruct {\n\n fn new() -> Self {\n\n Self {\n\n b: true,\n\n s: \"The String\".into(),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for TestDebugStruct {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"Display for TestDebugStruct b:{} s:\\\"{}\\\"\",\n\n self.b, self.s\n\n )\n\n }\n\n}\n\n\n", "file_path": "logfmt/tests/logging.rs", "rank": 22, "score": 152766.8710869213 }, { "content": "type DecodeFn<T> = fn(src: &[u8], dst: &mut Vec<T>) -> Result<(), Box<dyn std::error::Error>>;\n\n\n", "file_path": "benches/encoders.rs", "rank": 23, "score": 146767.3094019841 }, { "content": "type EncodeFn<T> = fn(src: &[T], dst: &mut Vec<u8>) -> Result<(), Box<dyn std::error::Error>>;\n", "file_path": "benches/encoders.rs", "rank": 24, "score": 146767.3094019841 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Set {\n\n /// The writer ID to set\n\n id: NonZeroU32,\n\n}\n\n\n", "file_path": "src/commands/writer.rs", "rank": 25, "score": 146040.46147614636 }, { "content": "fn nullable_to_str(nullability: bool) -> &'static str {\n\n if nullability {\n\n \"can be null\"\n\n } else {\n\n \"can not be null\"\n\n }\n\n}\n\n\n\npub type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n\n/// Schema for an IOx table.\n\n///\n\n/// This struct is a wrapper around an Arrow `SchemaRef` that knows\n\n/// how to create and interpret the \"user defined metadata\" added to that schema\n\n/// by IOx.\n\n///\n\n/// The metadata can be used to map back and forth to the InfluxDB\n\n/// 
data model, which is described in the\n\n/// [documentation](https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/).\n\n///\n", "file_path": "data_types/src/schema.rs", "rank": 26, "score": 144918.26825147122 }, { "content": "/// Create the parquet writer properties (which defines the encoding\n\n/// and compression for each column) for a given schema.\n\nfn create_writer_props(\n\n schema: &Schema,\n\n compression_level: CompressionLevel,\n\n) -> Arc<WriterProperties> {\n\n let mut builder = WriterProperties::builder();\n\n\n\n // TODO: Maybe tweak more of these settings for maximum performance.\n\n\n\n // start off with GZIP for maximum compression ratio (at expense of CPU\n\n // performance...)\n\n builder = builder.set_compression(Compression::GZIP);\n\n\n\n // Setup encoding as defined in\n\n // https://github.com/influxdata/influxdb_iox/blob/alamb/encoding_thoughts/docs/encoding_thoughts.md\n\n //\n\n // Note: the property writer builder's default is to encode\n\n // everything with dictionary encoding, and it turns out that\n\n // dictionary encoding overrides all other encodings. 
Thus, we\n\n // must explicitly disable dictionary encoding when another\n\n // encoding is desired.\n", "file_path": "ingest/src/parquet/writer.rs", "rank": 27, "score": 144289.42413062954 }, { "content": "#[derive(Default, Clone)]\n\nstruct CapturedWriter {\n\n // all state is held in the LOG_LINES thread local variable\n\n}\n\n\n\nimpl CapturedWriter {\n\n fn new() -> Self {\n\n let global_writer = GLOBAL_WRITER.lock();\n\n global_writer.clone().clear()\n\n }\n\n\n\n /// Clear all thread local state\n\n fn clear(self) -> Self {\n\n LOG_LINES.with(|lines| {\n\n let mut cursor = lines.lock();\n\n cursor.get_mut().clear()\n\n });\n\n self\n\n }\n\n\n\n fn to_strings(&self) -> Vec<String> {\n", "file_path": "logfmt/tests/logging.rs", "rank": 28, "score": 142313.81954223267 }, { "content": "#[derive(Default, Debug)]\n\nstruct ConfigState {\n\n reservations: BTreeSet<DatabaseName<'static>>,\n\n databases: BTreeMap<DatabaseName<'static>, Arc<Db>>,\n\n host_groups: BTreeMap<HostGroupId, Arc<HostGroup>>,\n\n}\n\n\n\n/// CreateDatabaseHandle is retunred when a call is made to `create_db` on\n\n/// the Config struct. The handle can be used to hold a reservation for the\n\n/// database name. Calling `commit` on the handle will consume the struct\n\n/// and move the database from reserved to being in the config.\n\n///\n\n/// The goal is to ensure that database names can be reserved with\n\n/// minimal time holding a write lock on the config state. 
This allows\n\n/// the caller (the server) to reserve the database name, persist its\n\n/// configuration and then commit the change in-memory after it has been\n\n/// persisted.\n\n#[derive(Debug)]\n\npub(crate) struct CreateDatabaseHandle<'a> {\n\n pub db: Arc<Db>,\n\n pub name: DatabaseName<'static>,\n", "file_path": "server/src/config.rs", "rank": 29, "score": 142026.8353956734 }, { "content": "struct TestServer {\n\n server_process: Child,\n\n\n\n // The temporary directory **must** be last so that it is\n\n // dropped after the database closes.\n\n #[allow(dead_code)]\n\n dir: TempDir,\n\n}\n\n\n\nimpl TestServer {\n\n fn new() -> Result<Self> {\n\n let dir = test_helpers::tmp_dir().unwrap();\n\n\n\n let server_process = Command::cargo_bin(\"influxdb_iox\")\n\n .unwrap()\n\n // Can enable for debbugging\n\n //.arg(\"-vv\")\n\n .env(\"INFLUXDB_IOX_ID\", \"1\")\n\n .env(\"INFLUXDB_IOX_BIND_ADDR\", HTTP_BIND_ADDR)\n\n .env(\"INFLUXDB_IOX_GRPC_BIND_ADDR\", GRPC_BIND_ADDR)\n", "file_path": "tests/end-to-end.rs", "rank": 30, "score": 142020.6937340257 }, { "content": "pub fn file_name_for_sequence_number(id: u64) -> String {\n\n format!(\"wal_{:016x}.db\", id)\n\n}\n\n\n", "file_path": "wal/tests/helpers/mod.rs", "rank": 31, "score": 140173.366694125 }, { "content": "fn encode_bad_request(violation: Vec<FieldViolation>) -> Result<Any, EncodeError> {\n\n let mut buffer = BytesMut::new();\n\n\n\n pb::google::rpc::BadRequest {\n\n field_violations: violation\n\n .into_iter()\n\n .map(|f| pb::google::rpc::bad_request::FieldViolation {\n\n field: f.field,\n\n description: f.description,\n\n })\n\n .collect(),\n\n }\n\n .encode(&mut buffer)?;\n\n\n\n Ok(Any {\n\n type_url: \"type.googleapis.com/google.rpc.BadRequest\".to_string(),\n\n value: buffer.freeze(),\n\n })\n\n}\n\n\n", "file_path": "google_types/src/lib.rs", "rank": 32, "score": 140010.61053665582 }, { "content": "/// Handles actually packing (copy/reformat) of ParsedLines and\n\n/// writing them to a table 
writer.\n\nstruct MeasurementWriter<'a> {\n\n settings: ConversionSettings,\n\n\n\n /// Schema which describes the lines being written\n\n schema: Schema,\n\n\n\n /// The sink to which tables are being written\n\n table_writer: Box<dyn IOxTableWriter>,\n\n\n\n /// lines buffered\n\n write_buffer: Vec<ParsedLine<'a>>,\n\n}\n\n\n\n/// Tracks the conversation state for each measurement: either in\n\n/// \"UnknownSchema\" mode when the schema is still unknown or \"KnownSchema\" mode\n\n/// once the schema is known.\n", "file_path": "ingest/src/lib.rs", "rank": 33, "score": 139443.56998395358 }, { "content": "#[derive(Debug)]\n\nstruct Tracker<T> {\n\n data: T,\n\n abort: future::AbortHandle,\n\n}\n\n\n", "file_path": "server/src/tracker.rs", "rank": 34, "score": 139150.75730847009 }, { "content": "// A newtype struct to provide conversion into tonic::Status\n\nstruct EncodeError(prost::EncodeError);\n\n\n\nimpl From<EncodeError> for tonic::Status {\n\n fn from(error: EncodeError) -> Self {\n\n error!(error=%error.0, \"failed to serialise error response details\");\n\n tonic::Status::unknown(format!(\"failed to serialise server error: {}\", error.0))\n\n }\n\n}\n\n\n\nimpl From<prost::EncodeError> for EncodeError {\n\n fn from(e: prost::EncodeError) -> Self {\n\n Self(e)\n\n }\n\n}\n\n\n", "file_path": "google_types/src/lib.rs", "rank": 35, "score": 138938.34819730843 }, { "content": "#[derive(Debug)]\n\nstruct ParquetFileWriterSource {\n\n output_filename: String,\n\n compression_level: CompressionLevel,\n\n // This creator only supports a single filename at this time\n\n // so track if it has alread been made, for errors\n\n made_file: bool,\n\n}\n\n\n\nimpl IOxTableWriterSource for ParquetFileWriterSource {\n\n // Returns a `IOxTableWriter suitable for writing data from packers.\n\n fn next_writer(&mut self, schema: &Schema) -> Result<Box<dyn IOxTableWriter>, TableError> {\n\n let measurement = schema\n\n .measurement()\n\n .cloned()\n\n 
.context(InternalMeasurementNotSpecified)?;\n\n\n\n if self.made_file {\n\n return MultipleMeasurementsToSingleFile {\n\n new_measurement_name: measurement,\n\n }\n", "file_path": "src/commands/convert.rs", "rank": 36, "score": 135623.98845012978 }, { "content": "#[derive(Debug)]\n\nstruct ParquetDirectoryWriterSource {\n\n compression_level: CompressionLevel,\n\n output_dir_path: PathBuf,\n\n}\n\n\n\nimpl IOxTableWriterSource for ParquetDirectoryWriterSource {\n\n /// Returns a `IOxTableWriter` suitable for writing data from packers.\n\n /// named in the template of <measurement.parquet>\n\n fn next_writer(&mut self, schema: &Schema) -> Result<Box<dyn IOxTableWriter>, TableError> {\n\n let mut output_file_path: PathBuf = self.output_dir_path.clone();\n\n\n\n let measurement = schema\n\n .measurement()\n\n .context(InternalMeasurementNotSpecified)?;\n\n output_file_path.push(measurement);\n\n output_file_path.set_extension(\"parquet\");\n\n\n\n let output_file = fs::File::create(&output_file_path).map_err(|e| {\n\n TableError::from_io(\n\n e,\n", "file_path": "src/commands/convert.rs", "rank": 37, "score": 135623.98845012978 }, { "content": "#[test]\n\nfn event_fields_errors() {\n\n let capture = CapturedWriter::new();\n\n\n\n let err: Box<dyn Error + 'static> =\n\n io::Error::new(io::ErrorKind::Other, \"shaving yak failed!\").into();\n\n\n\n error!(the_error = err.as_ref(), \"This is an error message\");\n\n let expected = vec![\n\n \"level=error msg=\\\"This is an error message\\\" the_error=\\\"\\\\\\\"Custom { kind: Other, error: \\\\\\\\\\\\\\\"shaving yak failed!\\\\\\\\\\\\\\\" }\\\\\\\"\\\" the_error.display=\\\"shaving yak failed!\\\" target=\\\"logging\\\" location=\\\"logfmt/tests/logging.rs:99\\\" time=1612187170712947000\",\n\n ];\n\n assert_logs!(capture, expected);\n\n}\n\n\n", "file_path": "logfmt/tests/logging.rs", "rank": 38, "score": 133361.57459438374 }, { "content": "#[test]\n\nfn event_fields_structs() {\n\n let capture = 
CapturedWriter::new();\n\n let my_struct = TestDebugStruct::new();\n\n\n\n info!(s = ?my_struct, \"This is an info message\");\n\n\n\n let expected = vec![\n\n \"level=info msg=\\\"This is an info message\\\" s=\\\"TestDebugStruct { b: true, s: \\\\\\\"The String\\\\\\\" }\\\" target=\\\"logging\\\" location=\\\"logfmt/tests/logging.rs:111\\\" time=1612187170712937000\",\n\n ];\n\n\n\n assert_logs!(capture, expected);\n\n}\n\n\n", "file_path": "logfmt/tests/logging.rs", "rank": 39, "score": 133346.11170107662 }, { "content": "#[derive(Debug)]\n\nstruct TrackerContextInner<T> {\n\n id: AtomicUsize,\n\n trackers: Mutex<HashMap<TrackerId, Tracker<T>>>,\n\n}\n\n\n\n/// Allows tracking the lifecycle of futures registered by\n\n/// `TrackedFutureExt::track` with an accompanying metadata payload of type T\n\n///\n\n/// Additionally can trigger graceful termination of registered futures\n\n#[derive(Debug)]\n\npub struct TrackerRegistry<T> {\n\n inner: Arc<TrackerContextInner<T>>,\n\n}\n\n\n\n// Manual Clone to workaround https://github.com/rust-lang/rust/issues/26925\n\nimpl<T> Clone for TrackerRegistry<T> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n inner: Arc::clone(&self.inner),\n\n }\n", "file_path": "server/src/tracker.rs", "rank": 40, "score": 132474.0095750651 }, { "content": "/// Print parquet metadata that can be read from `input`, with a total\n\n/// size of `input_size` byes\n\npub fn print_parquet_metadata<R: 'static>(input: R) -> Result<()>\n\nwhere\n\n R: ChunkReader,\n\n{\n\n let input_len = input.len();\n\n\n\n let reader = SerializedFileReader::new(input).context(ParquetLibraryError {\n\n message: \"Creating parquet reader\",\n\n })?;\n\n\n\n let parquet_metadata = reader.metadata();\n\n let file_metadata = parquet_metadata.file_metadata();\n\n let num_columns = file_metadata.schema_descr().num_columns();\n\n\n\n println!(\"Parquet file size: {} bytes\", input_len);\n\n println!(\n\n \"Parquet file Schema: {}\",\n\n 
parquet_schema_as_string(file_metadata.schema()).trim_end()\n\n );\n\n println!(\"Parquet file metadata:\");\n", "file_path": "ingest/src/parquet/metadata.rs", "rank": 41, "score": 131665.05689612037 }, { "content": "fn benchmark_row_ids(\n\n c: &mut Criterion,\n\n benchmark_group_name: &str,\n\n batch_sizes: &[usize],\n\n cardinalities: &[usize],\n\n) {\n\n let mut group = c.benchmark_group(benchmark_group_name);\n\n for &batch_size in batch_sizes {\n\n for &cardinality in cardinalities {\n\n let mut input = mem_qe::encoding::DictionaryRLE::new();\n\n let values = batch_size / cardinality;\n\n for i in 0..cardinality {\n\n input.push_additional(Some(i.to_string()), values as u64);\n\n }\n\n group.throughput(Throughput::Bytes(batch_size as u64));\n\n\n\n group.bench_with_input(\n\n BenchmarkId::from_parameter(format!(\"{:?}_{:?}\", batch_size, cardinality)),\n\n &input,\n\n |b, input| {\n", "file_path": "mem_qe/benches/encoding.rs", "rank": 42, "score": 130167.62304861067 }, { "content": "fn set_integer_encoding(\n\n influxdb_column_type: InfluxColumnType,\n\n compression_level: CompressionLevel,\n\n col_path: ColumnPath,\n\n builder: WriterPropertiesBuilder,\n\n) -> WriterPropertiesBuilder {\n\n match compression_level {\n\n CompressionLevel::Maximum => {\n\n debug!(\n\n \"Setting encoding of {:?} col {} to DELTA_BINARY_PACKED (Maximum)\",\n\n influxdb_column_type, col_path\n\n );\n\n builder\n\n .set_column_encoding(col_path.clone(), Encoding::DELTA_BINARY_PACKED)\n\n .set_column_dictionary_enabled(col_path, false)\n\n }\n\n CompressionLevel::Compatibility => {\n\n debug!(\n\n \"Setting encoding of {:?} col {} to PLAIN/RLE (Compatibility)\",\n\n influxdb_column_type, col_path\n\n );\n\n builder\n\n .set_column_encoding(col_path.clone(), Encoding::PLAIN)\n\n .set_column_dictionary_enabled(col_path, true)\n\n }\n\n }\n\n}\n\n\n", "file_path": "ingest/src/parquet/writer.rs", "rank": 43, "score": 130137.00923744359 }, { "content": "// base location in object store 
for a given database name\n\nfn database_object_store_path(\n\n writer_id: u32,\n\n database_name: &DatabaseName<'_>,\n\n store: &ObjectStore,\n\n) -> object_store::path::Path {\n\n let mut path = store.new_path();\n\n path.push_dir(format!(\"{}\", writer_id));\n\n path.push_dir(database_name.to_string());\n\n path\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use data_types::{data::lines_to_replicated_write, database_rules::DatabaseRules};\n\n use influxdb_line_protocol::parse_lines;\n\n use object_store::memory::InMemory;\n\n\n\n #[test]\n\n fn append_increments_current_size_and_uses_existing_segment() {\n", "file_path": "server/src/buffer.rs", "rank": 44, "score": 129863.15751914424 }, { "content": "/// Creates parquet writers that write to /dev/null\n\nstruct IgnoringParquetDirectoryWriterSource {}\n\n\n\nimpl IOxTableWriterSource for IgnoringParquetDirectoryWriterSource {\n\n fn next_writer(&mut self, schema: &Schema) -> Result<Box<dyn IOxTableWriter>, TableError> {\n\n let dev_null = IgnoringWriteStream {};\n\n let writer = IOxParquetTableWriter::new(schema, CompressionLevel::Compatibility, dev_null)\n\n .expect(\"Creating table writer\");\n\n Ok(Box::new(writer))\n\n }\n\n}\n\n\n", "file_path": "benches/line_protocol_to_parquet.rs", "rank": 45, "score": 129754.08243506234 }, { "content": "struct MeasurementForMax {}\n\n#[async_trait]\n\nimpl DBSetup for MeasurementForMax {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n\n\n let lp_lines1 = vec![\n\n \"h2o,state=MA,city=Cambridge f=8.0,i=8i,b=true,s=\\\"c\\\" 1000\",\n\n \"h2o,state=MA,city=Cambridge f=7.0,i=7i,b=false,s=\\\"d\\\" 2000\",\n\n \"h2o,state=MA,city=Cambridge f=6.0,i=6i,b=true,s=\\\"a\\\" 3000\",\n\n ];\n\n let lp_lines2 = vec![\"h2o,state=MA,city=Cambridge f=5.0,i=5i,b=true,s=\\\"z\\\" 4000\"];\n\n\n\n make_two_chunk_scenarios(partition_key, &lp_lines1.join(\"\\n\"), &lp_lines2.join(\"\\n\")).await\n\n 
}\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_grouped_series_set_plan_max() {\n\n let predicate = PredicateBuilder::default()\n", "file_path": "server/src/query_tests/influxrpc/read_group.rs", "rank": 46, "score": 129491.06688484787 }, { "content": "struct MeasurementForMin {}\n\n#[async_trait]\n\nimpl DBSetup for MeasurementForMin {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n\n\n let lp_lines1 = vec![\n\n \"h2o,state=MA,city=Cambridge f=8.0,i=8i,b=false,s=\\\"c\\\" 1000\",\n\n \"h2o,state=MA,city=Cambridge f=7.0,i=7i,b=false,s=\\\"a\\\" 2000\",\n\n ];\n\n let lp_lines2 = vec![\n\n \"h2o,state=MA,city=Cambridge f=6.0,i=6i,b=true,s=\\\"z\\\" 3000\",\n\n \"h2o,state=MA,city=Cambridge f=5.0,i=5i,b=true,s=\\\"c\\\" 4000\",\n\n ];\n\n\n\n make_two_chunk_scenarios(partition_key, &lp_lines1.join(\"\\n\"), &lp_lines2.join(\"\\n\")).await\n\n }\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "server/src/query_tests/influxrpc/read_group.rs", "rank": 47, "score": 129491.06688484787 }, { "content": "struct MeasurementForSelectors {}\n\n#[async_trait]\n\nimpl DBSetup for MeasurementForSelectors {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n\n\n let lp_lines1 = vec![\"h2o,state=MA,city=Cambridge f=8.0,i=8i,b=true,s=\\\"d\\\" 1000\"];\n\n let lp_lines2 = vec![\n\n \"h2o,state=MA,city=Cambridge f=7.0,i=7i,b=true,s=\\\"c\\\" 2000\",\n\n \"h2o,state=MA,city=Cambridge f=6.0,i=6i,b=false,s=\\\"b\\\" 3000\",\n\n \"h2o,state=MA,city=Cambridge f=5.0,i=5i,b=false,s=\\\"a\\\" 4000\",\n\n ];\n\n\n\n make_two_chunk_scenarios(partition_key, &lp_lines1.join(\"\\n\"), &lp_lines2.join(\"\\n\")).await\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_grouped_series_set_plan_first() {\n\n let predicate = PredicateBuilder::default()\n", "file_path": "server/src/query_tests/influxrpc/read_group.rs", "rank": 48, "score": 129491.06688484787 }, { "content": "/// Sort a slice of `Packers` based on the provided 
column indexes.\n\n///\n\n/// All chosen columns will be sorted in ascending order; the sort is *not*\n\n/// stable.\n\npub fn sort(packers: &mut [Packers], sort_by: &[usize]) -> Result<(), Error> {\n\n if packers.is_empty() || sort_by.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n ensure!(sort_by.len() <= packers.len(), TooManyColumns);\n\n\n\n let mut col_set = BTreeSet::new();\n\n for &index in sort_by {\n\n ensure!(col_set.insert(index), RepeatedColumns { index });\n\n }\n\n\n\n // TODO(edd): map first/last still unstable https://github.com/rust-lang/rust/issues/62924\n\n if let Some(index) = col_set.range(packers.len()..).next() {\n\n return OutOfBoundsColumn { index: *index }.fail();\n\n }\n\n\n\n // Hoare's partitioning scheme can have quadratic runtime behaviour in\n\n // the worst case when the inputs are already sorted. To avoid this, a\n\n // check is added for large inputs.\n", "file_path": "packers/src/sorter.rs", "rank": 49, "score": 128719.10004177959 }, { "content": "/// parses the the measurement, field key and tag\n\n/// set from a tsm index key\n\n///\n\n/// It does not provide access to the org and bucket ids on the key, these can\n\n/// be accessed via org_id() and bucket_id() respectively.\n\n///\n\n/// Loosely based on [points.go](https://github.com/influxdata/influxdb/blob/751d70a213e5fdae837eda13d7ecb37763e69abb/models/points.go#L462)\n\n///\n\n/// The format looks roughly like:\n\n///\n\n/// <org_id bucket_id>,\\x00=<measurement>,<tag_keys_str>,\\xff=<field_key_str>#!\n\n/// ~#<field_key_str>\n\n///\n\n/// For example:\n\n/// <org_id bucket_id>,\\x00=http_api_request_duration_seconds,status=2XX,\\\n\n/// xff=sum#!~#sum\n\n///\n\n/// measurement = \"http_api_request\"\n\n/// tags = [(\"status\", \"2XX\")]\n\n/// field = \"sum\"\n\npub fn parse_tsm_key(key: &[u8]) -> Result<ParsedTSMKey, Error> {\n\n // Wrap in an internal function to translate error types and add key context\n\n parse_tsm_key_internal(key).context(ParsingTSMKey {\n\n key: 
String::from_utf8_lossy(key),\n\n })\n\n}\n\n\n", "file_path": "influxdb_tsm/src/key.rs", "rank": 50, "score": 127316.77797521849 }, { "content": "fn benchmark_row_ids_roaring(\n\n c: &mut Criterion,\n\n benchmark_group_name: &str,\n\n batch_sizes: &[usize],\n\n cardinalities: &[usize],\n\n) {\n\n let mut group = c.benchmark_group(benchmark_group_name);\n\n for &batch_size in batch_sizes {\n\n for &cardinality in cardinalities {\n\n let mut input = mem_qe::encoding::DictionaryRLE::new();\n\n let values = batch_size / cardinality;\n\n for i in 0..cardinality {\n\n input.push_additional(Some(i.to_string()), values as u64);\n\n }\n\n group.throughput(Throughput::Bytes(batch_size as u64));\n\n\n\n group.bench_with_input(\n\n BenchmarkId::from_parameter(format!(\"{:?}_{:?}\", batch_size, cardinality)),\n\n &input,\n\n |b, input| {\n", "file_path": "mem_qe/benches/encoding.rs", "rank": 51, "score": 127145.754946426 }, { "content": "/// Calculate storage statistics for a particular parquet \"file\" that can\n\n/// be read from `input`, with a total size of `input_size` byes\n\n///\n\n/// Returns a `FileStats` object representing statistics for all\n\n/// columns across all column chunks.\n\npub fn file_stats<R: 'static>(input: R) -> Result<FileStats>\n\nwhere\n\n R: ChunkReader + Name,\n\n{\n\n let mut file_stats_builder = FileStatsBuilder::new(&input.name(), input.len());\n\n let reader = SerializedFileReader::new(input).context(ParquetLibraryError {\n\n message: \"Creating parquet reader\",\n\n })?;\n\n\n\n let mut stats_builders: BTreeMap<String, ColumnStatsBuilder> = BTreeMap::new();\n\n\n\n let parquet_metadata = reader.metadata();\n\n for (rg_idx, rg_metadata) in parquet_metadata.row_groups().iter().enumerate() {\n\n debug!(\n\n \"Looking at Row Group [{}] (total uncompressed byte size {})\",\n\n rg_idx,\n\n rg_metadata.total_byte_size()\n\n );\n\n\n\n for (cc_idx, cc_metadata) in rg_metadata.columns().iter().enumerate() {\n", "file_path": 
"ingest/src/parquet/stats.rs", "rank": 52, "score": 126942.28731792045 }, { "content": "struct MeasurementForGroupKeys {}\n\n#[async_trait]\n\nimpl DBSetup for MeasurementForGroupKeys {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n\n\n let lp_lines1 = vec![\n\n \"h2o,state=MA,city=Cambridge temp=80 50\",\n\n \"h2o,state=MA,city=Cambridge temp=81 100\",\n\n \"h2o,state=MA,city=Cambridge temp=82 200\",\n\n ];\n\n let lp_lines2 = vec![\n\n \"h2o,state=MA,city=Boston temp=70 300\",\n\n \"h2o,state=MA,city=Boston temp=71 400\",\n\n \"h2o,state=CA,city=LA temp=90,humidity=10 500\",\n\n \"h2o,state=CA,city=LA temp=91,humidity=11 600\",\n\n ];\n\n\n\n make_two_chunk_scenarios(partition_key, &lp_lines1.join(\"\\n\"), &lp_lines2.join(\"\\n\")).await\n\n }\n", "file_path": "server/src/query_tests/influxrpc/read_group.rs", "rank": 53, "score": 126827.35087636433 }, { "content": "struct OneMeasurementForAggs {}\n\n#[async_trait]\n\nimpl DBSetup for OneMeasurementForAggs {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n\n\n let lp_lines1 = vec![\n\n \"h2o,state=MA,city=Boston temp=70.4 100\",\n\n \"h2o,state=MA,city=Boston temp=72.4 250\",\n\n ];\n\n let lp_lines2 = vec![\n\n \"h2o,state=CA,city=LA temp=90.0 200\",\n\n \"h2o,state=CA,city=LA temp=90.0 350\",\n\n ];\n\n\n\n make_two_chunk_scenarios(partition_key, &lp_lines1.join(\"\\n\"), &lp_lines2.join(\"\\n\")).await\n\n }\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "server/src/query_tests/influxrpc/read_group.rs", "rank": 54, "score": 126827.35087636433 }, { "content": "struct AnotherMeasurementForAggs {}\n\n#[async_trait]\n\nimpl DBSetup for AnotherMeasurementForAggs {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n\n\n let lp_lines1 = vec![\n\n \"h2o,state=MA,city=Cambridge temp=80 50\",\n\n \"h2o,state=MA,city=Cambridge temp=81 100\",\n\n \"h2o,state=MA,city=Cambridge temp=82 200\",\n\n 
\"h2o,state=MA,city=Boston temp=70 300\",\n\n ];\n\n let lp_lines2 = vec![\n\n \"h2o,state=MA,city=Boston temp=71 400\",\n\n \"h2o,state=CA,city=LA temp=90,humidity=10 500\",\n\n \"h2o,state=CA,city=LA temp=91,humidity=11 600\",\n\n ];\n\n\n\n make_two_chunk_scenarios(partition_key, &lp_lines1.join(\"\\n\"), &lp_lines2.join(\"\\n\")).await\n\n }\n", "file_path": "server/src/query_tests/influxrpc/read_group.rs", "rank": 55, "score": 126827.35087636433 }, { "content": "/// Creates the tokio runtime for executing IOx\n\n///\n\n/// if nthreads is none, uses the default scheduler\n\n/// otherwise, creates a scheduler with the number of threads\n\nfn get_runtime(num_threads: Option<usize>) -> Result<Runtime, std::io::Error> {\n\n // NOTE: no log macros will work here!\n\n //\n\n // That means use eprintln!() instead of error!() and so on. The log emitter\n\n // requires a running tokio runtime and is initialised after this function.\n\n\n\n use tokio::runtime::Builder;\n\n let kind = std::io::ErrorKind::Other;\n\n match num_threads {\n\n None => Runtime::new(),\n\n Some(num_threads) => {\n\n println!(\n\n \"Setting number of threads to '{}' per command line request\",\n\n num_threads\n\n );\n\n\n\n match num_threads {\n\n 0 => {\n\n let msg = format!(\n\n \"Invalid num-threads: '{}' must be greater than zero\",\n", "file_path": "src/main.rs", "rank": 56, "score": 126492.08416789626 }, { "content": "pub fn snapshot_chunk<T>(\n\n metadata_path: object_store::path::Path,\n\n data_path: object_store::path::Path,\n\n store: Arc<ObjectStore>,\n\n partition_key: &str,\n\n chunk: Arc<T>,\n\n notify: Option<oneshot::Sender<()>>,\n\n) -> Result<Arc<Snapshot<T>>>\n\nwhere\n\n T: Send + Sync + 'static + PartitionChunk,\n\n{\n\n let table_stats = chunk\n\n .table_stats()\n\n .map_err(|e| Box::new(e) as _)\n\n .context(PartitionError)?;\n\n\n\n let snapshot = Snapshot::new(\n\n partition_key.to_string(),\n\n metadata_path,\n\n data_path,\n", "file_path": "server/src/snapshot.rs", 
"rank": 57, "score": 125188.78667825655 }, { "content": "/// Factory function for creating the UDA function for DataFusion\n\nfn make_uda<SELECTOR>(name: &'static str, output: SelectorOutput) -> AggregateUDF\n\nwhere\n\n SELECTOR: Selector + 'static,\n\n{\n\n let value_data_type = SELECTOR::value_data_type();\n\n let input_signature = Signature::Exact(vec![value_data_type.clone(), DataType::Int64]);\n\n\n\n let state_type = Arc::new(vec![value_data_type.clone(), DataType::Int64]);\n\n let state_type_factory: StateTypeFunction = Arc::new(move |_| Ok(Arc::clone(&state_type)));\n\n\n\n let factory: AccumulatorFunctionImplementation =\n\n Arc::new(move || Ok(Box::new(SelectorAccumulator::<SELECTOR>::new(output))));\n\n\n\n let return_type = Arc::new(output.return_type(&value_data_type));\n\n let return_type_func: ReturnTypeFunction = Arc::new(move |_| Ok(Arc::clone(&return_type)));\n\n\n\n AggregateUDF::new(\n\n name,\n\n &input_signature,\n\n &return_type_func,\n\n &factory,\n\n &state_type_factory,\n\n )\n\n}\n\n\n\n/// Structure that implements the Accumultator trait for DataFusion\n\n/// and processes (value, timestamp) pair and computes values\n", "file_path": "query/src/func/selectors.rs", "rank": 58, "score": 124928.04797759143 }, { "content": "/// Converts a [`query::Predicate`] into [`ChunkPredicate`],\n\n/// suitable for evaluating on the MutableBuffer.\n\npub fn to_mutable_buffer_predicate(\n\n chunk: impl AsRef<Chunk>,\n\n predicate: &Predicate,\n\n) -> Result<ChunkPredicate> {\n\n let predicate = chunk\n\n .as_ref()\n\n .predicate_builder()?\n\n .table_names(predicate.table_names.as_ref())?\n\n .field_names(predicate.field_columns.as_ref())?\n\n .range(predicate.range)?\n\n // it would be nice to avoid cloning all the exprs here.\n\n .exprs(predicate.exprs.clone())?\n\n .build();\n\n\n\n Ok(predicate)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n", "file_path": "server/src/db/pred.rs", "rank": 59, "score": 124347.73206362699 }, { 
"content": "struct MeasurementForWindowAggregate {}\n\n#[async_trait]\n\nimpl DBSetup for MeasurementForWindowAggregate {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n\n\n let lp_lines1 = vec![\n\n \"h2o,state=MA,city=Boston temp=70.0 100\",\n\n \"h2o,state=MA,city=Boston temp=71.0 200\",\n\n \"h2o,state=MA,city=Boston temp=72.0 300\",\n\n \"h2o,state=MA,city=Boston temp=73.0 400\",\n\n \"h2o,state=MA,city=Boston temp=74.0 500\",\n\n \"h2o,state=MA,city=Cambridge temp=80.0 100\",\n\n \"h2o,state=MA,city=Cambridge temp=81.0 200\",\n\n ];\n\n let lp_lines2 = vec![\n\n \"h2o,state=MA,city=Cambridge temp=82.0 300\",\n\n \"h2o,state=MA,city=Cambridge temp=83.0 400\",\n\n \"h2o,state=MA,city=Cambridge temp=84.0 500\",\n\n \"h2o,state=CA,city=LA temp=90.0 100\",\n", "file_path": "server/src/query_tests/influxrpc/read_window_aggregate.rs", "rank": 60, "score": 124318.42753685675 }, { "content": "/// Builds the path for a given segment id, given the root object store path.\n\n/// The path should be where the root of the database is (e.g. 
1/my_db/).\n\nfn object_store_path_for_segment<P: ObjectStorePath>(root_path: &P, segment_id: u64) -> Result<P> {\n\n ensure!(\n\n segment_id < MAX_SEGMENT_ID && segment_id > 0,\n\n SegmentIdOutOfBounds\n\n );\n\n\n\n let millions_place = segment_id / 1_000_000;\n\n let millions = millions_place * 1_000_000;\n\n let thousands_place = (segment_id - millions) / 1_000;\n\n let thousands = thousands_place * 1_000;\n\n let hundreds_place = segment_id - millions - thousands;\n\n\n\n let mut path = root_path.clone();\n\n path.push_all_dirs(&[\n\n WAL_DIR,\n\n &format!(\"{:03}\", millions_place),\n\n &format!(\"{:03}\", thousands_place),\n\n ]);\n\n path.set_file_name(format!(\"{:03}{}\", hundreds_place, SEGMENT_FILE_EXTENSION));\n\n\n\n Ok(path)\n\n}\n\n\n", "file_path": "server/src/buffer.rs", "rank": 61, "score": 123861.95093020223 }, { "content": "/// Sort a slice of `Vector` based on the provided column indexes.\n\n///\n\n/// All chosen columns will be sorted in ascending order; the sort is *not*\n\n/// stable.\n\npub fn sort(vectors: &mut [column::Vector], sort_by: &[usize]) -> Result<(), Error> {\n\n if vectors.is_empty() || sort_by.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n ensure!(sort_by.len() <= vectors.len(), TooManyColumns);\n\n\n\n let mut col_set = BTreeSet::new();\n\n for &index in sort_by {\n\n ensure!(col_set.insert(index), RepeatedColumns { index });\n\n }\n\n\n\n // TODO(edd): map first/last still unstable https://github.com/rust-lang/rust/issues/62924\n\n if let Some(index) = col_set.range(vectors.len()..).next() {\n\n return OutOfBoundsColumn { index: *index }.fail();\n\n }\n\n\n\n // Hoare's partitioning scheme can have quadratic runtime behaviour in\n\n // the worst case when the inputs are already sorted. 
To avoid this, a\n\n // check is added for large inputs.\n", "file_path": "mem_qe/src/sorter.rs", "rank": 62, "score": 122223.57053931097 }, { "content": "fn group_description_to_frames(group_description: GroupDescription) -> Result<Vec<Frame>> {\n\n // split key=value pairs into two separate vectors\n\n let (tag_keys, partition_key_vals): (Vec<Vec<u8>>, Vec<Vec<u8>>) = group_description\n\n .tags\n\n .into_iter()\n\n .map(|(k, v)| (k.bytes().collect(), v.bytes().collect()))\n\n .unzip();\n\n\n\n let group_frame = GroupFrame {\n\n tag_keys,\n\n partition_key_vals,\n\n };\n\n\n\n let data = Some(Data::Group(group_frame));\n\n\n\n Ok(vec![Frame { data }])\n\n}\n\n\n", "file_path": "src/influxdb_ioxd/rpc/storage/data.rs", "rank": 63, "score": 122054.6320826935 }, { "content": "struct MeasurementForWindowAggregateMonths {}\n\n#[async_trait]\n\nimpl DBSetup for MeasurementForWindowAggregateMonths {\n\n async fn make(&self) -> Vec<DBScenario> {\n\n // Note the lines are written into 4 different partititions (as we are\n\n // partitioned by day, effectively)\n\n let lp_lines = vec![\n\n \"h2o,state=MA,city=Boston temp=70.0 1583020800000000000\", // 2020-03-01T00:00:00Z\n\n \"h2o,state=MA,city=Boston temp=71.0 1583107920000000000\", // 2020-03-02T00:12:00Z\n\n \"h2o,state=MA,city=Boston temp=72.0 1585699200000000000\", // 2020-04-01T00:00:00Z\n\n \"h2o,state=MA,city=Boston temp=73.0 1585785600000000000\", // 2020-04-02T00:00:00Z\n\n ];\n\n // partition keys are: [\"2020-03-02T00\", \"2020-03-01T00\", \"2020-04-01T00\",\n\n // \"2020-04-02T00\"]\n\n\n\n let db = make_db();\n\n let mut writer = TestLPWriter::default();\n\n let data = lp_lines.join(\"\\n\");\n\n writer.write_lp_string(&db, &data).await.unwrap();\n\n let scenario1 = DBScenario {\n", "file_path": "server/src/query_tests/influxrpc/read_window_aggregate.rs", "rank": 64, "score": 121951.18497923101 }, { "content": "// simulates the speed of marshalling the bytes into something like the mutable\n\n// buffer or read 
buffer, which won't use the replicated write structure anyway\n\nfn bytes_into_struct(c: &mut Criterion) {\n\n run_group(\"bytes_into_struct\", c, |lines, rules, config, b| {\n\n let write = lines_to_rw(0, 0, &lines, rules);\n\n assert_eq!(write.entry_count(), config.partition_count);\n\n let data = write.bytes();\n\n\n\n b.iter(|| {\n\n let mut db = Db::default();\n\n db.deserialize_write(data);\n\n assert_eq!(db.partition_count(), config.partition_count);\n\n assert_eq!(db.row_count(), config.line_count);\n\n assert_eq!(db.measurement_count(), config.table_count);\n\n assert_eq!(db.tag_cardinality(), config.tag_cardinality);\n\n });\n\n });\n\n}\n\n\n", "file_path": "data_types/benches/benchmark.rs", "rank": 65, "score": 120530.21074088878 }, { "content": "/// Load the config if `server` was not specified on the command line\n\n/// (from environment variables and default)\n\n///\n\n/// This pulls in config from the following sources, in order of precedence:\n\n///\n\n/// - user set environment variables\n\n/// - .env file contents\n\n/// - pre-configured default values\n\npub fn load_config() -> Box<Config> {\n\n // Load the Config struct - this pulls in any envs set by the user or\n\n // sourced above, and applies any defaults.\n\n //\n\n\n\n //let args = std::env::args().filter(|arg| arg != \"server\");\n\n Box::new(Config::from_iter(strip_server(std::env::args()).iter()))\n\n}\n\n\n", "file_path": "src/commands/server.rs", "rank": 66, "score": 120280.87499613756 }, { "content": "fn get_database_name(input: &impl GrpcInputs) -> Result<DatabaseName<'static>, Status> {\n\n org_and_bucket_to_database(input.org_id()?.to_string(), &input.bucket_name()?)\n\n .map_err(|e| Status::internal(e.to_string()))\n\n}\n\n\n\n// The following code implements the business logic of the requests as\n\n// methods that return Results with module specific Errors (and thus\n\n// can use ?, etc). 
The trait implemententations then handle mapping\n\n// to the appropriate tonic Status\n\n\n\n/// Gathers all measurement names that have data in the specified\n\n/// (optional) range\n\nasync fn measurement_name_impl<T>(\n\n db_store: Arc<T>,\n\n db_name: DatabaseName<'static>,\n\n range: Option<TimestampRange>,\n\n) -> Result<StringValuesResponse>\n\nwhere\n\n T: DatabaseStore + 'static,\n\n{\n", "file_path": "src/influxdb_ioxd/rpc/storage/service.rs", "rank": 67, "score": 119483.31232422107 }, { "content": "/// Used for testing: create a Database with a local store\n\npub fn make_db() -> Db {\n\n let name = \"test_db\";\n\n Db::new(\n\n DatabaseRules::new(),\n\n Some(MutableBufferDb::new(name)),\n\n read_buffer::Database::new(),\n\n None, // wal buffer\n\n )\n\n}\n", "file_path": "server/src/query_tests/utils.rs", "rank": 68, "score": 119356.2366484467 }, { "content": "/// Strip everything prior to the \"server\" portion of the args so the generated\n\n/// Clap instance plays nicely with the subcommand bits in main.\n\nfn strip_server(args: impl Iterator<Item = String>) -> Vec<String> {\n\n let mut seen_server = false;\n\n args.enumerate()\n\n .filter_map(|(i, arg)| {\n\n if i != 0 && !seen_server {\n\n if arg == \"server\" {\n\n seen_server = true;\n\n }\n\n None\n\n } else {\n\n Some(arg)\n\n }\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n\narg_enum! 
{\n\n #[derive(Debug, Copy, Clone, PartialEq)]\n\n pub enum ObjectStore {\n\n Memory,\n", "file_path": "src/commands/server.rs", "rank": 70, "score": 119053.71408175334 }, { "content": "pub fn make_server() -> IOxTestingServer<impl IOxTesting> {\n\n IOxTestingServer::new(IOxTestingService {})\n\n}\n", "file_path": "src/influxdb_ioxd/rpc/testing.rs", "rank": 71, "score": 118848.9291470486 }, { "content": "// decode_uncompressed writes the binary encoded values in src into dst.\n\nfn decode_uncompressed(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() || src.len() & 0x7 != 0 {\n\n return Err(From::from(\"invalid uncompressed block length\"));\n\n }\n\n\n\n let count = src.len() / 8;\n\n if dst.capacity() < count {\n\n dst.reserve_exact(count - dst.capacity());\n\n }\n\n let mut i = 0;\n\n let mut prev = 0;\n\n let mut buf: [u8; 8] = [0; 8];\n\n while i < src.len() {\n\n buf.copy_from_slice(&src[i..i + 8]);\n\n prev += i64::from_be_bytes(buf);\n\n dst.push(prev); // N.B - signed integer...\n\n i += 8;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/timestamp.rs", "rank": 72, "score": 114543.54619983003 }, { "content": "// decode_rle decodes an RLE encoded slice containing only unsigned into the\n\n// destination vector.\n\nfn decode_rle(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.len() < 8 {\n\n return Err(From::from(\"not enough data to decode using RLE\"));\n\n }\n\n\n\n let mut i = 8; // Skip first value\n\n let (delta, n) = u64::decode_var(&src[i..]);\n\n if n == 0 {\n\n return Err(From::from(\"unable to decode delta\"));\n\n }\n\n i += n;\n\n\n\n let (count, n) = usize::decode_var(&src[i..]);\n\n if n == 0 {\n\n return Err(From::from(\"unable to decode count\"));\n\n }\n\n\n\n if dst.capacity() < count {\n\n dst.reserve_exact(count - dst.capacity());\n\n }\n", "file_path": "influxdb_tsm/src/encoders/integer.rs", "rank": 73, "score": 114543.54619983003 }, { "content": "fn 
decode_simple8b(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.len() < 8 {\n\n return Err(From::from(\"not enough data to decode packed integer.\"));\n\n }\n\n\n\n // TODO(edd): pre-allocate res by counting bytes in encoded slice?\n\n let mut res = vec![];\n\n let mut buf: [u8; 8] = [0; 8];\n\n buf.copy_from_slice(&src[0..8]);\n\n dst.push(zig_zag_decode(u64::from_be_bytes(buf)));\n\n\n\n simple8b::decode(&src[8..], &mut res);\n\n // TODO(edd): fix this. It's copying, which is slowwwwwwwww.\n\n let mut next = dst[0];\n\n for v in &res {\n\n next += zig_zag_decode(*v);\n\n dst.push(next);\n\n }\n\n Ok(())\n\n}\n", "file_path": "influxdb_tsm/src/encoders/integer.rs", "rank": 74, "score": 114543.54619983003 }, { "content": "// decode_rle decodes an RLE encoded slice containing only unsigned into the\n\n// destination vector.\n\nfn decode_rle(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.len() < 9 {\n\n return Err(From::from(\"not enough data to decode using RLE\"));\n\n }\n\n\n\n // calculate the scaler from the lower 4 bits of the first byte.\n\n let scaler = 10_u64.pow((src[0] & 0b0000_1111) as u32);\n\n let mut i = 1;\n\n\n\n // TODO(edd): this should be possible to do in-place without copy.\n\n let mut a: [u8; 8] = [0; 8];\n\n a.copy_from_slice(&src[i..i + 8]);\n\n i += 8;\n\n let (mut delta, n) = u64::decode_var(&src[i..]);\n\n if n == 0 {\n\n return Err(From::from(\"unable to decode delta\"));\n\n }\n\n i += n;\n\n delta *= scaler;\n\n\n", "file_path": "influxdb_tsm/src/encoders/timestamp.rs", "rank": 75, "score": 114543.54619983003 }, { "content": "fn decode_simple8b(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.len() < 9 {\n\n return Err(From::from(\"not enough data to decode packed timestamp\"));\n\n }\n\n\n\n let scaler = 10_u64.pow((src[0] & 0b0000_1111) as u32);\n\n\n\n // TODO(edd): pre-allocate res by counting bytes in encoded slice?\n\n let mut res = vec![];\n\n let 
mut buf: [u8; 8] = [0; 8];\n\n buf.copy_from_slice(&src[1..9]);\n\n dst.push(i64::from_be_bytes(buf));\n\n\n\n simple8b::decode(&src[9..], &mut res);\n\n let mut next = dst[dst.len() - 1];\n\n if scaler > 1 {\n\n // TODO(edd): fix this. It's copying, which is slowwwwwwwww.\n\n for v in &res {\n\n next += (v * scaler) as i64;\n\n dst.push(next);\n", "file_path": "influxdb_tsm/src/encoders/timestamp.rs", "rank": 76, "score": 114543.54619983003 }, { "content": "fn decode_uncompressed(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() || src.len() & 0x7 != 0 {\n\n return Err(From::from(\"invalid uncompressed block length\"));\n\n }\n\n\n\n let count = src.len() / 8;\n\n if dst.capacity() < count {\n\n dst.reserve_exact(count - dst.capacity());\n\n }\n\n let mut i = 0;\n\n let mut prev: i64 = 0;\n\n let mut buf: [u8; 8] = [0; 8];\n\n while i < src.len() {\n\n buf.copy_from_slice(&src[i..i + 8]);\n\n prev = prev.wrapping_add(zig_zag_decode(u64::from_be_bytes(buf)));\n\n dst.push(prev); // N.B - signed integer...\n\n i += 8;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/integer.rs", "rank": 77, "score": 114543.54619983003 }, { "content": "/// encode encodes a vector of signed integers into a slice of bytes.\n\n///\n\n/// To maximise compression, the provided vector should be sorted in ascending\n\n/// order. First deltas between the integers are determined, then further\n\n/// encoding is potentially carried out. If all the deltas are the same the\n\n/// block can be encoded using RLE. 
If not, as long as the deltas are not bigger\n\n/// than simple8b::MAX_VALUE they can be encoded using simple8b.\n\npub fn encode(src: &[i64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer.\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut max: u64 = 0;\n\n let mut deltas = i64_to_u64_vector(src);\n\n if deltas.len() > 1 {\n\n for i in (1..deltas.len()).rev() {\n\n deltas[i] = deltas[i].wrapping_sub(deltas[i - 1]);\n\n if deltas[i] > max {\n\n max = deltas[i];\n\n }\n\n }\n\n let mut use_rle = true;\n\n for i in 2..deltas.len() {\n\n if deltas[1] != deltas[i] {\n\n use_rle = false;\n\n break;\n", "file_path": "influxdb_tsm/src/encoders/timestamp.rs", "rank": 78, "score": 112758.42168651771 }, { "content": "/// decode decodes a slice of bytes encoded using encode back into a\n\n/// vector of signed integers.\n\npub fn decode(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n let encoding = &src[0] >> 4;\n\n match encoding {\n\n encoding if encoding == Encoding::Uncompressed as u8 => {\n\n decode_uncompressed(&src[1..], dst) // first byte not used\n\n }\n\n encoding if encoding == Encoding::Rle as u8 => decode_rle(&src, dst),\n\n encoding if encoding == Encoding::Simple8b as u8 => decode_simple8b(&src, dst),\n\n _ => Err(From::from(\"invalid block encoding\")),\n\n }\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/timestamp.rs", "rank": 79, "score": 112758.11945973654 }, { "content": "/// encode packs and binary encodes the provides slice of u64 values using\n\n/// simple8b into the provided vector.\n\npub fn encode(src: &[u64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n let mut i = 0;\n\n 'next_value: while i < src.len() {\n\n // try to pack a run of 240 or 120 1s\n\n let remain = src.len() - i;\n\n if remain >= 120 {\n\n let a = if remain >= 240 {\n\n &src[i..i + 240]\n\n } else {\n\n &src[i..i + 120]\n\n };\n\n\n\n // search for 
the longest sequence of 1s in a\n\n let k = a.iter().take_while(|x| **x == 1).count();\n\n if k == 240 {\n\n i += 240;\n\n dst.resize(dst.len() + 8, 0);\n\n continue;\n\n } else if k >= 120 {\n\n i += 120;\n", "file_path": "influxdb_tsm/src/encoders/simple8b.rs", "rank": 80, "score": 112758.07699468377 }, { "content": "/// Encodes a slice of byte slices representing string data into a vector of\n\n/// bytes. Currently uses Snappy compression.\n\npub fn encode(src: &[&[u8]], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n // strings shouldn't be longer than 64kb\n\n let length_of_lengths = src.len() * super::MAX_VAR_INT_32;\n\n let sum_of_lengths: usize = src\n\n .iter()\n\n .map(|s| {\n\n let len = s.len();\n\n assert!(len < MAX_I32);\n\n len\n\n })\n\n .sum();\n\n let source_size = 2 + length_of_lengths + sum_of_lengths;\n\n\n\n // determine the maximum possible length needed for the buffer, which\n\n // includes the compressed size\n", "file_path": "influxdb_tsm/src/encoders/string.rs", "rank": 81, "score": 112758.03515337245 }, { "content": "/// Encodes a slice of unsigned 64-bit integers into `dst`.\n\n///\n\n/// Deltas between the integers in the input are first calculated, then the\n\n/// deltas are further compressed if possible, either via bit-packing using\n\n/// simple8b or by run-length encoding the deltas if they're all the same.\n\npub fn encode(src: &[u64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n let signed = u64_to_i64_vector(&src);\n\n super::integer::encode(&signed, dst)\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/unsigned.rs", "rank": 82, "score": 112757.57711231461 }, { "content": "/// encode encodes a vector of signed integers into dst.\n\n///\n\n/// Deltas between the integers in the vector are first calculated, and these\n\n/// deltas are then zig-zag encoded. 
The resulting zig-zag encoded deltas are\n\n/// further compressed if possible, either via bit-packing using simple8b or by\n\n/// run-length encoding the deltas if they're all the same.\n\npub fn encode(src: &[i64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer.\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut max: u64 = 0;\n\n let mut deltas = i64_to_u64_vector(&src);\n\n for i in (1..deltas.len()).rev() {\n\n deltas[i] = zig_zag_encode(deltas[i].wrapping_sub(deltas[i - 1]) as i64);\n\n if deltas[i] > max {\n\n max = deltas[i];\n\n }\n\n }\n\n\n\n // deltas[0] is the first value in the sequence.\n\n deltas[0] = zig_zag_encode(src[0]);\n\n\n\n if deltas.len() > 2 {\n\n let mut use_rle = true;\n", "file_path": "influxdb_tsm/src/encoders/integer.rs", "rank": 83, "score": 112757.31085971795 }, { "content": "/// Encodes a slice of booleans into `dst`.\n\n///\n\n/// Boolean encoding uses 1 bit per value. Each compressed byte slice contains a\n\n/// 1 byte header indicating the compression type, followed by a variable byte\n\n/// encoded length indicating how many booleans are packed in the slice. 
The\n\n/// remaining bytes contain 1 byte for every 8 boolean values encoded.\n\npub fn encode(src: &[bool], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear();\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let size = HEADER_LEN + 8 + ((src.len() + 7) / 8); // Header + Num bools + bool data.\n\n dst.resize(size, 0);\n\n\n\n // Store the encoding type in the 4 high bits of the first byte\n\n dst[0] = BOOLEAN_COMPRESSED_BIT_PACKED << 4;\n\n\n\n let mut n = 8u64; // Current bit in current byte.\n\n\n\n // Encode the number of booleans written.\n\n let len_u64: u64 = src.len().try_into()?;\n\n let i = len_u64.encode_var(&mut dst[1..]);\n\n let step: u64 = (i * 8).try_into()?;\n\n n += step;\n\n\n", "file_path": "influxdb_tsm/src/encoders/boolean.rs", "rank": 84, "score": 112757.1877077712 }, { "content": "/// decode decodes the provided slice of bytes into a vector of f64 values.\n\npub fn decode(src: &[u8], dst: &mut Vec<f64>) -> Result<(), Box<dyn Error>> {\n\n decode_with_sentinel(src, dst, SENTINEL)\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/float.rs", "rank": 85, "score": 112752.37978344399 }, { "content": "/// Decodes a slice of bytes into a destination vector of `bool`s.\n\npub fn decode(src: &[u8], dst: &mut Vec<bool>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n // First byte stores the encoding type, only have the bit packed format\n\n // currently so ignore for now.\n\n assert_eq!(src[0], BOOLEAN_COMPRESSED_BIT_PACKED << 4);\n\n let src = &src[HEADER_LEN..];\n\n\n\n let (count, num_bytes_read) = u64::decode_var(src);\n\n if num_bytes_read == 0 {\n\n return Err(\"boolean decoder: invalid count\".into());\n\n }\n\n\n\n let mut count: usize = count.try_into()?;\n\n let src = &src[num_bytes_read..];\n\n\n\n let min = src.len() * 8;\n\n\n", "file_path": "influxdb_tsm/src/encoders/boolean.rs", "rank": 86, "score": 112752.37978344399 }, { "content": "/// decode decodes a slice of bytes 
into a vector of signed integers.\n\npub fn decode(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n let encoding = &src[0] >> 4;\n\n match encoding {\n\n encoding if encoding == Encoding::Uncompressed as u8 => {\n\n decode_uncompressed(&src[1..], dst) // first byte not used\n\n }\n\n encoding if encoding == Encoding::Rle as u8 => decode_rle(&src[1..], dst),\n\n encoding if encoding == Encoding::Simple8b as u8 => decode_simple8b(&src[1..], dst),\n\n _ => Err(From::from(\"invalid block encoding\")),\n\n }\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/integer.rs", "rank": 87, "score": 112752.37978344399 }, { "content": "/// Decodes a slice of bytes into a destination vector of unsigned integers.\n\npub fn decode(src: &[u8], dst: &mut Vec<u64>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n let mut signed_results = vec![];\n\n super::integer::decode(src, &mut signed_results)?;\n\n dst.clear();\n\n dst.reserve_exact(signed_results.len() - dst.capacity());\n\n for s in signed_results {\n\n dst.push(s as u64);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/unsigned.rs", "rank": 88, "score": 112752.37978344399 }, { "content": "#[allow(clippy::many_single_char_names)]\n\npub fn encode(src: &[f64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer.\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n if dst.capacity() < 9 {\n\n dst.reserve_exact(9 - dst.capacity()); // room for encoding type, block\n\n // size and a value\n\n }\n\n\n\n // write encoding type\n\n let mut n = 8; // N.B, this is the number of bits written\n\n dst.push((1 << 4) as u8); // write compression type\n\n\n\n // write the first value into the block\n\n let first = src[0];\n\n let mut prev = first.to_bits();\n\n dst.extend_from_slice(&prev.to_be_bytes());\n\n n += 64;\n\n\n", "file_path": "influxdb_tsm/src/encoders/float.rs", "rank": 
89, "score": 112752.37978344399 }, { "content": "fn encoding_drle_row_ids_sorted(c: &mut Criterion) {\n\n benchmark_row_ids(\n\n c,\n\n \"encoding_drle_row_ids_sorted\",\n\n &BATCH_SIZES,\n\n &CARDINALITIES,\n\n );\n\n}\n\n\n", "file_path": "mem_qe/benches/encoding.rs", "rank": 90, "score": 112558.42984347521 }, { "content": "type Error = Box<dyn std::error::Error>;\n", "file_path": "generated_types/build.rs", "rank": 91, "score": 112073.44770620982 }, { "content": "type Error = Box<dyn std::error::Error>;\n", "file_path": "google_types/build.rs", "rank": 92, "score": 112073.44770620982 }, { "content": "/// decode_influxdb decodes the provided slice of bytes, which must have been\n\n/// encoded into a TSM file via InfluxDB's encoder.\n\n///\n\n/// TODO(edd): InfluxDB uses a different sentinel value to terminate a block\n\n/// than we chose to use for the float decoder. As we settle on a story around\n\n/// compression of f64 blocks we may be able to clean this API and not have\n\n/// multiple methods.\n\npub fn decode_influxdb(src: &[u8], dst: &mut Vec<f64>) -> Result<(), Box<dyn Error>> {\n\n decode_with_sentinel(src, dst, SENTINEL_INFLUXDB)\n\n}\n\n\n\n/// decode decodes a slice of bytes into a vector of floats.\n", "file_path": "influxdb_tsm/src/encoders/float.rs", "rank": 93, "score": 111038.66355927094 }, { "content": "fn encoding_drle_row_ids_sorted_roaring(c: &mut Criterion) {\n\n benchmark_row_ids_roaring(\n\n c,\n\n \"encoding_drle_row_ids_sorted_roaring\",\n\n &BATCH_SIZES,\n\n &CARDINALITIES,\n\n );\n\n}\n\n\n", "file_path": "mem_qe/benches/encoding.rs", "rank": 94, "score": 110189.40065700829 }, { "content": "fn to_stringset(v: &[&str]) -> StringSetRef {\n\n v.into_stringset().unwrap()\n\n}\n", "file_path": "server/src/query_tests/influxrpc/table_names.rs", "rank": 95, "score": 109927.91861929669 }, { "content": "fn to_stringset(v: &[&str]) -> StringSetRef {\n\n v.into_stringset().unwrap()\n\n}\n", "file_path": 
"server/src/query_tests/influxrpc/tag_keys.rs", "rank": 96, "score": 109927.91861929669 }, { "content": "fn to_stringset(v: &[&str]) -> StringSetRef {\n\n v.into_stringset().unwrap()\n\n}\n", "file_path": "server/src/query_tests/influxrpc/tag_values.rs", "rank": 97, "score": 109927.91861929669 }, { "content": "/// Decodes a slice of bytes representing Snappy-compressed data into a vector\n\n/// of vectors of bytes representing string data, which may or may not be valid\n\n/// UTF-8.\n\npub fn decode(src: &[u8], dst: &mut Vec<Vec<u8>>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut decoder = snap::raw::Decoder::new();\n\n // First byte stores the encoding type, only have snappy format\n\n // currently so ignore for now.\n\n let decoded_bytes = decoder.decompress_vec(&src[HEADER_LEN..])?;\n\n\n\n if dst.capacity() == 0 {\n\n dst.reserve_exact(64);\n\n }\n\n\n\n let num_decoded_bytes = decoded_bytes.len();\n\n let mut i = 0;\n\n\n\n while i < num_decoded_bytes {\n\n let (length, num_bytes_read) = u64::decode_var(&decoded_bytes[i..]);\n\n let length: usize = length.try_into()?;\n", "file_path": "influxdb_tsm/src/encoders/string.rs", "rank": 98, "score": 109379.58321310107 }, { "content": "pub fn object_store_path_for_database_config<P: ObjectStorePath>(\n\n root: &P,\n\n name: &DatabaseName<'_>,\n\n) -> P {\n\n let mut path = root.clone();\n\n path.push_dir(name.to_string());\n\n path.set_file_name(DB_RULES_FILE_NAME);\n\n path\n\n}\n\n\n", "file_path": "server/src/config.rs", "rank": 99, "score": 107696.17696541845 } ]
Rust
src/unpack.rs
clouds56/crnlib
13dbc1e0e4184ad686e3987708c4564901ed92ef
use std::io::prelude::*; use anyhow::*; use serde::{Serialize, Deserialize}; use crate::{Tables, Huffman, codec::Codec}; pub trait Block: Serialize { const BLOCK_SIZE: usize; fn write_to<W: Write>(&self, mut w: W) -> std::io::Result<()> { use bincode::Options; let bin = bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian() .serialize(self) .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; w.write(&bin)?; Ok(()) } } pub trait Unpack { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error>; fn next_tile_idx(codec: &mut Codec, encoding: &Huffman, tile_bits: &mut u32) -> Result<(usize, [usize; 4]), Error> { if *tile_bits == 1 { *tile_bits = encoding.next(codec).context("read chunk encoding bits")? | 512; } let tile_index = *tile_bits as usize & 7; *tile_bits >>= 3; Ok((Self::COUNT_TILES[tile_index], Self::TILES[tile_index])) } const TRUNK_SIZE: usize = 2; const COUNT_TILES: [usize; 8] = [ 1, 2, 2, 3, 3, 3, 3, 4 ]; const TILES: [[usize; 4]; 8] = [ [ 0, 0, 0, 0 ], [ 0, 0, 1, 1 ], [ 0, 1, 0, 1 ], [ 0, 0, 1, 2 ], [ 1, 2, 0, 0 ], [ 0, 1, 0, 2 ], [ 1, 0, 2, 0 ], [ 0, 1, 2, 3 ] ]; } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt1 { pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt1 { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt1 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == 
(chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt1 { color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5 { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt5 { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxt5 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for 
y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5 { alpha_endpoint: alpha_endpoints[tile], alpha_selector, color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5A { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], } impl Block for Dxt5A { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt5A { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / 
Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5A { alpha_endpoint: alpha_endpoints[tile], alpha_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxn { pub alpha0_endpoint: (u8, u8), pub alpha0_selector: [u8; 6], pub alpha1_endpoint: (u8, u8), pub alpha1_selector: [u8; 6], } impl Block for Dxn { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxn { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut 
tile_bits = 1u32; let mut alpha0_endpoint_index = 0; let mut alpha0_selector_index = 0; let mut alpha1_endpoint_index = 0; let mut alpha1_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha0_endpoints = [(0, 0); 4]; let mut alpha1_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha0_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha0_endpoint_index).context("read alpha0_endpoint_delta")?; } for i in 0..tiles_count { alpha1_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha1_endpoint_index).context("read alpha1_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha0_selector = tables.alpha_selector()?.next(codec, &mut alpha0_selector_index).context("read alpha0_selector_delta")?; let alpha1_selector = tables.alpha_selector()?.next(codec, &mut alpha1_selector_index).context("read alpha1_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxn { alpha0_endpoint: alpha0_endpoints[tile], alpha0_selector, alpha1_endpoint: alpha1_endpoints[tile], alpha1_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[test] fn test_constant() { assert_eq!(Dxt5::TILES.len(), Dxt5::COUNT_TILES.len()); assert_eq!(Dxt5::TILES[0].len(), 
Dxt5::TRUNK_SIZE * Dxt5::TRUNK_SIZE); use bincode::Options; let option = || bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian(); assert_eq!(option().serialized_size(&Dxt1::default()).unwrap(), Dxt1::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5::default()).unwrap(), Dxt5::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5A::default()).unwrap(), Dxt5A::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxn::default()).unwrap(), Dxn::BLOCK_SIZE as u64); assert_eq!(option().serialize(&Dxt5 { alpha_endpoint: (0x17, 0x18), alpha_selector: [0x20, 0x21, 0x22, 0x23, 0x24, 0x25], color_endpoint: (0x3234, 0x3537), color_selector: [0x49, 0x48, 0x47, 0x46], }).unwrap(), &[ 0x17, 0x18, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x34, 0x32, 0x37, 0x35, 0x49, 0x48, 0x47, 0x46]); }
use std::io::prelude::*; use anyhow::*; use serde::{Serialize, Deserialize}; use crate::{Tables, Huffman, codec::Codec}; pub trait Block: Serialize { const BLOCK_SIZE: usize; fn write_to<W: Write>(&self, mut w: W) -> std::io::Result<()> { use bincode::Options; let bin = bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian() .serialize(self) .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; w.write(&bin)?; Ok(()) } } pub trait Unpack { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error>; fn next_tile_idx(codec: &mut Codec, encoding: &Huffman, tile_bits: &mut u32) -> Result<(usize, [usize; 4]), Error> { if *tile_bits == 1 { *tile_bits = encoding.next(codec).context("read chunk encoding bits")? | 512; } let tile_index = *tile_bits as usize & 7; *tile_bits >>= 3; Ok((Self::COUNT_TILES[tile_index], Self::TILES[tile_index])) } const TRUNK_SIZE: usize = 2; const COUNT_TILES: [usize; 8] = [ 1, 2, 2, 3, 3, 3, 3, 4 ]; const TILES: [[usize; 4]; 8] = [ [ 0, 0, 0, 0 ], [ 0, 0, 1, 1 ], [ 0, 1, 0, 1 ], [ 0, 0, 1, 2 ], [ 1, 2, 0, 0 ], [ 0, 1, 0, 2 ], [ 1, 0, 2, 0 ], [ 0, 1, 2, 3 ] ]; } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt1 { pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt1 { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt1 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == 
(chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt1 { color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5 { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt5 { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxt5 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for 
y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5 { alpha_endpoint: alpha_endpoints[tile], alpha_selector, color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5A { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], } impl Block for Dxt5A { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt5A {
} #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxn { pub alpha0_endpoint: (u8, u8), pub alpha0_selector: [u8; 6], pub alpha1_endpoint: (u8, u8), pub alpha1_selector: [u8; 6], } impl Block for Dxn { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxn { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut alpha0_endpoint_index = 0; let mut alpha0_selector_index = 0; let mut alpha1_endpoint_index = 0; let mut alpha1_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha0_endpoints = [(0, 0); 4]; let mut alpha1_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha0_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha0_endpoint_index).context("read alpha0_endpoint_delta")?; } for i in 0..tiles_count { alpha1_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha1_endpoint_index).context("read alpha1_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha0_selector = tables.alpha_selector()?.next(codec, &mut alpha0_selector_index).context("read alpha0_selector_delta")?; let alpha1_selector = tables.alpha_selector()?.next(codec, &mut alpha1_selector_index).context("read alpha1_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 
0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxn { alpha0_endpoint: alpha0_endpoints[tile], alpha0_selector, alpha1_endpoint: alpha1_endpoints[tile], alpha1_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[test] fn test_constant() { assert_eq!(Dxt5::TILES.len(), Dxt5::COUNT_TILES.len()); assert_eq!(Dxt5::TILES[0].len(), Dxt5::TRUNK_SIZE * Dxt5::TRUNK_SIZE); use bincode::Options; let option = || bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian(); assert_eq!(option().serialized_size(&Dxt1::default()).unwrap(), Dxt1::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5::default()).unwrap(), Dxt5::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5A::default()).unwrap(), Dxt5A::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxn::default()).unwrap(), Dxn::BLOCK_SIZE as u64); assert_eq!(option().serialize(&Dxt5 { alpha_endpoint: (0x17, 0x18), alpha_selector: [0x20, 0x21, 0x22, 0x23, 0x24, 0x25], color_endpoint: (0x3234, 0x3537), color_selector: [0x49, 0x48, 0x47, 0x46], }).unwrap(), &[ 0x17, 0x18, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x34, 0x32, 0x37, 0x35, 0x49, 0x48, 0x47, 0x46]); }
fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5A { alpha_endpoint: alpha_endpoints[tile], alpha_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) }
function_block-full_function
[ { "content": "#[test]\n\nfn test_huffman() {\n\n let input = [0b0100_0000u8];\n\n let mut codec = Codec::new(&input);\n\n let huffman = Huffman::new(BTreeMap::<bool,_>::new()).expect(\"zero huffman\");\n\n assert!(huffman.next(&mut codec).is_err());\n\n\n\n let mut codec = Codec::new(&input);\n\n let mut depth = BTreeMap::new();\n\n depth.insert(0xff, 1);\n\n let huffman = Huffman::new(depth).expect(\"zero huffman\");\n\n assert_eq!(huffman.next(&mut codec).unwrap(), 0xff);\n\n assert!(huffman.next(&mut codec).is_err());\n\n\n\n\n\n let mut codec = Codec::new(&input);\n\n let mut depth = BTreeMap::new();\n\n depth.insert(0x01, 1);\n\n depth.insert(0xff, 1);\n\n let huffman = Huffman::new(depth).expect(\"zero huffman\");\n\n assert_eq!(huffman.next(&mut codec).unwrap(), 0x01);\n\n assert_eq!(huffman.next(&mut codec).unwrap(), 0xff);\n\n}\n", "file_path": "src/codec.rs", "rank": 2, "score": 58965.362654564764 }, { "content": "#[test]\n\nfn test_read_bits() {\n\n let input = [0b1100_1010u8, 0b0110_1101, 0b1101_1001];\n\n let mut codec = Codec::new(&input);\n\n assert_eq!(codec.read_bits(3).unwrap(), 0b110);\n\n assert_eq!(codec.index, 3);\n\n assert_eq!(codec.read_bits(17).unwrap(), 0b1010_0110_1101_1101);\n\n assert_eq!(codec.index, 20);\n\n assert_eq!(codec.read_bits(0).unwrap(), 0);\n\n assert_eq!(codec.index, 20);\n\n\n\n assert_eq!(Huffman::<()>::MAX_SYMBOL_COUNT, 1 << (Huffman::<()>::MAX_SYMBOL_COUNT_BIT - 1));\n\n}\n\n\n\npub struct Huffman<T> {\n\n depth_count: [usize; Key::MAX_DEPTH+1],\n\n /// [0, 1, 3, 7, 15, 32]\n\n /// for depth of i, the range of encoded is depth_bound[i-1]*2..depth_bound[i]\n\n /// 1: 0..1 => 0b0\n\n /// 2: 2..3 => 0b10\n\n /// 3: 6..7 => 0b110\n", "file_path": "src/codec.rs", "rank": 3, "score": 55539.64559670892 }, { "content": "#[test]\n\nfn test_file() {\n\n use std::io::prelude::*;\n\n let sample = \"samples/test.crn\";\n\n assert_eq!(Header::fixed_size(), Header::serialize_option()\n\n 
.serialized_size(&Header::default()).expect(\"header size\") as usize);\n\n let mut file = std::fs::File::open(sample).expect(\"open sample crn file\");\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer).expect(\"read crn file\");\n\n let header = Header::parse(&buffer).expect(\"parse\");\n\n println!(\"header: {:x?}\", header);\n\n assert_eq!(header.header_size as usize, Header::fixed_size() + 4*header.level_count as usize);\n\n assert!(header.check_crc(&buffer));\n\n\n\n let tables = header.get_table(&buffer).expect(\"read table\");\n\n println!(\"table: {:x?}\", tables);\n\n let level0 = header.unpack_level(&tables, &buffer, 0).expect(\"unpack\");\n\n println!(\"{:02x?}\", level0);\n\n header.unpack_level(&tables, &buffer, header.level_count as usize - 1).expect(\"unpack\");\n\n\n\n use image::ImageDecoder;\n", "file_path": "src/lib.rs", "rank": 5, "score": 25431.47006882602 }, { "content": "use std::collections::BTreeMap;\n\nuse bitvec::{slice::BitSlice, order::Msb0, fields::BitField};\n\nuse anyhow::*;\n\n\n\npub struct Codec<'a> {\n\n buffer: &'a BitSlice<Msb0, u8>,\n\n index: usize,\n\n}\n\n\n\nimpl Codec<'_> {\n\n pub fn new<'a>(input: &'a [u8]) -> Codec<'a> {\n\n Codec { buffer: BitSlice::from_slice(input), index: 0 }\n\n }\n\n pub fn look_bits(&self, n: usize) -> u64 {\n\n assert!(n <= 64);\n\n if n == 0 { return 0 }\n\n if self.index + n > self.buffer.len() {\n\n self.buffer[self.index..].load_be::<u64>() << (self.index + n - self.buffer.len())\n\n } else {\n\n self.buffer[self.index..self.index+n].load_be()\n", "file_path": "src/codec.rs", "rank": 6, "score": 19096.457051138867 }, { "content": " }\n\n }\n\n pub fn read_bits(&mut self, n: usize) -> Result<u64, Error> {\n\n assert!(n <= 64);\n\n if self.index + n > self.buffer.len() {\n\n bail!(\"read out of index {} < {}\", self.index+n, self.buffer.len());\n\n }\n\n if n == 0 { return Ok(0) }\n\n let result = self.buffer[self.index..self.index+n].load_be();\n\n self.index += n;\n\n 
Ok(result)\n\n }\n\n pub fn skip_bits(&mut self, n: usize) {\n\n self.index += n;\n\n }\n\n pub fn current(&self) -> usize {\n\n self.index\n\n }\n\n pub fn len(&self) -> usize {\n\n self.buffer.len()\n", "file_path": "src/codec.rs", "rank": 7, "score": 19095.50066898784 }, { "content": " }\n\n pub fn is_complete(&self) -> bool {\n\n self.index + 7 >= self.buffer.len() && self.index <= self.buffer.len()\n\n }\n\n\n\n pub fn get_huffman(&mut self) -> Result<Huffman<u32>, Error> {\n\n let symbol_count = self.read_bits(Huffman::<()>::MAX_SYMBOL_COUNT_BIT)? as u32;\n\n // println!(\"construct huffman tree with {} symbols\", symbol_count);\n\n if symbol_count == 0 {\n\n return Huffman::new(BTreeMap::new())\n\n }\n\n let mut tmp_symbol_depth = BTreeMap::new();\n\n let tmp_symbol_count = self.read_bits(5)? as usize;\n\n ensure!(tmp_symbol_count <= Key::SHUFFLE.len(),\n\n \"tmp_symbol_count {} > {}\", tmp_symbol_count, Key::SHUFFLE.len());\n\n for i in 0..tmp_symbol_count {\n\n let value = self.read_bits(3)? 
as usize;\n\n if value != 0 {\n\n tmp_symbol_depth.insert(Key::SHUFFLE[i], value);\n\n }\n", "file_path": "src/codec.rs", "rank": 8, "score": 19093.683519123053 }, { "content": " ensure!(codec.current() < codec.len(), \"stream end {} >= {}\", codec.current(), codec.len());\n\n let k = codec.look_bits(self.max_depth) as u32;\n\n for i in 1..=self.max_depth {\n\n let t = k >> (self.max_depth - i);\n\n if let Some(sym) = self.symbol_rev.get(&(i, t)) {\n\n codec.index += i;\n\n return Ok(*sym)\n\n }\n\n }\n\n bail!(\"incomplete huffman tree no match\");\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]\n\npub enum Key {\n\n Depth(usize),\n\n ShortZero /* 17 */, LongZero /* 18 */,\n\n ShortRepeat /* 19 */, LongRepeat /* 20 */,\n\n}\n\nuse Key::*;\n", "file_path": "src/codec.rs", "rank": 9, "score": 19092.62170899952 }, { "content": "impl<T: Ord+Copy> Huffman<T> {\n\n pub fn new(symbol_depth: BTreeMap<T, usize>) -> Result<Self, Error> {\n\n let mut depth_count = [0; Key::MAX_DEPTH+1];\n\n for &depth in symbol_depth.values() {\n\n depth_count[depth] += 1;\n\n }\n\n let mut max_depth = 0;\n\n let mut depth_bound= [0; Key::MAX_DEPTH+1];\n\n let mut available = 0;\n\n for (depth, &n) in depth_count.iter().enumerate() {\n\n if n != 0 {\n\n max_depth = depth;\n\n }\n\n available <<= 1;\n\n if depth != 0 {\n\n available += n as u32;\n\n }\n\n depth_bound[depth] = available;\n\n }\n\n ensure!(\n", "file_path": "src/codec.rs", "rank": 10, "score": 19091.97215396466 }, { "content": " }\n\n let key = Huffman::new(tmp_symbol_depth).context(\"get key huffman\")?;\n\n // println!(\"tmp_symbol_depth: {:?}\", key);\n\n let mut symbol_depth = BTreeMap::new();\n\n let mut i = 0;\n\n let mut last = None;\n\n while i < symbol_count {\n\n let (len, d) = match key.next(self).context(\"get key content\")? {\n\n Depth(d) => (1, d),\n\n ShortZero => (self.read_bits(3)? + 3, 0),\n\n LongZero => (self.read_bits(7)? + 11, 0),\n\n ShortRepeat => (self.read_bits(2)? 
+ 3, last.ok_or_else(|| anyhow!(\"short repeat no last\"))?),\n\n LongRepeat => (self.read_bits(6)? + 7, last.ok_or_else(|| anyhow!(\"long repeat no last\"))?),\n\n };\n\n last = Some(d);\n\n for j in 0..len as u32 {\n\n if d != 0 {\n\n symbol_depth.insert(i+j, d);\n\n }\n\n }\n\n i += len as u32;\n\n }\n\n // println!(\"{:?}\", symbol_depth);\n\n Huffman::new(symbol_depth)\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/codec.rs", "rank": 11, "score": 19091.91362162975 }, { "content": " /// 4: 14..15 => 0b1110\n\n /// 5: 30..32 => 0b11110, 0b11111\n\n // depth_bound: [u32; Key::MAX_DEPTH+1],\n\n symbol_depth: BTreeMap<T, usize>,\n\n symbols: BTreeMap<T, u32>,\n\n symbol_rev: BTreeMap<(usize, u32), T>,\n\n max_depth: usize,\n\n}\n\n\n\nimpl<T: std::fmt::Debug> std::fmt::Debug for Huffman<T> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Huffman\")\n\n .field(\"symbol_count\", &self.symbols.len())\n\n .field(\"max_depth\", &self.max_depth)\n\n .field(\"symbol_depth\", &self.symbol_depth)\n\n .field(\"depth_count\", &self.depth_count)\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "src/codec.rs", "rank": 12, "score": 19091.16374375795 }, { "content": " 1<<max_depth == depth_bound[max_depth] || (max_depth <= 1 && depth_bound[max_depth] == max_depth as u32),\n\n \"depth_bound error: {:?} {:?}\", depth_count, depth_bound);\n\n let mut depth_current = [0; Key::MAX_DEPTH+1];\n\n for i in 1..=Key::MAX_DEPTH {\n\n depth_current[i] = depth_bound[i-1]*2;\n\n }\n\n let symbols: BTreeMap<_, _> = symbol_depth.iter().filter_map(|(&key, &depth)| {\n\n if depth == 0 { return None }\n\n let result = depth_current[depth];\n\n depth_current[depth] += 1;\n\n Some((key, result))\n\n }).collect();\n\n let symbol_rev = symbols.iter().map(|(&k, &v)| ((symbol_depth[&k], v), k)).collect();\n\n Ok(Self {\n\n depth_count, symbol_depth, max_depth,\n\n symbols, symbol_rev,\n\n })\n\n }\n\n\n\n pub fn next(&self, codec: &mut Codec<'_>) -> Result<T, 
Error> {\n", "file_path": "src/codec.rs", "rank": 13, "score": 19090.192296689904 }, { "content": "\n\nimpl Key {\n\n pub const MAX_DEPTH: usize = 16;\n\n pub const SHUFFLE: [Key; Self::MAX_DEPTH+5] = [\n\n ShortZero, LongZero, ShortRepeat, LongRepeat,\n\n Depth(0), Depth(8), Depth(7), Depth(9),\n\n Depth(6), Depth(10), Depth(5), Depth(11),\n\n Depth(4), Depth(12), Depth(3), Depth(13),\n\n Depth(2), Depth(14), Depth(1), Depth(15), Depth(16)];\n\n}\n\n\n\nimpl<T> Huffman<T> {\n\n pub const MAX_SYMBOL_COUNT: usize = 8192;\n\n pub const MAX_SYMBOL_COUNT_BIT: usize = 14;\n\n}\n\n\n", "file_path": "src/codec.rs", "rank": 14, "score": 19089.926130685126 }, { "content": " Format::Dxt5A => unpack::Dxt5A::unpack(tables, &mut codec, width, height, self.face_count),\n\n Format::DxnXY | Format::DxnYX => unpack::Dxn::unpack(tables, &mut codec, width, height, self.face_count),\n\n Format::Dxt3 | Format::Etc1 | Format::Invalid => bail!(\"unsupported format {:?}\", self.format),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Tables {\n\n pub chunk_encoding: Huffman,\n\n\n\n pub color_endpoint: Option<Table<(u16, u16)>>,\n\n pub color_selector: Option<Table<[u8; 4]>>,\n\n pub alpha_endpoint: Option<Table<(u8, u8)>>,\n\n pub alpha_selector: Option<Table<[u8; 6]>>,\n\n}\n\n\n\nimpl Tables {\n\n fn color_endpoint(&self) -> Result<&Table<(u16, u16)>, Error> {\n\n self.color_endpoint.as_ref().ok_or_else(|| anyhow!(\"color_endpoint should present\"))\n", "file_path": "src/lib.rs", "rank": 31, "score": 32.46000761707373 }, { "content": "\n\n pub fn get_level_info(&self, idx: usize) -> Option<(u16, u16)> {\n\n if idx < self.level_count as usize {\n\n let width = 1.max(self.width >> idx);\n\n let height = 1.max(self.height >> idx);\n\n (width, height).into()\n\n } else { None }\n\n }\n\n\n\n pub fn unpack_level(&self, tables: &Tables, input: &[u8], idx: usize) -> Result<Vec<u8>, Error> {\n\n use crate::unpack::Unpack;\n\n let mut codec = if let Some(data) = 
self.get_level_data(input, idx) {\n\n codec::Codec::new(data)\n\n } else { bail!(\"level out of index\") };\n\n let width = 1.max(self.width >> idx);\n\n let height = 1.max(self.height >> idx);\n\n match self.format {\n\n Format::Dxt1 => unpack::Dxt1::unpack(tables, &mut codec, width, height, self.face_count),\n\n Format::Dxt5 | Format::Dxt5AGBR | Format::Dxt5CCxY | Format::Dxt5xGBR | Format::Dxt5xGxR =>\n\n unpack::Dxt5::unpack(tables, &mut codec, width, height, self.face_count),\n", "file_path": "src/lib.rs", "rank": 32, "score": 30.288521471613727 }, { "content": "pub mod codec;\n\npub mod unpack;\n\n\n\nuse serde::{Serialize, Deserialize};\n\nuse anyhow::*;\n\nuse bincode::Options;\n\n\n\npub type Huffman = codec::Huffman<u32>;\n\n\n\n#[derive(Debug, Copy, Clone, serde_repr::Serialize_repr, serde_repr::Deserialize_repr)]\n\n#[repr(u8)]\n\npub enum Format {\n\n Dxt1 = 0, Dxt3, Dxt5,\n\n Dxt5CCxY, Dxt5xGxR, Dxt5xGBR, Dxt5AGBR,\n\n DxnXY /* A2XY */, DxnYX /* ATI2 */,\n\n Dxt5A /* ATI1 */, Etc1,\n\n Invalid = 0xff,\n\n}\n\nimpl Default for Format {\n\n fn default() -> Self {\n", "file_path": "src/lib.rs", "rank": 33, "score": 25.346920548828848 }, { "content": " .with_big_endian()\n\n }\n\n pub fn parse(input: &[u8]) -> Result<Self, Error> {\n\n let mut result: Header = Self::serialize_option()\n\n .deserialize(input)?;\n\n result.level_offset = (0..result.level_count as usize).map(|i|\n\n Self::serialize_option().deserialize::<u32>(&input[Self::fixed_size() + 4*i..])).collect::<Result<_, _>>()?;\n\n Ok(result)\n\n }\n\n\n\n pub fn fixed_size() -> usize {\n\n 33 + 8*4 + 5\n\n }\n\n\n\n pub fn crc16(init: u16, input: &[u8]) -> u16 {\n\n input.iter().fold(!init, |v, &c| {\n\n let x = c ^ (v >> 8) as u8;\n\n let x = (x ^ (x >> 4)) as u16;\n\n (v << 8) ^ (x << 12) ^ (x << 5) ^ x\n\n })\n", "file_path": "src/lib.rs", "rank": 34, "score": 21.902318284629594 }, { "content": " Format::Invalid\n\n }\n\n}\n\n\n\npub mod be_u24 {\n\n use serde::{Serialize, Serializer, 
Deserialize, Deserializer};\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<u32, D::Error> where D: Deserializer<'de> {\n\n <[u8; 3]>::deserialize(deserializer).map(|x| (x[0] as u32) << 16 | (x[1] as u32) << 8 | x[2] as u32)\n\n }\n\n\n\n pub fn serialize<S>(i: &u32, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer {\n\n [(i >> 16 & 0xff) as u8 , (i >> 8 & 0xff) as u8, (i & 0xff) as u8].serialize(serializer)\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)]\n\npub struct Palette {\n\n #[serde(with = \"be_u24\")]\n\n pub offset: u32,\n\n #[serde(with = \"be_u24\")]\n", "file_path": "src/lib.rs", "rank": 35, "score": 21.808102377765255 }, { "content": " C[x[6]] | (C[y[6]] << 2) | (C[x[7]] << 4) | (C[y[7]] << 6),\n\n ];\n\n Ok::<_, Error>(result)\n\n }).collect::<Result<Vec<_>, _>>()?;\n\n if !codec.is_complete() { bail!(\"extra bytes in codec\") }\n\n\n\n Ok(color_selectors)\n\n }\n\n\n\n pub fn get_alpha_selectors(&self, input: &[u8]) -> Result<Vec<[u8; 6]>, Error> {\n\n let mut codec = if let Some(data) = self.get_palette_data(self.alpha_selectors, input) {\n\n codec::Codec::new(&data)\n\n } else { return Ok(vec![]) };\n\n let dm = codec.get_huffman().context(\"alpha_selectors_dm\")?;\n\n // println!(\"{:?}\", dm);\n\n\n\n let mut x = [0; 8];\n\n let mut y = [0; 8];\n\n\n\n const C: [u16; 8] = [0, 2, 3, 4, 5, 6, 7, 1]; // DXT5\n", "file_path": "src/lib.rs", "rank": 36, "score": 20.471061554096366 }, { "content": " pub size: u32,\n\n pub count: u16,\n\n}\n\n\n\n#[derive(Debug, Default, Serialize, Deserialize)]\n\npub struct Header {\n\n pub magic: [u8; 2],\n\n pub header_size: u16,\n\n pub header_crc16: u16,\n\n pub file_size: u32,\n\n pub data_crc16: u16,\n\n\n\n pub width: u16,\n\n pub height: u16,\n\n pub level_count: u8,\n\n pub face_count: u8, // 1 or 6\n\n pub format: Format, // u8\n\n pub flags: u16,\n\n\n\n pub reserved: u32,\n", "file_path": "src/lib.rs", "rank": 37, "score": 20.44098685752598 }, { 
"content": " }\n\n fn color_selector(&self) -> Result<&Table<[u8; 4]>, Error> {\n\n self.color_selector.as_ref().ok_or_else(|| anyhow!(\"color_selector should present\"))\n\n }\n\n fn alpha_endpoint(&self) -> Result<&Table<(u8, u8)>, Error> {\n\n self.alpha_endpoint.as_ref().ok_or_else(|| anyhow!(\"alpha_endpoint should present\"))\n\n }\n\n fn alpha_selector(&self) -> Result<&Table<[u8; 6]>, Error> {\n\n self.alpha_selector.as_ref().ok_or_else(|| anyhow!(\"alpha_selector should present\"))\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Table<T> {\n\n pub delta: Huffman,\n\n pub entries: Vec<T>,\n\n}\n\n\n\nimpl<T: Copy> Table<T> {\n\n fn new(delta: Huffman, entries: Vec<T>) -> Self {\n", "file_path": "src/lib.rs", "rank": 38, "score": 19.451753750188583 }, { "content": "\n\n let alpha_selectors = (0..self.alpha_selectors.count).map(|_i| {\n\n use bitvec::{slice::BitSlice, order::Msb0, fields::BitField};\n\n let mut s = [0u8; 6];\n\n let s_bits = BitSlice::<Msb0, u8>::from_slice_mut(&mut s);\n\n let s_len = s_bits.len();\n\n for (j, (x, y)) in &mut x.iter_mut().zip(&mut y).enumerate() {\n\n let d = dm.next(&mut codec)? 
as i32;\n\n *x = ((*x as i32 + d % 15 - 7) & 7) as usize;\n\n *y = ((*y as i32 + d / 15 - 7) & 7) as usize;\n\n s_bits[s_len-j*6-3..s_len-j*6-0].store_be(C[*x]);\n\n s_bits[s_len-j*6-6..s_len-j*6-3].store_be(C[*y]);\n\n }\n\n s.reverse();\n\n Ok::<_, Error>(s)\n\n }).collect::<Result<Vec<_>, Error>>()?;\n\n if !codec.is_complete() { bail!(\"extra bytes in codec\") }\n\n\n\n Ok(alpha_selectors)\n\n }\n", "file_path": "src/lib.rs", "rank": 39, "score": 19.001395563847392 }, { "content": "\n\n pub fn get_alpha_endpoints(&self, input: &[u8]) -> Result<Vec<(u8, u8)>, Error> {\n\n let mut codec = if let Some(data) = self.get_palette_data(self.alpha_endpoints, input) {\n\n codec::Codec::new(&data)\n\n } else { return Ok(vec![]) };\n\n let dm = codec.get_huffman().context(\"alpha_endpoints_dm1\")?;\n\n // println!(\"{:?}\", dm);\n\n let (mut a, mut b) = (0, 0);\n\n let color_endpoints = (0..self.alpha_endpoints.count).map(|_i| {\n\n let da = dm.next(&mut codec)?; a = (a as u32 + da) as u8;\n\n let db = dm.next(&mut codec)?; b = (b as u32 + db) as u8;\n\n Ok::<_, Error>((a, b))\n\n }).collect::<Result<Vec<_>, _>>()?;\n\n if !codec.is_complete() { bail!(\"extra bytes in codec\") }\n\n Ok(color_endpoints)\n\n }\n\n\n\n pub fn get_color_selectors(&self, input: &[u8]) -> Result<Vec<[u8; 4]>, Error> {\n\n let mut codec = if let Some(data) = self.get_palette_data(self.color_selectors, input) {\n\n codec::Codec::new(&data)\n", "file_path": "src/lib.rs", "rank": 40, "score": 18.908763945043106 }, { "content": " pub userdata: [u32; 2],\n\n\n\n pub color_endpoints: Palette,\n\n pub color_selectors: Palette,\n\n pub alpha_endpoints: Palette,\n\n pub alpha_selectors: Palette,\n\n\n\n pub table_size: u16,\n\n #[serde(with = \"be_u24\")]\n\n pub table_offset: u32,\n\n\n\n #[serde(skip)]\n\n pub level_offset: Vec<u32>,\n\n}\n\n\n\nimpl Header {\n\n fn serialize_option() -> impl bincode::Options {\n\n bincode::config::DefaultOptions::new()\n\n .allow_trailing_bytes()\n\n 
.with_fixint_encoding()\n", "file_path": "src/lib.rs", "rank": 41, "score": 17.588063497327926 }, { "content": " let alpha_endpoint_delta = codec.get_huffman().context(\"read alpha_endpoint table\")?;\n\n let alpha_endpoints = self.get_alpha_endpoints(input).context(\"decode alpha_endpoints\")?;\n\n Table::new(alpha_endpoint_delta, alpha_endpoints).into()\n\n } else { None };\n\n\n\n let alpha_selector = if self.alpha_selectors.count != 0 {\n\n let alpha_selector_delta = codec.get_huffman().context(\"read alpha_selector table\")?;\n\n let alpha_selectors = self.get_alpha_selectors(input).context(\"decode alpha_selectors\")?;\n\n Table::new(alpha_selector_delta, alpha_selectors).into()\n\n } else { None };\n\n\n\n if !codec.is_complete() { bail!(\"extra bytes in codec\") }\n\n Ok(Tables {\n\n chunk_encoding,\n\n color_endpoint, color_selector,\n\n alpha_endpoint, alpha_selector,\n\n })\n\n }\n\n\n\n pub fn get_color_endpoints(&self, input: &[u8]) -> Result<Vec<(u16, u16)>, Error> {\n", "file_path": "src/lib.rs", "rank": 42, "score": 17.19745186465385 }, { "content": "crnlib\n\n========\n\nThis is a port from [crunch/crnlib](https://github.com/BinomialLLC/crunch), the license could be found at the end of file.\n\n\n\nFeel free to open a issue about usage and/or features to make it better to use. 
Now (Jan 2021) it still works well for my specific usage.\n\n\n\nUsage\n\n========\n\n```rust\n\nuse std::io::prelude::*;\n\nlet sample = \"samples/test.crn\";\n\nlet mut file = std::fs::File::open(sample).expect(\"open sample crn file\");\n\nlet mut buffer = Vec::new();\n\n\n\nlet header = Header::parse(&buffer).expect(\"parse\");\n\n\n\nlet tables = header.get_table(&buffer).expect(\"read table\");\n\nlet level0 = header.unpack_level(&tables, &buffer, 0).expect(\"unpack\");\n\n\n\n// level0 contains DXT encoded image content, which could be read by image\n\nuse image::ImageDecoder;\n\nlet (width0, height0) = header.get_level_info(0).expect(\"get level info\");\n\nlet variant = match header.format {\n\n Format::Dxt1 => image::dxt::DXTVariant::DXT1,\n\n Format::Dxt3 => image::dxt::DXTVariant::DXT3,\n\n Format::Dxt5 => image::dxt::DXTVariant::DXT5,\n\n format => unimplemented!(\"image does not support format {:?}\", format),\n\n};\n\nlet decoder = image::dxt::DxtDecoder::new(std::io::Cursor::new(&level0), width0 as u32, height0 as u32, variant).expect(\"new image\");\n\nlet mut raw = vec![0; decoder.total_bytes() as usize];\n\nlet color_type = decoder.color_type();\n\ndecoder.read_image(&mut raw).expect(\"decode dxt\");\n\nlet f = std::fs::File::create(std::path::Path::new(sample).with_extension(\"tga\")).expect(\"create sample tga file\");\n\nlet encoder = image::tga::TgaEncoder::new(f);\n\nencoder.encode(&raw, width0 as u32, height0 as u32, color_type).expect(\"encode tga\");\n\n```\n\n\n", "file_path": "README.md", "rank": 43, "score": 16.876890452271727 }, { "content": " Some(&input[start..end])\n\n }\n\n\n\n pub fn get_table(&self, input: &[u8]) -> Result<Tables, Error> {\n\n let mut codec = codec::Codec::new(self.get_table_data(input));\n\n let chunk_encoding = codec.get_huffman().context(\"read chunk table\")?;\n\n\n\n let color_endpoint = if self.color_endpoints.count != 0 {\n\n let color_endpoint_delta = codec.get_huffman().context(\"read color_endpoint 
table\")?;\n\n let color_endpoints = self.get_color_endpoints(input).context(\"decode color_endpoints\")?;\n\n Table::new(color_endpoint_delta, color_endpoints).into()\n\n } else { None };\n\n\n\n let color_selector = if self.color_selectors.count != 0 {\n\n let color_selector_delta = codec.get_huffman().context(\"read color_selector table\")?;\n\n let color_selectors = self.get_color_selectors(input).context(\"decode color_selectors\")?;\n\n Table::new(color_selector_delta, color_selectors).into()\n\n } else { None };\n\n\n\n let alpha_endpoint = if self.alpha_endpoints.count != 0 {\n", "file_path": "src/lib.rs", "rank": 44, "score": 15.002879429485663 }, { "content": " let mut codec = if let Some(data) = self.get_palette_data(self.color_endpoints, input) {\n\n codec::Codec::new(&data)\n\n } else { return Ok(vec![]) };\n\n let dm1 = codec.get_huffman().context(\"color_endpoints_dm1\")?;\n\n let dm2 = codec.get_huffman().context(\"color_endpoints_dm2\")?;\n\n // println!(\"{:?} {:?}\", dm1, dm2);\n\n let (mut a, mut b, mut c) = (0, 0, 0);\n\n let (mut d, mut e, mut f) = (0, 0, 0);\n\n let color_endpoints = (0..self.color_endpoints.count).map(|_i| {\n\n let da = dm1.next(&mut codec)? as u16; a = (a + da) & 0x1f;\n\n let db = dm2.next(&mut codec)? as u16; b = (b + db) & 0x3f;\n\n let dc = dm1.next(&mut codec)? as u16; c = (c + dc) & 0x1f;\n\n let dd = dm1.next(&mut codec)? as u16; d = (d + dd) & 0x1f;\n\n let de = dm2.next(&mut codec)? as u16; e = (e + de) & 0x3f;\n\n let df = dm1.next(&mut codec)? 
as u16; f = (f + df) & 0x1f;\n\n Ok::<_, Error>((c | (b << 5) | (a << 11), f | (e << 5) | (d << 11)))\n\n }).collect::<Result<Vec<_>, _>>()?;\n\n if !codec.is_complete() { bail!(\"extra bytes in codec\") }\n\n Ok(color_endpoints)\n\n }\n", "file_path": "src/lib.rs", "rank": 45, "score": 14.568008626089835 }, { "content": " } else { return Ok(vec![]) };\n\n let dm = codec.get_huffman().context(\"color_selectors_dm\")?;\n\n // println!(\"{:?}\", dm);\n\n\n\n let mut x = [0; 8];\n\n let mut y = [0; 8];\n\n\n\n const C: [u8; 4] = [0, 2, 3, 1]; // DXT1\n\n\n\n let color_selectors = (0..self.color_selectors.count).map(|_i| {\n\n for (x, y) in &mut x.iter_mut().zip(&mut y) {\n\n let d = dm.next(&mut codec)? as i32;\n\n *x = ((*x as i32 + d % 7 - 3) & 3) as usize;\n\n *y = ((*y as i32 + d / 7 - 3) & 3) as usize;\n\n }\n\n\n\n let result = [\n\n C[x[0]] | (C[y[0]] << 2) | (C[x[1]] << 4) | (C[y[1]] << 6),\n\n C[x[2]] | (C[y[2]] << 2) | (C[x[3]] << 4) | (C[y[3]] << 6),\n\n C[x[4]] | (C[y[4]] << 2) | (C[x[5]] << 4) | (C[y[5]] << 6),\n", "file_path": "src/lib.rs", "rank": 46, "score": 14.258977695843539 }, { "content": " let (width0, height0) = header.get_level_info(0).expect(\"get level info\");\n\n assert_eq!((width0, height0), (header.width, header.height));\n\n let variant = match header.format {\n\n Format::Dxt1 => image::dxt::DXTVariant::DXT1,\n\n Format::Dxt3 => image::dxt::DXTVariant::DXT3,\n\n Format::Dxt5 => image::dxt::DXTVariant::DXT5,\n\n format => unimplemented!(\"image does not support format {:?}\", format),\n\n };\n\n let decoder = image::dxt::DxtDecoder::new(std::io::Cursor::new(&level0), width0 as u32, height0 as u32, variant).expect(\"new image\");\n\n let mut raw = vec![0; decoder.total_bytes() as usize];\n\n let color_type = decoder.color_type();\n\n decoder.read_image(&mut raw).expect(\"decode dxt\");\n\n let f = std::fs::File::create(std::path::Path::new(sample).with_extension(\"tga\")).expect(\"create sample tga file\");\n\n let encoder = 
image::tga::TgaEncoder::new(f);\n\n encoder.encode(&raw, width0 as u32, height0 as u32, color_type).expect(\"encode tga\");\n\n}\n", "file_path": "src/lib.rs", "rank": 47, "score": 14.162647593213897 }, { "content": " }\n\n\n\n pub fn crc16_poly(init: u16, poly: u16, input: &[u8]) -> u16 {\n\n input.iter().fold(!init, |v, &c| {\n\n (0..8).fold(v ^ c as u16, |v, _| {\n\n if v & 1 == 1 { (v >> 1) ^ poly} else { v >> 1 }\n\n })\n\n })\n\n }\n\n\n\n pub fn check_crc(&self, input: &[u8]) -> bool {\n\n self.header_size as usize == Header::fixed_size() + 4*self.level_count as usize &&\n\n self.file_size as usize == input.len() &&\n\n self.header_crc16 == !Self::crc16(0, &input[6..self.header_size as usize]) &&\n\n self.data_crc16 == !Self::crc16(0, &input[self.header_size as usize..])\n\n }\n\n\n\n pub fn block_size(&self) -> usize {\n\n match self.format {\n\n Format::Dxt1 | Format::Dxt5A => 8,\n", "file_path": "src/lib.rs", "rank": 48, "score": 13.818708723741516 }, { "content": " Self { delta, entries }\n\n }\n\n #[inline]\n\n fn truncate(idx: usize, max: usize) -> usize {\n\n if idx < max { idx } else { idx-max }\n\n }\n\n pub fn next(&self, codec: &mut codec::Codec, idx: &mut usize) -> Result<T, Error> {\n\n let delta = self.delta.next(codec)? as usize;\n\n *idx = Self::truncate(*idx + delta, self.entries.len());\n\n Ok(self.entries[*idx])\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/lib.rs", "rank": 49, "score": 13.502149609233369 }, { "content": " _ => 16,\n\n }\n\n }\n\n\n\n pub fn get_level_data<'a>(&self, input: &'a [u8], idx: usize) -> Option<&'a [u8]> {\n\n let start = *self.level_offset.get(idx)? 
as usize;\n\n let end = self.level_offset.get(idx+1).cloned().unwrap_or(self.file_size) as usize;\n\n Some(&input[start..end])\n\n }\n\n\n\n fn get_table_data<'a>(&self, input: &'a [u8]) -> &'a [u8] {\n\n let start = self.table_offset as usize;\n\n let end = start + self.table_size as usize;\n\n &input[start..end]\n\n }\n\n\n\n fn get_palette_data<'a>(&self, palette: Palette, input: &'a [u8]) -> Option<&'a [u8]> {\n\n if palette.count == 0 { return None }\n\n let start = palette.offset as usize;\n\n let end = start + palette.size as usize;\n", "file_path": "src/lib.rs", "rank": 50, "score": 9.015909375803554 }, { "content": "Document of Table\n\n========\n\n* Any table contains 2 huffman tree\n\n * 14 bit of max_symbol_count of second tree\n\n * \"symbol_count\" of first temporary tree, the symbol was reordered as `[ShortZero, LongZero, ShortRepeat, LongRepeat, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15, 16]`.\n\n * temporary one has Key `0..=16` as well as `ShortZero, LongZero, ShortRepeat, LongRepeat` which come with a parameter with `3, 7, 2, 6` bits.\n\n * the depth array (of length max_symbol_count) of second one is encoded using first one.\n\n* A huffman tree is stored using symbol_depth.\n\n * symbol_depth means length of code in bits.\n\n * the Code is assign to Key from small depth to large, then follow the ord of Key.\n\n\n\nLicense of Crunch\n\n========\n\n```\n\ncrunch/crnlib uses a modified ZLIB license. Specifically, it's the same as zlib except that\n\npublic credits for using the library are *required*.\n\n\n\nCopyright (c) 2010-2016 Richard Geldreich, Jr. All rights reserved.\n\n\n\nThis software is provided 'as-is', without any express or implied\n\nwarranty. 
In no event will the authors be held liable for any damages\n\narising from the use of this software.\n\n\n\nPermission is granted to anyone to use this software for any purpose,\n\nincluding commercial applications, and to alter it and redistribute it\n\nfreely, subject to the following restrictions:\n\n\n\n1. The origin of this software must not be misrepresented; you must not\n\nclaim that you wrote the original software.\n\n\n\n2. If you use this software in a product, this acknowledgment in the product\n\ndocumentation or credits is required:\n\n\n\n\"Crunch Library Copyright (c) 2010-2016 Richard Geldreich, Jr.\"\n\n\n\n3. Altered source versions must be plainly marked as such, and must not be\n\nmisrepresented as being the original software.\n\n\n\n4. This notice may not be removed or altered from any source distribution.\n\n```\n", "file_path": "README.md", "rank": 51, "score": 7.596493577201654 } ]
Rust
src/rngs/entropy.rs
robsmith11/rand
1e9554d79b915860894bfba3aaff8c3f1c8b2159
use rand_core::{RngCore, CryptoRng, Error, ErrorKind, impls}; #[allow(unused)] use rngs; #[derive(Debug)] pub struct EntropyRng { source: Source, } #[derive(Debug)] enum Source { Os(Os), Custom(Custom), Jitter(Jitter), None, } impl EntropyRng { pub fn new() -> Self { EntropyRng { source: Source::None } } } impl Default for EntropyRng { fn default() -> Self { EntropyRng::new() } } impl RngCore for EntropyRng { fn next_u32(&mut self) -> u32 { impls::next_u32_via_fill(self) } fn next_u64(&mut self) -> u64 { impls::next_u64_via_fill(self) } fn fill_bytes(&mut self, dest: &mut [u8]) { self.try_fill_bytes(dest).unwrap_or_else(|err| panic!("all entropy sources failed; first error: {}", err)) } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { let mut reported_error = None; if let Source::Os(ref mut os_rng) = self.source { match os_rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: OsRng failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Os::is_supported() { match Os::new_and_fill(dest) { Ok(os_rng) => { debug!("EntropyRng: using OsRng"); self.source = Source::Os(os_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Custom(ref mut rng) = self.source { match rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: custom entropy source failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Custom::is_supported() { match Custom::new_and_fill(dest) { Ok(custom) => { debug!("EntropyRng: using custom entropy source"); self.source = Source::Custom(custom); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Jitter(ref mut jitter_rng) = self.source { match jitter_rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: JitterRng failed: {}", err); reported_error = Some(err); }, } } else if Jitter::is_supported() { 
match Jitter::new_and_fill(dest) { Ok(jitter_rng) => { debug!("EntropyRng: using JitterRng"); self.source = Source::Jitter(jitter_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Some(err) = reported_error { Err(Error::with_cause(ErrorKind::Unavailable, "All entropy sources failed", err)) } else { Err(Error::new(ErrorKind::Unavailable, "No entropy sources available")) } } } impl CryptoRng for EntropyRng {} trait EntropySource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> where Self: Sized; fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error>; fn is_supported() -> bool { true } } #[allow(unused)] #[derive(Clone, Debug)] struct NoSource; #[allow(unused)] impl EntropySource for NoSource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { Err(Error::new(ErrorKind::Unavailable, "Source not supported")) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { unreachable!() } fn is_supported() -> bool { false } } #[cfg(feature="rand_os")] #[derive(Clone, Debug)] pub struct Os(rngs::OsRng); #[cfg(feature="rand_os")] impl EntropySource for Os { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::OsRng::new()?; rng.try_fill_bytes(dest)?; Ok(Os(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(not(feature="std"))] type Os = NoSource; type Custom = NoSource; #[cfg(not(target_arch = "wasm32"))] #[derive(Clone, Debug)] pub struct Jitter(rngs::JitterRng); #[cfg(not(target_arch = "wasm32"))] impl EntropySource for Jitter { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::JitterRng::new()?; rng.try_fill_bytes(dest)?; Ok(Jitter(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(target_arch = "wasm32")] type Jitter = NoSource; #[cfg(test)] mod test { use super::*; #[test] fn test_entropy() { let mut rng = EntropyRng::new(); let n = 
(rng.next_u32() ^ rng.next_u32()).count_ones(); assert!(n >= 2); } }
use rand_core::{RngCore, CryptoRng, Error, ErrorKind, impls}; #[allow(unused)] use rngs; #[derive(Debug)] pub struct EntropyRng { source: Source, } #[derive(Debug)] enum Source { Os(Os), Custom(Custom), Jitter(Jitter), None, } impl EntropyRng { pub fn new() -> Self { EntropyRng { source: Source::None } } } impl Default for EntropyRng { fn default() -> Self { EntropyRng::new() } } impl RngCore for EntropyRng { fn next_u32(&mut self) -> u32 { impls::next_u32_via_fill(self) } fn next_u64(&mut self) -> u64 { impls::next_u64_via_fill(self) } fn fill_bytes(&mut
Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: JitterRng failed: {}", err); reported_error = Some(err); }, } } else if Jitter::is_supported() { match Jitter::new_and_fill(dest) { Ok(jitter_rng) => { debug!("EntropyRng: using JitterRng"); self.source = Source::Jitter(jitter_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Some(err) = reported_error { Err(Error::with_cause(ErrorKind::Unavailable, "All entropy sources failed", err)) } else { Err(Error::new(ErrorKind::Unavailable, "No entropy sources available")) } } } impl CryptoRng for EntropyRng {} trait EntropySource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> where Self: Sized; fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error>; fn is_supported() -> bool { true } } #[allow(unused)] #[derive(Clone, Debug)] struct NoSource; #[allow(unused)] impl EntropySource for NoSource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { Err(Error::new(ErrorKind::Unavailable, "Source not supported")) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { unreachable!() } fn is_supported() -> bool { false } } #[cfg(feature="rand_os")] #[derive(Clone, Debug)] pub struct Os(rngs::OsRng); #[cfg(feature="rand_os")] impl EntropySource for Os { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::OsRng::new()?; rng.try_fill_bytes(dest)?; Ok(Os(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(not(feature="std"))] type Os = NoSource; type Custom = NoSource; #[cfg(not(target_arch = "wasm32"))] #[derive(Clone, Debug)] pub struct Jitter(rngs::JitterRng); #[cfg(not(target_arch = "wasm32"))] impl EntropySource for Jitter { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::JitterRng::new()?; rng.try_fill_bytes(dest)?; Ok(Jitter(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(target_arch = 
"wasm32")] type Jitter = NoSource; #[cfg(test)] mod test { use super::*; #[test] fn test_entropy() { let mut rng = EntropyRng::new(); let n = (rng.next_u32() ^ rng.next_u32()).count_ones(); assert!(n >= 2); } }
self, dest: &mut [u8]) { self.try_fill_bytes(dest).unwrap_or_else(|err| panic!("all entropy sources failed; first error: {}", err)) } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { let mut reported_error = None; if let Source::Os(ref mut os_rng) = self.source { match os_rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: OsRng failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Os::is_supported() { match Os::new_and_fill(dest) { Ok(os_rng) => { debug!("EntropyRng: using OsRng"); self.source = Source::Os(os_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Custom(ref mut rng) = self.source { match rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: custom entropy source failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Custom::is_supported() { match Custom::new_and_fill(dest) { Ok(custom) => { debug!("EntropyRng: using custom entropy source"); self.source = Source::Custom(custom); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Jitter(ref mut jitter_rng) = self.source { match jitter_rng.fill(dest) {
random
[ { "content": "/// Implement `next_u64` via `next_u32`, little-endian order.\n\npub fn next_u64_via_u32<R: RngCore + ?Sized>(rng: &mut R) -> u64 {\n\n // Use LE; we explicitly generate one value before the next.\n\n let x = u64::from(rng.next_u32());\n\n let y = u64::from(rng.next_u32());\n\n (y << 32) | x\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 0, "score": 277686.0770934721 }, { "content": "/// Implement `next_u64` via `fill_bytes`, little-endian order.\n\npub fn next_u64_via_fill<R: RngCore + ?Sized>(rng: &mut R) -> u64 {\n\n impl_uint_from_fill!(rng, u64, 8)\n\n}\n\n\n\n// TODO: implement tests for the above\n", "file_path": "rand_core/src/impls.rs", "rank": 1, "score": 247144.89470452548 }, { "content": "/// Implement `next_u32` via `fill_bytes`, little-endian order.\n\npub fn next_u32_via_fill<R: RngCore + ?Sized>(rng: &mut R) -> u32 {\n\n impl_uint_from_fill!(rng, u32, 4)\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 2, "score": 246980.78232232688 }, { "content": "// Returns the door the game host opens given our choice and knowledge of\n\n// where the car is. The game host will never open the door with the car.\n\nfn game_host_open<R: Rng>(car: u32, choice: u32, rng: &mut R) -> u32 {\n\n use rand::seq::SliceRandom;\n\n *free_doors(&[car, choice]).choose(rng).unwrap()\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 3, "score": 180214.4224328163 }, { "content": "/// Implement `fill_bytes` via `next_u64` and `next_u32`, little-endian order.\n\n///\n\n/// The fastest way to fill a slice is usually to work as long as possible with\n\n/// integers. 
That is why this method mostly uses `next_u64`, and only when\n\n/// there are 4 or less bytes remaining at the end of the slice it uses\n\n/// `next_u32` once.\n\npub fn fill_bytes_via_next<R: RngCore + ?Sized>(rng: &mut R, dest: &mut [u8]) {\n\n let mut left = dest;\n\n while left.len() >= 8 {\n\n let (l, r) = {left}.split_at_mut(8);\n\n left = r;\n\n let chunk: [u8; 8] = unsafe {\n\n transmute(rng.next_u64().to_le())\n\n };\n\n l.copy_from_slice(&chunk);\n\n }\n\n let n = left.len();\n\n if n > 4 {\n\n let chunk: [u8; 8] = unsafe {\n\n transmute(rng.next_u64().to_le())\n\n };\n\n left.copy_from_slice(&chunk[..n]);\n\n } else if n > 0 {\n\n let chunk: [u8; 4] = unsafe {\n\n transmute(rng.next_u32().to_le())\n\n };\n", "file_path": "rand_core/src/impls.rs", "rank": 4, "score": 178559.5046208826 }, { "content": "/// Implement `fill_bytes` by reading chunks from the output buffer of a block\n\n/// based RNG.\n\n///\n\n/// The return values are `(consumed_u64, filled_u8)`.\n\n/// `filled_u8` is the number of filled bytes in `dest`, which may be less than\n\n/// the length of `dest`.\n\n/// `consumed_u64` is the number of words consumed from `src`, which is the same\n\n/// as `filled_u8 / 8` rounded up.\n\n///\n\n/// See `fill_via_u32_chunks` for an example.\n\npub fn fill_via_u64_chunks(src: &[u64], dest: &mut [u8]) -> (usize, usize) {\n\n fill_via_chunks!(src, dest, u64, 8)\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 5, "score": 177903.27702381683 }, { "content": "/// Implement `fill_bytes` by reading chunks from the output buffer of a block\n\n/// based RNG.\n\n///\n\n/// The return values are `(consumed_u32, filled_u8)`.\n\n///\n\n/// `filled_u8` is the number of filled bytes in `dest`, which may be less than\n\n/// the length of `dest`.\n\n/// `consumed_u32` is the number of words consumed from `src`, which is the same\n\n/// as `filled_u8 / 4` rounded up.\n\n///\n\n/// # Example\n\n/// (from `IsaacRng`)\n\n///\n\n/// ```ignore\n\n/// fn 
fill_bytes(&mut self, dest: &mut [u8]) {\n\n/// let mut read_len = 0;\n\n/// while read_len < dest.len() {\n\n/// if self.index >= self.rsl.len() {\n\n/// self.isaac();\n\n/// }\n\n///\n\n/// let (consumed_u32, filled_u8) =\n\n/// impls::fill_via_u32_chunks(&mut self.rsl[self.index..],\n\n/// &mut dest[read_len..]);\n\n///\n\n/// self.index += consumed_u32;\n\n/// read_len += filled_u8;\n\n/// }\n\n/// }\n\n/// ```\n\npub fn fill_via_u32_chunks(src: &[u32], dest: &mut [u8]) -> (usize, usize) {\n\n fill_via_chunks!(src, dest, u32, 4)\n\n}\n\n\n", "file_path": "rand_core/src/impls.rs", "rank": 6, "score": 177747.4631505003 }, { "content": "pub fn map_err(err: io::Error) -> Error {\n\n match err.kind() {\n\n io::ErrorKind::Interrupted =>\n\n Error::new(ErrorKind::Transient, \"interrupted\"),\n\n io::ErrorKind::WouldBlock =>\n\n Error::with_cause(ErrorKind::NotReady,\n\n \"OS RNG not yet seeded\", err),\n\n _ => Error::with_cause(ErrorKind::Unavailable,\n\n \"error while opening random device\", err)\n\n }\n\n}\n", "file_path": "rand_os/src/random_device.rs", "rank": 7, "score": 172923.7032473347 }, { "content": "/// Retrieve the lazily-initialized thread-local random number generator,\n\n/// seeded by the system. Intended to be used in method chaining style,\n\n/// e.g. `thread_rng().gen::<i32>()`, or cached locally, e.g.\n\n/// `let mut rng = thread_rng();`. 
Invoked by the `Default` trait, making\n\n/// `ThreadRng::default()` equivelent.\n\n///\n\n/// For more information see [`ThreadRng`].\n\n///\n\n/// [`ThreadRng`]: rngs/struct.ThreadRng.html\n\npub fn thread_rng() -> ThreadRng {\n\n ThreadRng { rng: THREAD_RNG_KEY.with(|t| t.get()) }\n\n}\n\n\n\nimpl Default for ThreadRng {\n\n fn default() -> ThreadRng {\n\n ::prelude::thread_rng()\n\n }\n\n}\n\n\n\nimpl RngCore for ThreadRng {\n\n #[inline(always)]\n\n fn next_u32(&mut self) -> u32 {\n\n unsafe { (*self.rng).next_u32() }\n\n }\n\n\n\n #[inline(always)]\n\n fn next_u64(&mut self) -> u64 {\n\n unsafe { (*self.rng).next_u64() }\n\n }\n", "file_path": "src/rngs/thread.rs", "rank": 8, "score": 166163.445116088 }, { "content": "#[inline]\n\npub fn read_u64_into(src: &[u8], dst: &mut [u64]) {\n\n read_slice!(src, dst, 8, to_le);\n\n}\n\n\n", "file_path": "rand_core/src/le.rs", "rank": 9, "score": 161395.34576456586 }, { "content": "// Run a single simulation of the Monty Hall problem.\n\nfn simulate<R: Rng>(random_door: &Uniform<u32>, rng: &mut R)\n\n -> SimulationResult {\n\n let car = random_door.sample(rng);\n\n\n\n // This is our initial choice\n\n let mut choice = random_door.sample(rng);\n\n\n\n // The game host opens a door\n\n let open = game_host_open(car, choice, rng);\n\n\n\n // Shall we switch?\n\n let switch = rng.gen();\n\n if switch {\n\n choice = switch_door(choice, open);\n\n }\n\n\n\n SimulationResult { win: choice == car, switch }\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 10, "score": 161376.93073890248 }, { "content": "#[inline]\n\npub fn read_u32_into(src: &[u8], dst: &mut [u32]) {\n\n read_slice!(src, dst, 4, to_le);\n\n}\n\n\n\n/// Reads unsigned 64 bit integers from `src` into `dst`.\n\n/// Borrowed from the `byteorder` crate.\n", "file_path": "rand_core/src/le.rs", "rank": 11, "score": 161210.70049577756 }, { "content": "/// An automatically-implemented extension trait on [`RngCore`] providing high-level\n\n/// generic methods 
for sampling values and other convenience methods.\n\n///\n\n/// This is the primary trait to use when generating random values.\n\n///\n\n/// # Generic usage\n\n///\n\n/// The basic pattern is `fn foo<R: Rng + ?Sized>(rng: &mut R)`. Some\n\n/// things are worth noting here:\n\n///\n\n/// - Since `Rng: RngCore` and every `RngCore` implements `Rng`, it makes no\n\n/// difference whether we use `R: Rng` or `R: RngCore`.\n\n/// - The `+ ?Sized` un-bounding allows functions to be called directly on\n\n/// type-erased references; i.e. `foo(r)` where `r: &mut RngCore`. Without\n\n/// this it would be necessary to write `foo(&mut r)`.\n\n///\n\n/// An alternative pattern is possible: `fn foo<R: Rng>(rng: R)`. This has some\n\n/// trade-offs. It allows the argument to be consumed directly without a `&mut`\n\n/// (which is how `from_rng(thread_rng())` works); also it still works directly\n\n/// on references (including type-erased references). Unfortunately within the\n\n/// function `foo` it is not known whether `rng` is a reference type or not,\n\n/// hence many uses of `rng` require an extra reference, either explicitly\n\n/// (`distr.sample(&mut rng)`) or implicitly (`rng.gen()`); one may hope the\n\n/// optimiser can remove redundant references later.\n\n///\n\n/// Example:\n\n///\n\n/// ```\n\n/// # use rand::thread_rng;\n\n/// use rand::Rng;\n\n///\n\n/// fn foo<R: Rng + ?Sized>(rng: &mut R) -> f32 {\n\n/// rng.gen()\n\n/// }\n\n///\n\n/// # let v = foo(&mut thread_rng());\n\n/// ```\n\n///\n\n/// [`RngCore`]: trait.RngCore.html\n\npub trait Rng: RngCore {\n\n /// Return a random value supporting the [`Standard`] distribution.\n\n ///\n\n /// [`Standard`]: distributions/struct.Standard.html\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use rand::{thread_rng, Rng};\n\n ///\n\n /// let mut rng = thread_rng();\n\n /// let x: u32 = rng.gen();\n\n /// println!(\"{}\", x);\n\n /// println!(\"{:?}\", rng.gen::<(f64, bool)>());\n\n /// ```\n\n #[inline]\n\n fn 
gen<T>(&mut self) -> T where Standard: Distribution<T> {\n\n Standard.sample(self)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 12, "score": 159600.0604091896 }, { "content": "#[cfg(feature=\"std\")]\n\n#[inline(always)]\n\npub fn ziggurat<R: Rng + ?Sized, P, Z>(\n\n rng: &mut R,\n\n symmetric: bool,\n\n x_tab: ziggurat_tables::ZigTable,\n\n f_tab: ziggurat_tables::ZigTable,\n\n mut pdf: P,\n\n mut zero_case: Z)\n\n -> f64 where P: FnMut(f64) -> f64, Z: FnMut(&mut R, f64) -> f64 {\n\n use distributions::float::IntoFloat;\n\n loop {\n\n // As an optimisation we re-implement the conversion to a f64.\n\n // From the remaining 12 most significant bits we use 8 to construct `i`.\n\n // This saves us generating a whole extra random number, while the added\n\n // precision of using 64 bits for f64 does not buy us much.\n\n let bits = rng.next_u64();\n\n let i = bits as usize & 0xff;\n\n\n\n let u = if symmetric {\n\n // Convert to a value in the range [2,4) and substract to get [-1,1)\n\n // We can't convert to an open range directly, that would require\n", "file_path": "src/distributions/utils.rs", "rank": 13, "score": 154801.14700929658 }, { "content": "/// Randomly sample exactly `amount` indices from `0..length`, using an inplace\n\n/// partial Fisher-Yates method.\n\n/// Sample an amount of indices using an inplace partial fisher yates method.\n\n///\n\n/// This allocates the entire `length` of indices and randomizes only the first `amount`.\n\n/// It then truncates to `amount` and returns.\n\n///\n\n/// This method is not appropriate for large `length` and potentially uses a lot\n\n/// of memory; because of this we only implement for `u32` index (which improves\n\n/// performance in all cases).\n\n///\n\n/// Set-up is `O(length)` time and memory and shuffling is `O(amount)` time.\n\nfn sample_inplace<R>(rng: &mut R, length: u32, amount: u32) -> IndexVec\n\n where R: Rng + ?Sized,\n\n{\n\n debug_assert!(amount <= length);\n\n let mut indices: Vec<u32> = 
Vec::with_capacity(length as usize);\n\n indices.extend(0..length);\n\n for i in 0..amount {\n\n let j: u32 = rng.gen_range(i, length);\n\n indices.swap(i as usize, j as usize);\n\n }\n\n indices.truncate(amount as usize);\n\n debug_assert_eq!(indices.len(), amount as usize);\n\n IndexVec::from(indices)\n\n}\n\n\n", "file_path": "src/seq/index.rs", "rank": 14, "score": 154457.57691324252 }, { "content": "/// Randomly sample exactly `amount` indices from `0..length`, using Floyd's\n\n/// combination algorithm.\n\n///\n\n/// The output values are fully shuffled. (Overhead is under 50%.)\n\n///\n\n/// This implementation uses `O(amount)` memory and `O(amount^2)` time.\n\nfn sample_floyd<R>(rng: &mut R, length: u32, amount: u32) -> IndexVec\n\n where R: Rng + ?Sized,\n\n{\n\n // For small amount we use Floyd's fully-shuffled variant. For larger\n\n // amounts this is slow due to Vec::insert performance, so we shuffle\n\n // afterwards. Benchmarks show little overhead from extra logic.\n\n let floyd_shuffle = amount < 50;\n\n\n\n debug_assert!(amount <= length);\n\n let mut indices = Vec::with_capacity(amount as usize);\n\n for j in length - amount .. length {\n\n let t = rng.gen_range(0, j + 1);\n\n if floyd_shuffle {\n\n if let Some(pos) = indices.iter().position(|&x| x == t) {\n\n indices.insert(pos, j);\n\n continue;\n\n }\n\n } else {\n\n if indices.contains(&t) {\n\n indices.push(j);\n", "file_path": "src/seq/index.rs", "rank": 15, "score": 154454.00395566394 }, { "content": "pub fn read(dest: &mut [u8]) -> Result<(), Error> {\n\n // We expect this function only to be used after `random_device::open`\n\n // was succesful. 
Therefore we can assume that our memory was set with a\n\n // valid object.\n\n let mutex = unsafe { READ_RNG_FILE.as_ref().unwrap() };\n\n let mut guard = mutex.lock().unwrap();\n\n let file = (*guard).as_mut().unwrap();\n\n\n\n // Use `std::io::read_exact`, which retries on `ErrorKind::Interrupted`.\n\n file.read_exact(dest).map_err(|err| {\n\n Error::with_cause(ErrorKind::Unavailable,\n\n \"error reading random device\", err)\n\n })\n\n\n\n}\n\n\n", "file_path": "rand_os/src/random_device.rs", "rank": 16, "score": 149802.68291277005 }, { "content": "#[allow(unused)]\n\npub fn open<F>(path: &'static str, open_fn: F) -> Result<(), Error>\n\n where F: Fn(&'static str) -> Result<File, io::Error>\n\n{\n\n READ_RNG_ONCE.call_once(|| {\n\n unsafe { READ_RNG_FILE = Some(Mutex::new(None)) }\n\n });\n\n\n\n // We try opening the file outside the `call_once` fn because we cannot\n\n // clone the error, thus we must retry on failure.\n\n\n\n let mutex = unsafe { READ_RNG_FILE.as_ref().unwrap() };\n\n let mut guard = mutex.lock().unwrap();\n\n if (*guard).is_none() {\n\n info!(\"OsRng: opening random device {}\", path);\n\n let file = open_fn(path).map_err(map_err)?;\n\n *guard = Some(file);\n\n };\n\n Ok(())\n\n}\n\n\n", "file_path": "rand_os/src/random_device.rs", "rank": 17, "score": 144170.02578671346 }, { "content": "// Returns the door we switch to, given our current choice and\n\n// the open door. There will only be one valid door.\n\nfn switch_door(choice: u32, open: u32) -> u32 {\n\n free_doors(&[choice, open])[0]\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 18, "score": 134667.08612494095 }, { "content": "trait OsRngImpl where Self: Sized {\n\n // Create a new `OsRng` platform interface.\n\n fn new() -> Result<Self, Error>;\n\n\n\n // Fill a chunk with random bytes.\n\n fn fill_chunk(&mut self, dest: &mut [u8]) -> Result<(), Error>;\n\n\n\n // Test whether the OS RNG is initialized. 
This method may not be possible\n\n // to support cheaply (or at all) on all operating systems.\n\n //\n\n // If `blocking` is set, this will cause the OS the block execution until\n\n // its RNG is initialized.\n\n //\n\n // Random values that are read while this are stored in `dest`, the amount\n\n // of read bytes is returned.\n\n fn test_initialized(&mut self, _dest: &mut [u8], _blocking: bool)\n\n -> Result<usize, Error> { Ok(0) }\n\n\n\n // Maximum chunk size supported.\n\n fn max_chunk_size(&self) -> usize { ::std::usize::MAX }\n", "file_path": "rand_os/src/lib.rs", "rank": 19, "score": 133078.25187452248 }, { "content": "/// Randomly sample exactly `amount` distinct indices from `0..length`, and\n\n/// return them in random order (fully shuffled).\n\n///\n\n/// This method is used internally by the slice sampling methods, but it can\n\n/// sometimes be useful to have the indices themselves so this is provided as\n\n/// an alternative.\n\n///\n\n/// The implementation used is not specified; we automatically select the\n\n/// fastest available algorithm for the `length` and `amount` parameters\n\n/// (based on detailed profiling on an Intel Haswell CPU). Roughly speaking,\n\n/// complexity is `O(amount)`, except that when `amount` is small, performance\n\n/// is closer to `O(amount^2)`, and when `length` is close to `amount` then\n\n/// `O(length)`.\n\n///\n\n/// Note that performance is significantly better over `u32` indices than over\n\n/// `u64` indices. 
Because of this we hide the underlying type behind an\n\n/// abstraction, `IndexVec`.\n\n/// \n\n/// If an allocation-free `no_std` function is required, it is suggested\n\n/// to adapt the internal `sample_floyd` implementation.\n\n///\n\n/// Panics if `amount > length`.\n\npub fn sample<R>(rng: &mut R, length: usize, amount: usize) -> IndexVec\n\n where R: Rng + ?Sized,\n\n{\n\n if amount > length {\n\n panic!(\"`amount` of samples must be less than or equal to `length`\");\n\n }\n\n if length > (::core::u32::MAX as usize) {\n\n // We never want to use inplace here, but could use floyd's alg\n\n // Lazy version: always use the cache alg.\n\n return sample_rejection(rng, length, amount);\n\n }\n\n let amount = amount as u32;\n\n let length = length as u32;\n\n\n\n // Choice of algorithm here depends on both length and amount. See:\n\n // https://github.com/rust-random/rand/pull/479\n\n // We do some calculations with f32. Accuracy is not very important.\n\n\n\n if amount < 163 {\n\n const C: [[f32; 2]; 2] = [[1.6, 8.0/45.0], [10.0, 70.0/9.0]];\n", "file_path": "src/seq/index.rs", "rank": 20, "score": 132241.11883957265 }, { "content": "fn free_doors(blocked: &[u32]) -> Vec<u32> {\n\n (0..3).filter(|x| !blocked.contains(x)).collect()\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 21, "score": 128953.74988392837 }, { "content": "/// A marker trait used to indicate that an [`RngCore`] or [`BlockRngCore`]\n\n/// implementation is supposed to be cryptographically secure.\n\n/// \n\n/// *Cryptographically secure generators*, also known as *CSPRNGs*, should\n\n/// satisfy an additional properties over other generators: given the first\n\n/// *k* bits of an algorithm's output\n\n/// sequence, it should not be possible using polynomial-time algorithms to\n\n/// predict the next bit with probability significantly greater than 50%.\n\n/// \n\n/// Some generators may satisfy an additional property, however this is not\n\n/// required by this trait: if the 
CSPRNG's state is revealed, it should not be\n\n/// computationally-feasible to reconstruct output prior to this. Some other\n\n/// generators allow backwards-computation and are consided *reversible*.\n\n/// \n\n/// Note that this trait is provided for guidance only and cannot guarantee\n\n/// suitability for cryptographic applications. In general it should only be\n\n/// implemented for well-reviewed code implementing well-regarded algorithms.\n\n/// \n\n/// Note also that use of a `CryptoRng` does not protect against other\n\n/// weaknesses such as seeding from a weak entropy source or leaking state.\n\n/// \n\n/// [`RngCore`]: trait.RngCore.html\n\n/// [`BlockRngCore`]: ../rand_core/block/trait.BlockRngCore.html\n\npub trait CryptoRng {}\n\n\n", "file_path": "rand_core/src/lib.rs", "rank": 22, "score": 128689.19142497881 }, { "content": "/// The core of a random number generator.\n\n/// \n\n/// This trait encapsulates the low-level functionality common to all\n\n/// generators, and is the \"back end\", to be implemented by generators.\n\n/// End users should normally use [`Rng`] from the [rand] crate, which is\n\n/// automatically implemented for every type implementing `RngCore`.\n\n/// \n\n/// Three different methods for generating random data are provided since the\n\n/// optimal implementation of each is dependent on the type of generator. There\n\n/// is no required relationship between the output of each; e.g. 
many\n\n/// implementations of [`fill_bytes`] consume a whole number of `u32` or `u64`\n\n/// values and drop any remaining unused bytes.\n\n/// \n\n/// The [`try_fill_bytes`] method is a variant of [`fill_bytes`] allowing error\n\n/// handling; it is not deemed sufficiently useful to add equivalents for\n\n/// [`next_u32`] or [`next_u64`] since the latter methods are almost always used\n\n/// with algorithmic generators (PRNGs), which are normally infallible.\n\n/// \n\n/// Algorithmic generators implementing [`SeedableRng`] should normally have\n\n/// *portable, reproducible* output, i.e. fix Endianness when converting values\n\n/// to avoid platform differences, and avoid making any changes which affect\n\n/// output (except by communicating that the release has breaking changes).\n\n/// \n\n/// Typically implementators will implement only one of the methods available\n\n/// in this trait directly, then use the helper functions from the\n\n/// [`rand_core::impls`] module to implement the other methods.\n\n/// \n\n/// It is recommended that implementations also implement:\n\n/// \n\n/// - `Debug` with a custom implementation which *does not* print any internal\n\n/// state (at least, [`CryptoRng`]s should not risk leaking state through\n\n/// `Debug`).\n\n/// - `Serialize` and `Deserialize` (from Serde), preferably making Serde\n\n/// support optional at the crate level in PRNG libs.\n\n/// - `Clone`, if possible.\n\n/// - *never* implement `Copy` (accidental copies may cause repeated values).\n\n/// - *do not* implement `Default` for pseudorandom generators, but instead\n\n/// implement [`SeedableRng`], to guide users towards proper seeding.\n\n/// External / hardware RNGs can choose to implement `Default`.\n\n/// - `Eq` and `PartialEq` could be implemented, but are probably not useful.\n\n/// \n\n/// # Example\n\n/// \n\n/// A simple example, obviously not generating very *random* output:\n\n/// \n\n/// ```\n\n/// #![allow(dead_code)]\n\n/// use 
rand_core::{RngCore, Error, impls};\n\n/// \n\n/// struct CountingRng(u64);\n\n/// \n\n/// impl RngCore for CountingRng {\n\n/// fn next_u32(&mut self) -> u32 {\n\n/// self.next_u64() as u32\n\n/// }\n\n/// \n\n/// fn next_u64(&mut self) -> u64 {\n\n/// self.0 += 1;\n\n/// self.0\n\n/// }\n\n/// \n\n/// fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n/// impls::fill_bytes_via_next(self, dest)\n\n/// }\n\n/// \n\n/// fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n/// Ok(self.fill_bytes(dest))\n\n/// }\n\n/// }\n\n/// ```\n\n/// \n\n/// [rand]: https://crates.io/crates/rand\n\n/// [`Rng`]: ../rand/trait.Rng.html\n\n/// [`SeedableRng`]: trait.SeedableRng.html\n\n/// [`rand_core::impls`]: ../rand_core/impls/index.html\n\n/// [`try_fill_bytes`]: trait.RngCore.html#tymethod.try_fill_bytes\n\n/// [`fill_bytes`]: trait.RngCore.html#tymethod.fill_bytes\n\n/// [`next_u32`]: trait.RngCore.html#tymethod.next_u32\n\n/// [`next_u64`]: trait.RngCore.html#tymethod.next_u64\n\n/// [`CryptoRng`]: trait.CryptoRng.html\n\npub trait RngCore {\n\n /// Return the next random `u32`.\n\n ///\n\n /// RNGs must implement at least one method from this trait directly. In\n\n /// the case this method is not implemented directly, it can be implemented\n\n /// using `self.next_u64() as u32` or\n\n /// [via `fill_bytes`](../rand_core/impls/fn.next_u32_via_fill.html).\n\n fn next_u32(&mut self) -> u32;\n\n\n\n /// Return the next random `u64`.\n\n ///\n\n /// RNGs must implement at least one method from this trait directly. In\n\n /// the case this method is not implemented directly, it can be implemented\n\n /// [via `next_u32`](../rand_core/impls/fn.next_u64_via_u32.html) or\n\n /// [via `fill_bytes`](../rand_core/impls/fn.next_u64_via_fill.html).\n\n fn next_u64(&mut self) -> u64;\n\n\n\n /// Fill `dest` with random data.\n\n ///\n\n /// RNGs must implement at least one method from this trait directly. 
In\n", "file_path": "rand_core/src/lib.rs", "rank": 23, "score": 128484.42454452056 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[deprecated(since=\"0.6.0\", note=\"use SliceRandom::choose_multiple instead\")]\n\npub fn sample_slice<R, T>(rng: &mut R, slice: &[T], amount: usize) -> Vec<T>\n\n where R: Rng + ?Sized,\n\n T: Clone\n\n{\n\n let indices = index::sample(rng, slice.len(), amount).into_iter();\n\n\n\n let mut out = Vec::with_capacity(amount);\n\n out.extend(indices.map(|i| slice[i].clone()));\n\n out\n\n}\n\n\n\n/// Randomly sample exactly `amount` references from `slice`.\n\n///\n\n/// The references are non-repeating and in random order.\n\n///\n\n/// This implementation uses `O(amount)` time and memory.\n\n///\n\n/// Panics if `amount > slice.len()`\n\n///\n\n/// Deprecated: use [`SliceRandom::choose_multiple`] instead.\n\n/// \n\n/// [`SliceRandom::choose_multiple`]: trait.SliceRandom.html#method.choose_multiple\n", "file_path": "src/seq/mod.rs", "rank": 24, "score": 124433.22059753918 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[deprecated(since=\"0.6.0\", note=\"use SliceRandom::choose_multiple instead\")]\n\npub fn sample_slice_ref<'a, R, T>(rng: &mut R, slice: &'a [T], amount: usize) -> Vec<&'a T>\n\n where R: Rng + ?Sized\n\n{\n\n let indices = index::sample(rng, slice.len(), amount).into_iter();\n\n\n\n let mut out = Vec::with_capacity(amount);\n\n out.extend(indices.map(|i| &slice[i]));\n\n out\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[cfg(feature = \"alloc\")] use {Rng, SeedableRng};\n\n #[cfg(feature = \"alloc\")] use rngs::SmallRng;\n\n #[cfg(all(feature=\"alloc\", not(feature=\"std\")))]\n\n use alloc::vec::Vec;\n\n\n\n #[test]\n\n fn test_slice_choose() {\n", "file_path": "src/seq/mod.rs", "rank": 25, "score": 118254.54293577932 }, { "content": "pub trait WideningMultiply<RHS = Self> {\n\n type Output;\n\n\n\n fn wmul(self, x: RHS) -> Self::Output;\n\n}\n\n\n\nmacro_rules! 
wmul_impl {\n\n ($ty:ty, $wide:ty, $shift:expr) => {\n\n impl WideningMultiply for $ty {\n\n type Output = ($ty, $ty);\n\n\n\n #[inline(always)]\n\n fn wmul(self, x: $ty) -> Self::Output {\n\n let tmp = (self as $wide) * (x as $wide);\n\n ((tmp >> $shift) as $ty, tmp as $ty)\n\n }\n\n }\n\n };\n\n\n\n // simd bulk implementation\n", "file_path": "src/distributions/utils.rs", "rank": 26, "score": 118156.27115315947 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[deprecated(since=\"0.6.0\", note=\"use IteratorRandom::choose_multiple instead\")]\n\npub fn sample_iter<T, I, R>(rng: &mut R, iterable: I, amount: usize) -> Result<Vec<T>, Vec<T>>\n\n where I: IntoIterator<Item=T>,\n\n R: Rng + ?Sized,\n\n{\n\n use seq::IteratorRandom;\n\n let iter = iterable.into_iter();\n\n let result = iter.choose_multiple(rng, amount);\n\n if result.len() == amount {\n\n Ok(result)\n\n } else {\n\n Err(result)\n\n }\n\n}\n\n\n\n/// Randomly sample exactly `amount` values from `slice`.\n\n///\n\n/// The values are non-repeating and in random order.\n\n///\n\n/// This implementation uses `O(amount)` time and memory.\n\n///\n\n/// Panics if `amount > slice.len()`\n\n///\n\n/// Deprecated: use [`SliceRandom::choose_multiple`] instead.\n\n/// \n\n/// [`SliceRandom::choose_multiple`]: trait.SliceRandom.html#method.choose_multiple\n", "file_path": "src/seq/mod.rs", "rank": 27, "score": 115172.69736588118 }, { "content": "#[cfg(feature=\"std\")]\n\npub fn log_gamma(x: f64) -> f64 {\n\n // precalculated 6 coefficients for the first 6 terms of the series\n\n let coefficients: [f64; 6] = [\n\n 76.18009172947146,\n\n -86.50532032941677,\n\n 24.01409824083091,\n\n -1.231739572450155,\n\n 0.1208650973866179e-2,\n\n -0.5395239384953e-5,\n\n ];\n\n\n\n // (x+0.5)*ln(x+g+0.5)-(x+g+0.5)\n\n let tmp = x + 5.5;\n\n let log = (x + 0.5) * tmp.ln() - tmp;\n\n\n\n // the first few terms of the series for Ag(x)\n\n let mut a = 1.000000000190015;\n\n let mut denom = x;\n\n for coeff in &coefficients {\n\n 
denom += 1.0;\n", "file_path": "src/distributions/utils.rs", "rank": 30, "score": 110670.59607986457 }, { "content": "#[derive(Clone, Debug)]\n\nenum OsRngMethod {\n\n GetRandom,\n\n RandomDevice,\n\n}\n\n\n\nimpl OsRngImpl for OsRng {\n\n fn new() -> Result<OsRng, Error> {\n\n if is_getrandom_available() {\n\n return Ok(OsRng { method: OsRngMethod::GetRandom,\n\n initialized: false });\n\n }\n\n let open = |p| OpenOptions::new()\n\n .read(true)\n\n .custom_flags(libc::O_NONBLOCK)\n\n .open(p);\n\n random_device::open(\"/dev/random\", &open)?;\n\n Ok(OsRng { method: OsRngMethod::RandomDevice, initialized: false })\n\n }\n\n\n\n fn fill_chunk(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n", "file_path": "rand_os/src/solaris.rs", "rank": 31, "score": 107073.75089091275 }, { "content": "#[derive(Clone, Debug)]\n\nenum OsRngMethod {\n\n GetRandom,\n\n RandomDevice,\n\n}\n\n\n\nimpl OsRngImpl for OsRng {\n\n fn new() -> Result<OsRng, Error> {\n\n if is_getrandom_available() {\n\n return Ok(OsRng { method: OsRngMethod::GetRandom,\n\n initialized: false });\n\n }\n\n random_device::open(\"/dev/urandom\", &|p| File::open(p))?;\n\n Ok(OsRng { method: OsRngMethod::RandomDevice, initialized: false })\n\n }\n\n\n\n fn fill_chunk(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n match self.method {\n\n OsRngMethod::GetRandom => getrandom_try_fill(dest, false),\n\n OsRngMethod::RandomDevice => random_device::read(dest),\n\n }\n", "file_path": "rand_os/src/linux_android.rs", "rank": 32, "score": 104865.96554378611 }, { "content": "#[derive(Clone, Debug)]\n\nenum OsRngMethod {\n\n Browser,\n\n Node\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct OsRng(OsRngMethod);\n\n\n\nimpl OsRngImpl for OsRng {\n\n fn new() -> Result<OsRng, Error> {\n\n let result = js! 
{\n\n try {\n\n if (\n\n typeof self === \"object\" &&\n\n typeof self.crypto === \"object\" &&\n\n typeof self.crypto.getRandomValues === \"function\"\n\n ) {\n\n return { success: true, ty: 1 };\n\n }\n\n\n", "file_path": "rand_os/src/wasm32_stdweb.rs", "rank": 33, "score": 104865.96554378611 }, { "content": "#[cfg(feature=\"std\")]\n\npub trait FromEntropy: SeedableRng {\n\n /// Creates a new instance, automatically seeded with fresh entropy.\n\n ///\n\n /// Normally this will use `OsRng`, but if that fails `JitterRng` will be\n\n /// used instead. Both should be suitable for cryptography. It is possible\n\n /// that both entropy sources will fail though unlikely; failures would\n\n /// almost certainly be platform limitations or build issues, i.e. most\n\n /// applications targetting PC/mobile platforms should not need to worry\n\n /// about this failing.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If all entropy sources fail this will panic. If you need to handle\n\n /// errors, use the following code, equivalent aside from error handling:\n\n ///\n\n /// ```\n\n /// # use rand::Error;\n\n /// use rand::prelude::*;\n\n /// use rand::rngs::EntropyRng;\n\n ///\n", "file_path": "src/lib.rs", "rank": 34, "score": 102883.53122861845 }, { "content": "#[cfg(feature=\"std\")]\n\n#[inline]\n\npub fn random<T>() -> T where Standard: Distribution<T> {\n\n thread_rng().gen()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use rngs::mock::StepRng;\n\n use rngs::StdRng;\n\n use super::*;\n\n #[cfg(all(not(feature=\"std\"), feature=\"alloc\"))] use alloc::boxed::Box;\n\n\n\n pub struct TestRng<R> { inner: R }\n\n\n\n impl<R: RngCore> RngCore for TestRng<R> {\n\n fn next_u32(&mut self) -> u32 {\n\n self.inner.next_u32()\n\n }\n\n fn next_u64(&mut self) -> u64 {\n\n self.inner.next_u64()\n\n }\n", "file_path": "src/lib.rs", "rank": 35, "score": 102054.20260040995 }, { "content": "#[test]\n\nfn test_os_rng() {\n\n let mut r = OsRng::new().unwrap();\n\n\n\n r.next_u32();\n\n 
r.next_u64();\n\n\n\n let mut v1 = [0u8; 1000];\n\n r.fill_bytes(&mut v1);\n\n\n\n let mut v2 = [0u8; 1000];\n\n r.fill_bytes(&mut v2);\n\n\n\n let mut n_diff_bits = 0;\n\n for i in 0..v1.len() {\n\n n_diff_bits += (v1[i] ^ v2[i]).count_ones();\n\n }\n\n\n\n // Check at least 1 bit per byte differs. p(failure) < 1e-1000 with random input.\n\n assert!(n_diff_bits >= v1.len() as u32);\n\n}\n\n\n", "file_path": "rand_os/tests/mod.rs", "rank": 36, "score": 101427.43345148212 }, { "content": "/// A trait for RNGs which do not generate random numbers individually, but in\n\n/// blocks (typically `[u32; N]`). This technique is commonly used by\n\n/// cryptographic RNGs to improve performance.\n\n/// \n\n/// See the [module documentation](index.html) for details.\n\npub trait BlockRngCore {\n\n /// Results element type, e.g. `u32`.\n\n type Item;\n\n \n\n /// Results type. This is the 'block' an RNG implementing `BlockRngCore`\n\n /// generates, which will usually be an array like `[u32; 16]`.\n\n type Results: AsRef<[Self::Item]> + AsMut<[Self::Item]> + Default;\n\n\n\n /// Generate a new block of results.\n\n fn generate(&mut self, results: &mut Self::Results);\n\n}\n\n\n\n\n\n/// A wrapper type implementing [`RngCore`] for some type implementing\n\n/// [`BlockRngCore`] with `u32` array buffer; i.e. 
this can be used to implement\n\n/// a full RNG from just a `generate` function.\n\n///\n\n/// The `core` field may be accessed directly but the results buffer may not.\n\n/// PRNG implementations can simply use a type alias\n\n/// (`pub type MyRng = BlockRng<MyRngCore>;`) but might prefer to use a\n", "file_path": "rand_core/src/block.rs", "rank": 37, "score": 100582.8425762243 }, { "content": "#[cfg(not(any(target_arch = \"wasm32\", target_arch = \"asmjs\")))]\n\n#[test]\n\nfn test_os_rng_tasks() {\n\n use std::sync::mpsc::channel;\n\n use std::thread;\n\n\n\n let mut txs = vec!();\n\n for _ in 0..20 {\n\n let (tx, rx) = channel();\n\n txs.push(tx);\n\n\n\n thread::spawn(move|| {\n\n // wait until all the tasks are ready to go.\n\n rx.recv().unwrap();\n\n\n\n // deschedule to attempt to interleave things as much\n\n // as possible (XXX: is this a good test?)\n\n let mut r = OsRng::new().unwrap();\n\n thread::yield_now();\n\n let mut v = [0u8; 1000];\n\n\n\n for _ in 0..100 {\n", "file_path": "rand_os/tests/mod.rs", "rank": 38, "score": 99230.96700785347 }, { "content": "#[test]\n\nfn test_os_rng_empty() {\n\n let mut r = OsRng::new().unwrap();\n\n\n\n let mut empty = [0u8; 0];\n\n r.fill_bytes(&mut empty);\n\n}\n\n\n", "file_path": "rand_os/tests/mod.rs", "rank": 39, "score": 99230.96700785347 }, { "content": "#[test]\n\nfn test_os_rng_huge() {\n\n let mut r = OsRng::new().unwrap();\n\n\n\n let mut huge = [0u8; 100_000];\n\n r.fill_bytes(&mut huge);\n\n}\n\n\n", "file_path": "rand_os/tests/mod.rs", "rank": 40, "score": 99230.96700785347 }, { "content": "/// A random number generator that can be explicitly seeded.\n\n///\n\n/// This trait encapsulates the low-level functionality common to all\n\n/// pseudo-random number generators (PRNGs, or algorithmic generators).\n\n/// \n\n/// The [`rand::FromEntropy`] trait is automatically implemented for every type\n\n/// implementing `SeedableRng`, providing a convenient `from_entropy()`\n\n/// constructor.\n\n/// \n\n/// 
[`rand::FromEntropy`]: ../rand/trait.FromEntropy.html\n\npub trait SeedableRng: Sized {\n\n /// Seed type, which is restricted to types mutably-dereferencable as `u8`\n\n /// arrays (we recommend `[u8; N]` for some `N`).\n\n ///\n\n /// It is recommended to seed PRNGs with a seed of at least circa 100 bits,\n\n /// which means an array of `[u8; 12]` or greater to avoid picking RNGs with\n\n /// partially overlapping periods.\n\n ///\n\n /// For cryptographic RNG's a seed of 256 bits is recommended, `[u8; 32]`.\n\n ///\n\n ///\n\n /// # Implementing `SeedableRng` for RNGs with large seeds\n\n ///\n\n /// Note that the required traits `core::default::Default` and\n\n /// `core::convert::AsMut<u8>` are not implemented for large arrays\n\n /// `[u8; N]` with `N` > 32. To be able to implement the traits required by\n\n /// `SeedableRng` for RNGs with such large seeds, the newtype pattern can be\n\n /// used:\n\n ///\n\n /// ```\n", "file_path": "rand_core/src/lib.rs", "rank": 41, "score": 98348.3323241853 }, { "content": "#[bench]\n\nfn gen_u64_jitter(b: &mut Bencher) {\n\n let mut rng = JitterRng::new().unwrap();\n\n b.iter(|| {\n\n rng.gen::<u64>()\n\n });\n\n b.bytes = size_of::<u64>() as u64;\n\n}\n\n\n\nmacro_rules! 
init_gen {\n\n ($fnn:ident, $gen:ident) => {\n\n #[bench]\n\n fn $fnn(b: &mut Bencher) {\n\n let mut rng = XorShiftRng::from_entropy();\n\n b.iter(|| {\n\n let r2 = $gen::from_rng(&mut rng).unwrap();\n\n r2\n\n });\n\n }\n\n }\n\n}\n", "file_path": "benches/generators.rs", "rank": 42, "score": 95718.38363335877 }, { "content": "fn getrandom_try_fill(dest: &mut [u8], blocking: bool) -> Result<(), Error> {\n\n let result = getrandom(dest, blocking);\n\n if result == -1 || result == 0 {\n\n let err = io::Error::last_os_error();\n\n let kind = err.kind();\n\n if kind == io::ErrorKind::WouldBlock {\n\n return Err(Error::with_cause(\n\n ErrorKind::NotReady,\n\n \"getrandom not ready\",\n\n err,\n\n ));\n\n } else {\n\n return Err(Error::with_cause(\n\n ErrorKind::Unavailable,\n\n \"unexpected getrandom error\",\n\n err,\n\n ));\n\n }\n\n } else if result != dest.len() as i64 {\n\n return Err(Error::new(ErrorKind::Unavailable,\n\n \"unexpected getrandom error\"));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rand_os/src/solaris.rs", "rank": 43, "score": 94141.1997506332 }, { "content": "fn getrandom_try_fill(dest: &mut [u8], blocking: bool) -> Result<(), Error> {\n\n let mut read = 0;\n\n while read < dest.len() {\n\n let result = getrandom(&mut dest[read..], blocking);\n\n if result == -1 {\n\n let err = io::Error::last_os_error();\n\n let kind = err.kind();\n\n if kind == io::ErrorKind::Interrupted {\n\n continue;\n\n } else if kind == io::ErrorKind::WouldBlock {\n\n return Err(Error::with_cause(\n\n ErrorKind::NotReady,\n\n \"getrandom not ready\",\n\n err,\n\n ));\n\n } else {\n\n return Err(Error::with_cause(\n\n ErrorKind::Unavailable,\n\n \"unexpected getrandom error\",\n\n err,\n\n ));\n\n }\n\n } else {\n\n read += result as usize;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rand_os/src/linux_android.rs", "rank": 44, "score": 92612.46443158774 }, { "content": "/// Randomly sample exactly `amount` indices from `0..length`, using rejection\n\n/// 
sampling.\n\n/// \n\n/// Since `amount <<< length` there is a low chance of a random sample in\n\n/// `0..length` being a duplicate. We test for duplicates and resample where\n\n/// necessary. The algorithm is `O(amount)` time and memory.\n\nfn sample_rejection<R>(rng: &mut R, length: usize, amount: usize) -> IndexVec\n\n where R: Rng + ?Sized,\n\n{\n\n debug_assert!(amount < length);\n\n #[cfg(feature=\"std\")] let mut cache = HashSet::with_capacity(amount);\n\n #[cfg(not(feature=\"std\"))] let mut cache = BTreeSet::new();\n\n let distr = Uniform::new(0, length);\n\n let mut indices = Vec::with_capacity(amount);\n\n for _ in 0..amount {\n\n let mut pos = distr.sample(rng);\n\n while !cache.insert(pos) {\n\n pos = distr.sample(rng);\n\n }\n\n indices.push(pos);\n\n }\n\n\n\n debug_assert_eq!(indices.len(), amount);\n\n IndexVec::from(indices)\n\n}\n\n\n", "file_path": "src/seq/index.rs", "rank": 45, "score": 88985.81298037224 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum GammaRepr {\n\n Large(GammaLargeShape),\n\n One(Exp),\n\n Small(GammaSmallShape)\n\n}\n\n\n\n// These two helpers could be made public, but saving the\n\n// match-on-Gamma-enum branch from using them directly (e.g. 
if one\n\n// knows that the shape is always > 1) doesn't appear to be much\n\n// faster.\n\n\n\n/// Gamma distribution where the shape parameter is less than 1.\n\n///\n\n/// Note, samples from this require a compulsory floating-point `pow`\n\n/// call, which makes it significantly slower than sampling from a\n\n/// gamma distribution where the shape parameter is greater than or\n\n/// equal to 1.\n\n///\n\n/// See `Gamma` for sampling from a Gamma distribution with general\n\n/// shape parameters.\n", "file_path": "src/distributions/gamma.rs", "rank": 46, "score": 70243.80382931154 }, { "content": "struct SimulationResult {\n\n win: bool,\n\n switch: bool,\n\n}\n\n\n", "file_path": "examples/monty-hall.rs", "rank": 47, "score": 70243.80382931154 }, { "content": "// Entropy collector state.\n\n// These values are not necessary to preserve across runs.\n\nstruct EcState {\n\n // Previous time stamp to determine the timer delta\n\n prev_time: u64,\n\n // Deltas used for the stuck test\n\n last_delta: i32,\n\n last_delta2: i32,\n\n // Memory for the Memory Access noise source\n\n mem: [u8; MEMORY_SIZE],\n\n}\n\n\n\nimpl EcState {\n\n // Stuck test by checking the:\n\n // - 1st derivation of the jitter measurement (time delta)\n\n // - 2nd derivation of the jitter measurement (delta of time deltas)\n\n // - 3rd derivation of the jitter measurement (delta of delta of time\n\n // deltas)\n\n //\n\n // All values must always be non-zero.\n\n // This test is a heuristic to see whether the last measurement holds\n\n // entropy.\n", "file_path": "src/rngs/jitter.rs", "rank": 48, "score": 70243.80382931154 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct GammaLargeShape {\n\n scale: f64,\n\n c: f64,\n\n d: f64\n\n}\n\n\n\nimpl Gamma {\n\n /// Construct an object representing the `Gamma(shape, scale)`\n\n /// distribution.\n\n ///\n\n /// Panics if `shape <= 0` or `scale <= 0`.\n\n #[inline]\n\n pub fn new(shape: f64, scale: f64) -> Gamma {\n\n assert!(shape > 0.0, 
\"Gamma::new called with shape <= 0\");\n\n assert!(scale > 0.0, \"Gamma::new called with scale <= 0\");\n\n\n\n let repr = if shape == 1.0 {\n\n One(Exp::new(1.0 / scale))\n\n } else if shape < 1.0 {\n\n Small(GammaSmallShape::new_raw(shape, scale))\n", "file_path": "src/distributions/gamma.rs", "rank": 49, "score": 69000.42098800447 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct GammaSmallShape {\n\n inv_shape: f64,\n\n large_shape: GammaLargeShape\n\n}\n\n\n\n/// Gamma distribution where the shape parameter is larger than 1.\n\n///\n\n/// See `Gamma` for sampling from a Gamma distribution with general\n\n/// shape parameters.\n", "file_path": "src/distributions/gamma.rs", "rank": 50, "score": 69000.42098800447 }, { "content": "#[cfg(any(feature = \"std\", rustc_1_25))]\n\n#[derive(Debug, Copy, Clone)]\n\nenum UniformDurationMode {\n\n Small {\n\n secs: u64,\n\n nanos: Uniform<u32>,\n\n },\n\n Medium {\n\n nanos: Uniform<u64>,\n\n },\n\n Large {\n\n max_secs: u64,\n\n max_nanos: u32,\n\n secs: Uniform<u64>,\n\n }\n\n}\n\n\n\n#[cfg(any(feature = \"std\", rustc_1_25))]\n\nimpl SampleUniform for Duration {\n\n type Sampler = UniformDuration;\n\n}\n\n\n", "file_path": "src/distributions/uniform.rs", "rank": 51, "score": 69000.42098800447 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum ChiSquaredRepr {\n\n // k == 1, Gamma(alpha, ..) is particularly slow for alpha < 1,\n\n // e.g. when alpha = 1/2 as it would be for this case, so special-\n\n // casing and using the definition of N(0,1)^2 is faster.\n\n DoFExactlyOne,\n\n DoFAnythingElse(Gamma),\n\n}\n\n\n\nimpl ChiSquared {\n\n /// Create a new chi-squared distribution with degrees-of-freedom\n\n /// `k`. 
Panics if `k < 0`.\n\n pub fn new(k: f64) -> ChiSquared {\n\n let repr = if k == 1.0 {\n\n DoFExactlyOne\n\n } else {\n\n assert!(k > 0.0, \"ChiSquared::new called with `k` < 0\");\n\n DoFAnythingElse(Gamma::new(0.5 * k, 2.0))\n\n };\n\n ChiSquared { repr }\n\n }\n", "file_path": "src/distributions/gamma.rs", "rank": 52, "score": 69000.42098800447 }, { "content": "enum SecRandom {}\n\n\n\n#[allow(non_upper_case_globals)]\n\nconst kSecRandomDefault: *const SecRandom = 0 as *const SecRandom;\n\n\n\n#[link(name = \"Security\", kind = \"framework\")]\n\nextern {\n\n fn SecRandomCopyBytes(rnd: *const SecRandom,\n\n count: size_t, bytes: *mut u8) -> c_int;\n\n}\n\n\n\nimpl OsRngImpl for OsRng {\n\n fn new() -> Result<OsRng, Error> { Ok(OsRng) }\n\n\n\n fn fill_chunk(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n let ret = unsafe {\n\n SecRandomCopyBytes(kSecRandomDefault,\n\n dest.len() as size_t,\n\n dest.as_mut_ptr())\n\n };\n", "file_path": "rand_os/src/macos.rs", "rank": 53, "score": 69000.42098800447 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n\n\n let ac = autocfg::new();\n\n ac.emit_rustc_version(1, 25);\n\n ac.emit_rustc_version(1, 26);\n\n ac.emit_rustc_version(1, 27);\n\n}\n", "file_path": "build.rs", "rank": 54, "score": 68763.46236026523 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n let ac = autocfg::new();\n\n ac.emit_rustc_version(1, 26);\n\n}\n", "file_path": "rand_chacha/build.rs", "rank": 55, "score": 65881.28381203327 }, { "content": "fn main() {\n\n // The estimation will be more accurate with more simulations\n\n let num_simulations = 10000;\n\n\n\n let mut rng = rand::thread_rng();\n\n let random_door = Uniform::new(0u32, 3);\n\n\n\n let (mut switch_wins, mut switch_losses) = (0, 0);\n\n let (mut keep_wins, mut keep_losses) = (0, 0);\n\n\n\n println!(\"Running {} simulations...\", num_simulations);\n\n for _ in 0..num_simulations {\n\n let result = 
simulate(&random_door, &mut rng);\n\n\n\n match (result.win, result.switch) {\n\n (true, true) => switch_wins += 1,\n\n (true, false) => keep_wins += 1,\n\n (false, true) => switch_losses += 1,\n\n (false, false) => keep_losses += 1,\n\n }\n", "file_path": "examples/monty-hall.rs", "rank": 56, "score": 65881.28381203327 }, { "content": "#[test]\n\nfn unit_sphere() {\n\n const N_DIM: usize = 3;\n\n let h = Histogram100::with_const_width(-1., 1.);\n\n let mut histograms = [h.clone(), h.clone(), h];\n\n let dist = rand::distributions::UnitSphereSurface::new();\n\n let mut rng = rand::rngs::SmallRng::from_entropy();\n\n for _ in 0..N_SAMPLES {\n\n let v = dist.sample(&mut rng);\n\n for i in 0..N_DIM {\n\n histograms[i].add(v[i]).map_err(\n\n |e| { println!(\"v: {}\", v[i]); e }\n\n ).unwrap();\n\n }\n\n }\n\n for h in &histograms {\n\n let sum: u64 = h.bins().iter().sum();\n\n println!(\"{:?}\", h);\n\n for &b in h.bins() {\n\n let p = (b as f64) / (sum as f64);\n\n assert!((p - 1.0 / (N_BINS as f64)).abs() < TOL, \"{}\", p);\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/uniformity.rs", "rank": 57, "score": 65881.28381203327 }, { "content": "fn main() {\n\n let range = Uniform::new(-1.0f64, 1.0);\n\n let mut rng = rand::thread_rng();\n\n\n\n let total = 1_000_000;\n\n let mut in_circle = 0;\n\n\n\n for _ in 0..total {\n\n let a = range.sample(&mut rng);\n\n let b = range.sample(&mut rng);\n\n if a*a + b*b <= 1.0 {\n\n in_circle += 1;\n\n }\n\n }\n\n\n\n // prints something close to 3.14159...\n\n println!(\"π is approximately {}\", 4. 
* (in_circle as f64) / (total as f64));\n\n}\n", "file_path": "examples/monte-carlo.rs", "rank": 58, "score": 65881.28381203327 }, { "content": "#[test]\n\nfn unit_circle() {\n\n use ::std::f64::consts::PI;\n\n let mut h = Histogram100::with_const_width(-PI, PI);\n\n let dist = rand::distributions::UnitCircle::new();\n\n let mut rng = rand::rngs::SmallRng::from_entropy();\n\n for _ in 0..N_SAMPLES {\n\n let v = dist.sample(&mut rng);\n\n h.add(v[0].atan2(v[1])).unwrap();\n\n }\n\n let sum: u64 = h.bins().iter().sum();\n\n println!(\"{:?}\", h);\n\n for &b in h.bins() {\n\n let p = (b as f64) / (sum as f64);\n\n assert!((p - 1.0 / (N_BINS as f64)).abs() < TOL, \"{}\", p);\n\n }\n\n}\n", "file_path": "tests/uniformity.rs", "rank": 59, "score": 65881.28381203327 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n let ac = autocfg::new();\n\n ac.emit_rustc_version(1, 26);\n\n}\n", "file_path": "rand_pcg/build.rs", "rank": 60, "score": 65881.28381203327 }, { "content": "/// Trait for casting types to byte slices\n\n///\n\n/// This is used by the [`fill`] and [`try_fill`] methods.\n\n///\n\n/// [`fill`]: trait.Rng.html#method.fill\n\n/// [`try_fill`]: trait.Rng.html#method.try_fill\n\npub trait AsByteSliceMut {\n\n /// Return a mutable reference to self as a byte slice\n\n fn as_byte_slice_mut(&mut self) -> &mut [u8];\n\n\n\n /// Call `to_le` on each element (i.e. byte-swap on Big Endian platforms).\n\n fn to_le(&mut self);\n\n}\n\n\n\nimpl AsByteSliceMut for [u8] {\n\n fn as_byte_slice_mut(&mut self) -> &mut [u8] {\n\n self\n\n }\n\n\n\n fn to_le(&mut self) {}\n\n}\n\n\n\nmacro_rules! impl_as_byte_slice {\n\n ($t:ty) => {\n\n impl AsByteSliceMut for [$t] {\n\n fn as_byte_slice_mut(&mut self) -> &mut [u8] {\n", "file_path": "src/lib.rs", "rank": 61, "score": 64709.412330218176 }, { "content": "/// Extension trait on slices, providing random mutation and sampling methods.\n\n/// \n\n/// An implementation is provided for slices. 
This may also be implementable for\n\n/// other types.\n\npub trait SliceRandom {\n\n /// The element type.\n\n type Item;\n\n\n\n /// Returns a reference to one random element of the slice, or `None` if the\n\n /// slice is empty.\n\n /// \n\n /// Depending on the implementation, complexity is expected to be `O(1)`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use rand::thread_rng;\n\n /// use rand::seq::SliceRandom;\n\n ///\n\n /// let choices = [1, 2, 4, 8, 16, 32];\n\n /// let mut rng = thread_rng();\n\n /// println!(\"{:?}\", choices.choose(&mut rng));\n\n /// assert_eq!(choices[..0].choose(&mut rng), None);\n\n /// ```\n", "file_path": "src/seq/mod.rs", "rank": 62, "score": 64700.68108868132 }, { "content": "#[allow(non_camel_case_types)]\n\ntype w64 = w<u64>;\n\n\n\nconst RAND_SIZE_LEN: usize = 8;\n\nconst RAND_SIZE: usize = 1 << RAND_SIZE_LEN;\n\n\n\n/// A random number generator that uses ISAAC-64, the 64-bit variant of the\n\n/// ISAAC algorithm.\n\n///\n\n/// ISAAC stands for \"Indirection, Shift, Accumulate, Add, and Count\" which are\n\n/// the principal bitwise operations employed. It is the most advanced of a\n\n/// series of array based random number generator designed by Robert Jenkins\n\n/// in 1996[^1].\n\n///\n\n/// ISAAC-64 is mostly similar to ISAAC. Because it operates on 64-bit integers\n\n/// instead of 32-bit, it uses twice as much memory to hold its state and\n\n/// results. 
Also it uses different constants for shifts and indirect indexing,\n\n/// optimized to give good results for 64bit arithmetic.\n\n///\n\n/// ISAAC-64 is notably fast and produces excellent quality random numbers for\n\n/// non-cryptographic applications.\n", "file_path": "rand_isaac/src/isaac64.rs", "rank": 63, "score": 63914.28264642498 }, { "content": "#[allow(non_camel_case_types)]\n\ntype w32 = w<u32>;\n\n\n\nconst RAND_SIZE_LEN: usize = 8;\n\nconst RAND_SIZE: usize = 1 << RAND_SIZE_LEN;\n\n\n\n/// A random number generator that uses the ISAAC algorithm.\n\n///\n\n/// ISAAC stands for \"Indirection, Shift, Accumulate, Add, and Count\" which are\n\n/// the principal bitwise operations employed. It is the most advanced of a\n\n/// series of array based random number generator designed by Robert Jenkins\n\n/// in 1996[^1][^2].\n\n///\n\n/// ISAAC is notably fast and produces excellent quality random numbers for\n\n/// non-cryptographic applications.\n\n///\n\n/// In spite of being designed with cryptographic security in mind, ISAAC hasn't\n\n/// been stringently cryptanalyzed and thus cryptographers do not not\n\n/// consensually trust it to be secure. 
When looking for a secure RNG, prefer\n\n/// [`Hc128Rng`] instead, which, like ISAAC, is an array-based RNG and one of\n\n/// the stream-ciphers selected the by eSTREAM contest.\n", "file_path": "rand_isaac/src/isaac.rs", "rank": 64, "score": 63677.92287591977 }, { "content": "#[test]\n\nfn test_read() {\n\n let bytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n \n\n let mut buf = [0u32; 4];\n\n read_u32_into(&bytes, &mut buf);\n\n assert_eq!(buf[0], 0x04030201);\n\n assert_eq!(buf[3], 0x100F0E0D);\n\n \n\n let mut buf = [0u32; 3];\n\n read_u32_into(&bytes[1..13], &mut buf); // unaligned\n\n assert_eq!(buf[0], 0x05040302);\n\n assert_eq!(buf[2], 0x0D0C0B0A);\n\n \n\n let mut buf = [0u64; 2];\n\n read_u64_into(&bytes, &mut buf);\n\n assert_eq!(buf[0], 0x0807060504030201);\n\n assert_eq!(buf[1], 0x100F0E0D0C0B0A09);\n\n \n\n let mut buf = [0u64; 1];\n\n read_u64_into(&bytes[7..15], &mut buf); // unaligned\n\n assert_eq!(buf[0], 0x0F0E0D0C0B0A0908);\n\n}\n", "file_path": "rand_core/src/le.rs", "rank": 65, "score": 63342.11111807208 }, { "content": "/// Types (distributions) that can be used to create a random instance of `T`.\n\n///\n\n/// It is possible to sample from a distribution through both the\n\n/// `Distribution` and [`Rng`] traits, via `distr.sample(&mut rng)` and\n\n/// `rng.sample(distr)`. 
They also both offer the [`sample_iter`] method, which\n\n/// produces an iterator that samples from the distribution.\n\n///\n\n/// All implementations are expected to be immutable; this has the significant\n\n/// advantage of not needing to consider thread safety, and for most\n\n/// distributions efficient state-less sampling algorithms are available.\n\n///\n\n/// [`Rng`]: ../trait.Rng.html\n\n/// [`sample_iter`]: trait.Distribution.html#method.sample_iter\n\npub trait Distribution<T> {\n\n /// Generate a random value of `T`, using `rng` as the source of randomness.\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> T;\n\n\n\n /// Create an iterator that generates random values of `T`, using `rng` as\n\n /// the source of randomness.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use rand::thread_rng;\n\n /// use rand::distributions::{Distribution, Alphanumeric, Uniform, Standard};\n\n ///\n\n /// let mut rng = thread_rng();\n\n ///\n\n /// // Vec of 16 x f32:\n\n /// let v: Vec<f32> = Standard.sample_iter(&mut rng).take(16).collect();\n\n ///\n\n /// // String:\n\n /// let s: String = Alphanumeric.sample_iter(&mut rng).take(7).collect();\n", "file_path": "src/distributions/mod.rs", "rank": 66, "score": 62479.39396432147 }, { "content": "#[test]\n\nfn test_xorshift_clone() {\n\n let seed = [1,2,3,4, 5,5,7,8, 8,7,6,5, 4,3,2,1];\n\n let mut rng1 = XorShiftRng::from_seed(seed);\n\n let mut rng2 = rng1.clone();\n\n for _ in 0..16 {\n\n assert_eq!(rng1.next_u64(), rng2.next_u64());\n\n }\n\n}\n\n\n", "file_path": "rand_xorshift/tests/mod.rs", "rank": 67, "score": 62182.57004198366 }, { "content": "#[cfg(feature=\"serde1\")]\n\n#[test]\n\nfn test_mcg128xsl64_serde() {\n\n use bincode;\n\n use std::io::{BufWriter, BufReader};\n\n\n\n let mut rng = Mcg128Xsl64::seed_from_u64(0);\n\n\n\n let buf: Vec<u8> = Vec::new();\n\n let mut buf = BufWriter::new(buf);\n\n bincode::serialize_into(&mut buf, &rng).expect(\"Could not serialize\");\n\n\n\n let buf = 
buf.into_inner().unwrap();\n\n let mut read = BufReader::new(&buf[..]);\n\n let mut deserialized: Mcg128Xsl64 = bincode::deserialize_from(&mut read)\n\n .expect(\"Could not deserialize\");\n\n\n\n for _ in 0..16 {\n\n assert_eq!(rng.next_u64(), deserialized.next_u64());\n\n }\n\n}\n", "file_path": "rand_pcg/tests/mcg128xsl64.rs", "rank": 68, "score": 62182.57004198366 }, { "content": "#[test]\n\nfn test_lcg64xsh32_construction() {\n\n // Test that various construction techniques produce a working RNG.\n\n let seed = [1,2,3,4, 5,6,7,8, 9,10,11,12, 13,14,15,16];\n\n let mut rng1 = Lcg64Xsh32::from_seed(seed);\n\n assert_eq!(rng1.next_u64(), 1204678643940597513);\n\n\n\n let mut rng2 = Lcg64Xsh32::from_rng(&mut rng1).unwrap();\n\n assert_eq!(rng2.next_u64(), 12384929573776311845);\n\n\n\n let mut rng3 = Lcg64Xsh32::seed_from_u64(0);\n\n assert_eq!(rng3.next_u64(), 18195738587432868099);\n\n\n\n // This is the same as Lcg64Xsh32, so we only have a single test:\n\n let mut rng4 = Pcg32::seed_from_u64(0);\n\n assert_eq!(rng4.next_u64(), 18195738587432868099);\n\n}\n\n\n", "file_path": "rand_pcg/tests/lcg64xsh32.rs", "rank": 69, "score": 62182.57004198366 }, { "content": "#[cfg(feature=\"serde1\")]\n\n#[test]\n\nfn test_xorshift_serde() {\n\n use bincode;\n\n use std::io::{BufWriter, BufReader};\n\n\n\n let seed = [1,2,3,4, 5,6,7,8, 9,10,11,12, 13,14,15,16];\n\n let mut rng = XorShiftRng::from_seed(seed);\n\n\n\n let buf: Vec<u8> = Vec::new();\n\n let mut buf = BufWriter::new(buf);\n\n bincode::serialize_into(&mut buf, &rng).expect(\"Could not serialize\");\n\n\n\n let buf = buf.into_inner().unwrap();\n\n let mut read = BufReader::new(&buf[..]);\n\n let mut deserialized: XorShiftRng = bincode::deserialize_from(&mut read)\n\n .expect(\"Could not deserialize\");\n\n\n\n for _ in 0..16 {\n\n assert_eq!(rng.next_u64(), deserialized.next_u64());\n\n }\n\n}\n", "file_path": "rand_xorshift/tests/mod.rs", "rank": 70, "score": 62182.57004198366 }, { "content": 
"#[cfg(feature=\"serde1\")]\n\n#[test]\n\nfn test_lcg64xsh32_serde() {\n\n use bincode;\n\n use std::io::{BufWriter, BufReader};\n\n\n\n let mut rng = Lcg64Xsh32::seed_from_u64(0);\n\n\n\n let buf: Vec<u8> = Vec::new();\n\n let mut buf = BufWriter::new(buf);\n\n bincode::serialize_into(&mut buf, &rng).expect(\"Could not serialize\");\n\n\n\n let buf = buf.into_inner().unwrap();\n\n let mut read = BufReader::new(&buf[..]);\n\n let mut deserialized: Lcg64Xsh32 = bincode::deserialize_from(&mut read)\n\n .expect(\"Could not deserialize\");\n\n\n\n for _ in 0..16 {\n\n assert_eq!(rng.next_u64(), deserialized.next_u64());\n\n }\n\n}\n", "file_path": "rand_pcg/tests/lcg64xsh32.rs", "rank": 71, "score": 62182.57004198366 }, { "content": "#[test]\n\nfn test_mcg128xsl64_construction() {\n\n // Test that various construction techniques produce a working RNG.\n\n let seed = [1,2,3,4, 5,6,7,8, 9,10,11,12, 13,14,15,16];\n\n let mut rng1 = Mcg128Xsl64::from_seed(seed);\n\n assert_eq!(rng1.next_u64(), 7071994460355047496);\n\n\n\n let mut rng2 = Mcg128Xsl64::from_rng(&mut rng1).unwrap();\n\n assert_eq!(rng2.next_u64(), 12300796107712034932);\n\n\n\n let mut rng3 = Mcg128Xsl64::seed_from_u64(0);\n\n assert_eq!(rng3.next_u64(), 6198063878555692194);\n\n\n\n // This is the same as Mcg128Xsl64, so we only have a single test:\n\n let mut rng4 = Pcg64Mcg::seed_from_u64(0);\n\n assert_eq!(rng4.next_u64(), 6198063878555692194);\n\n}\n\n\n", "file_path": "rand_pcg/tests/mcg128xsl64.rs", "rank": 72, "score": 62182.57004198366 }, { "content": "#[test]\n\nfn test_xorshift_construction() {\n\n // Test that various construction techniques produce a working RNG.\n\n let seed = [1,2,3,4, 5,6,7,8, 9,10,11,12, 13,14,15,16];\n\n let mut rng1 = XorShiftRng::from_seed(seed);\n\n assert_eq!(rng1.next_u64(), 4325440999699518727);\n\n\n\n let _rng2 = XorShiftRng::from_rng(rng1).unwrap();\n\n // Note: we cannot test the state of _rng2 because from_rng does not\n\n // fix Endianness. 
This is allowed in the trait specification.\n\n}\n\n\n", "file_path": "rand_xorshift/tests/mod.rs", "rank": 73, "score": 62182.57004198366 }, { "content": "#[cfg(not(all(all(rustc_1_26, not(target_os = \"emscripten\")), target_pointer_width = \"64\")))]\n\ntype Rng = ::rand_pcg::Pcg32;\n\n\n\n/// An RNG recommended when small state, cheap initialization and good\n\n/// performance are required. The PRNG algorithm in `SmallRng` is chosen to be\n\n/// efficient on the current platform, **without consideration for cryptography\n\n/// or security**. The size of its state is much smaller than for [`StdRng`].\n\n///\n\n/// Reproducibility of output from this generator is however not required, thus\n\n/// future library versions may use a different internal generator with\n\n/// different output. Further, this generator may not be portable and can\n\n/// produce different output depending on the architecture. If you require\n\n/// reproducible output, use a named RNG. Refer to the documentation on the\n\n/// [`prng` module](../prng/index.html).\n\n///\n\n/// The current algorithm is [`Pcg64Mcg`] on 64-bit platforms with Rust version\n\n/// 1.26 and later, or [`Pcg32`] otherwise.\n\n///\n\n/// # Examples\n\n///\n\n/// Initializing `SmallRng` with a random seed can be done using [`FromEntropy`]:\n", "file_path": "src/rngs/small.rs", "rank": 74, "score": 62088.76808883964 }, { "content": "#[derive(Clone)]\n\nstruct UnhintedIterator<I: Iterator + Clone> {\n\n iter: I,\n\n}\n\nimpl<I: Iterator + Clone> Iterator for UnhintedIterator<I> {\n\n type Item = I::Item;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.iter.next()\n\n }\n\n}\n\n\n", "file_path": "benches/seq.rs", "rank": 75, "score": 61826.67654983001 }, { "content": "#[derive(Debug)]\n\nstruct ReseedingCore<R, Rsdr> {\n\n inner: R,\n\n reseeder: Rsdr,\n\n threshold: i64,\n\n bytes_until_reseed: i64,\n\n fork_counter: usize,\n\n}\n\n\n\nimpl<R, Rsdr> BlockRngCore for ReseedingCore<R, Rsdr>\n\nwhere R: 
BlockRngCore + SeedableRng,\n\n Rsdr: RngCore\n\n{\n\n type Item = <R as BlockRngCore>::Item;\n\n type Results = <R as BlockRngCore>::Results;\n\n\n\n fn generate(&mut self, results: &mut Self::Results) {\n\n let global_fork_counter = fork::get_fork_counter();\n\n if self.bytes_until_reseed <= 0 ||\n\n self.is_forked(global_fork_counter) {\n\n // We get better performance by not calling only `reseed` here\n", "file_path": "src/rngs/adapter/reseeding.rs", "rank": 76, "score": 61501.83933556071 }, { "content": "/// Helper trait handling actual uniform sampling.\n\n///\n\n/// See the [module documentation] on how to implement [`Uniform`] range\n\n/// sampling for a custom type.\n\n///\n\n/// Implementation of [`sample_single`] is optional, and is only useful when\n\n/// the implementation can be faster than `Self::new(low, high).sample(rng)`.\n\n///\n\n/// [module documentation]: index.html\n\n/// [`Uniform`]: struct.Uniform.html\n\n/// [`sample_single`]: trait.UniformSampler.html#method.sample_single\n\npub trait UniformSampler: Sized {\n\n /// The type sampled by this implementation.\n\n type X;\n\n\n\n /// Construct self, with inclusive lower bound and exclusive upper bound\n\n /// `[low, high)`.\n\n ///\n\n /// Usually users should not call this directly but instead use\n\n /// `Uniform::new`, which asserts that `low < high` before calling this.\n\n fn new<B1, B2>(low: B1, high: B2) -> Self\n\n where B1: SampleBorrow<Self::X> + Sized,\n\n B2: SampleBorrow<Self::X> + Sized;\n\n\n\n /// Construct self, with inclusive bounds `[low, high]`.\n\n ///\n\n /// Usually users should not call this directly but instead use\n\n /// `Uniform::new_inclusive`, which asserts that `low <= high` before\n\n /// calling this.\n\n fn new_inclusive<B1, B2>(low: B1, high: B2) -> Self\n\n where B1: SampleBorrow<Self::X> + Sized,\n", "file_path": "src/distributions/uniform.rs", "rank": 77, "score": 61322.273406464476 }, { "content": "/// Helper trait for creating objects using the correct 
implementation of\n\n/// [`UniformSampler`] for the sampling type.\n\n///\n\n/// See the [module documentation] on how to implement [`Uniform`] range\n\n/// sampling for a custom type.\n\n///\n\n/// [`UniformSampler`]: trait.UniformSampler.html\n\n/// [module documentation]: index.html\n\n/// [`Uniform`]: struct.Uniform.html\n\npub trait SampleUniform: Sized {\n\n /// The `UniformSampler` implementation supporting type `X`.\n\n type Sampler: UniformSampler<X = Self>;\n\n}\n\n\n", "file_path": "src/distributions/uniform.rs", "rank": 78, "score": 61311.827615227565 }, { "content": "/// Helper trait similar to [`Borrow`] but implemented\n\n/// only for SampleUniform and references to SampleUniform in\n\n/// order to resolve ambiguity issues.\n\n///\n\n/// [`Borrow`]: https://doc.rust-lang.org/std/borrow/trait.Borrow.html\n\npub trait SampleBorrow<Borrowed> {\n\n /// Immutably borrows from an owned value. See [`Borrow::borrow`]\n\n ///\n\n /// [`Borrow::borrow`]: https://doc.rust-lang.org/std/borrow/trait.Borrow.html#tymethod.borrow\n\n fn borrow(&self) -> &Borrowed;\n\n}\n\nimpl<Borrowed> SampleBorrow<Borrowed> for Borrowed where Borrowed: SampleUniform {\n\n #[inline(always)]\n\n fn borrow(&self) -> &Borrowed { self }\n\n}\n\nimpl<'a, Borrowed> SampleBorrow<Borrowed> for &'a Borrowed where Borrowed: SampleUniform {\n\n #[inline(always)]\n\n fn borrow(&self) -> &Borrowed { *self }\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\n// What follows are all back-ends.\n\n\n\n\n", "file_path": "src/distributions/uniform.rs", "rank": 79, "score": 61303.76097004756 }, { "content": "#[test]\n\nfn test_lcg64xsh32_true_values() {\n\n // Numbers copied from official test suite.\n\n let mut rng = Lcg64Xsh32::new(42, 54);\n\n\n\n let mut results = [0u32; 6];\n\n for i in results.iter_mut() { *i = rng.next_u32(); }\n\n let expected: [u32; 6] = [0xa15c02b7, 0x7b47f409, 0xba1d3330,\n\n 0x83d2f293, 0xbfa4784b, 0xcbed606e];\n\n 
assert_eq!(results, expected);\n\n}\n\n\n", "file_path": "rand_pcg/tests/lcg64xsh32.rs", "rank": 80, "score": 61088.151897113516 }, { "content": "#[test]\n\nfn test_xorshift_zero_seed() {\n\n // Xorshift does not work with an all zero seed.\n\n // Assert it does not panic.\n\n let seed = [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0];\n\n let mut rng = XorShiftRng::from_seed(seed);\n\n let a = rng.next_u64();\n\n let b = rng.next_u64();\n\n assert!(a != 0);\n\n assert!(b != a);\n\n}\n\n\n", "file_path": "rand_xorshift/tests/mod.rs", "rank": 81, "score": 61088.151897113516 }, { "content": "#[test]\n\nfn test_xorshift_true_values() {\n\n let seed = [16,15,14,13, 12,11,10,9, 8,7,6,5, 4,3,2,1];\n\n let mut rng = XorShiftRng::from_seed(seed);\n\n\n\n let mut results = [0u32; 9];\n\n for i in results.iter_mut() { *i = rng.next_u32(); }\n\n let expected: [u32; 9] = [\n\n 2081028795, 620940381, 269070770, 16943764, 854422573, 29242889,\n\n 1550291885, 1227154591, 271695242];\n\n assert_eq!(results, expected);\n\n\n\n let mut results = [0u64; 9];\n\n for i in results.iter_mut() { *i = rng.next_u64(); }\n\n let expected: [u64; 9] = [\n\n 9247529084182843387, 8321512596129439293, 14104136531997710878,\n\n 6848554330849612046, 343577296533772213, 17828467390962600268,\n\n 9847333257685787782, 7717352744383350108, 1133407547287910111];\n\n assert_eq!(results, expected);\n\n\n\n let mut results = [0u8; 32];\n\n rng.fill_bytes(&mut results);\n\n let expected = [102, 57, 212, 16, 233, 130, 49, 183,\n\n 158, 187, 44, 203, 63, 149, 45, 17,\n\n 117, 129, 131, 160, 70, 121, 158, 155,\n\n 224, 209, 192, 53, 10, 62, 57, 72];\n\n assert_eq!(results, expected);\n\n}\n\n\n", "file_path": "rand_xorshift/tests/mod.rs", "rank": 82, "score": 61088.151897113516 }, { "content": "#[test]\n\nfn test_mcg128xsl64_true_values() {\n\n // Numbers copied from official test suite (C version).\n\n let mut rng = Mcg128Xsl64::new(42);\n\n\n\n let mut results = [0u64; 6];\n\n for i in results.iter_mut() { *i = 
rng.next_u64(); }\n\n let expected: [u64; 6] = [0x63b4a3a813ce700a, 0x382954200617ab24,\n\n 0xa7fd85ae3fe950ce, 0xd715286aa2887737, 0x60c92fee2e59f32c, 0x84c4e96beff30017];\n\n assert_eq!(results, expected);\n\n}\n\n\n", "file_path": "rand_pcg/tests/mcg128xsl64.rs", "rank": 83, "score": 61088.151897113516 }, { "content": "#[cfg(all(all(rustc_1_26, not(target_os = \"emscripten\")), target_pointer_width = \"64\"))]\n\ntype Rng = ::rand_pcg::Pcg64Mcg;\n", "file_path": "src/rngs/small.rs", "rank": 84, "score": 60986.719790081144 }, { "content": "fn is_getrandom_available() -> bool {\n\n use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};\n\n use std::sync::{Once, ONCE_INIT};\n\n\n\n static CHECKER: Once = ONCE_INIT;\n\n static AVAILABLE: AtomicBool = ATOMIC_BOOL_INIT;\n\n\n\n CHECKER.call_once(|| {\n\n debug!(\"OsRng: testing getrandom\");\n\n let mut buf: [u8; 0] = [];\n\n let result = getrandom(&mut buf, false);\n\n let available = if result == -1 {\n\n let err = io::Error::last_os_error().raw_os_error();\n\n err != Some(libc::ENOSYS)\n\n } else {\n\n true\n\n };\n\n AVAILABLE.store(available, Ordering::Relaxed);\n\n info!(\"OsRng: using {}\", if available { \"getrandom\" } else { \"/dev/random\" });\n\n });\n\n\n\n AVAILABLE.load(Ordering::Relaxed)\n\n}\n", "file_path": "rand_os/src/solaris.rs", "rank": 85, "score": 59837.8681151578 }, { "content": "fn is_getrandom_available() -> bool {\n\n static CHECKER: Once = ONCE_INIT;\n\n static AVAILABLE: AtomicBool = ATOMIC_BOOL_INIT;\n\n\n\n if NR_GETRANDOM == 0 { return false };\n\n\n\n CHECKER.call_once(|| {\n\n debug!(\"OsRng: testing getrandom\");\n\n let mut buf: [u8; 0] = [];\n\n let result = getrandom(&mut buf, false);\n\n let available = if result == -1 {\n\n let err = io::Error::last_os_error().raw_os_error();\n\n err != Some(libc::ENOSYS)\n\n } else {\n\n true\n\n };\n\n AVAILABLE.store(available, Ordering::Relaxed);\n\n info!(\"OsRng: using {}\", if available { \"getrandom\" } else { \"/dev/urandom\" 
});\n\n });\n\n\n\n AVAILABLE.load(Ordering::Relaxed)\n\n}\n", "file_path": "rand_os/src/linux_android.rs", "rank": 86, "score": 58743.44997028766 }, { "content": "/// Extension trait on iterators, providing random sampling methods.\n\npub trait IteratorRandom: Iterator + Sized {\n\n /// Choose one element at random from the iterator. If you have a slice,\n\n /// it's significantly faster to call the [`choose`] or [`choose_mut`]\n\n /// functions using the slice instead.\n\n ///\n\n /// Returns `None` if and only if the iterator is empty.\n\n /// \n\n /// Complexity is `O(n)`, where `n` is the length of the iterator.\n\n /// This likely consumes multiple random numbers, but the exact number\n\n /// is unspecified.\n\n ///\n\n /// [`choose`]: trait.SliceRandom.html#method.choose\n\n /// [`choose_mut`]: trait.SliceRandom.html#method.choose_mut\n\n fn choose<R>(mut self, rng: &mut R) -> Option<Self::Item>\n\n where R: Rng + ?Sized\n\n {\n\n let (mut lower, mut upper) = self.size_hint();\n\n let mut consumed = 0;\n\n let mut result = None;\n\n\n", "file_path": "src/seq/mod.rs", "rank": 87, "score": 58347.16966629015 }, { "content": "#[bench]\n\nfn init_jitter(b: &mut Bencher) {\n\n b.iter(|| {\n\n JitterRng::new().unwrap()\n\n });\n\n}\n\n\n\n\n\nconst RESEEDING_THRESHOLD: u64 = 1024*1024*1024; // something high enough to get\n\n // deterministic measurements\n\n\n", "file_path": "benches/generators.rs", "rank": 88, "score": 57490.34046429962 }, { "content": "#[bench]\n\nfn seq_shuffle_100(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let x : &mut [usize] = &mut [1; 100];\n\n b.iter(|| {\n\n x.shuffle(&mut rng);\n\n x[0]\n\n })\n\n}\n\n\n", "file_path": "benches/seq.rs", "rank": 89, "score": 57490.34046429962 }, { "content": "#[bench]\n\nfn gen_1k_fill(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n let mut buf = [0u64; 128];\n\n b.iter(|| {\n\n rng.fill(&mut buf[..]);\n\n buf\n\n });\n\n 
b.bytes = 1024;\n\n}\n", "file_path": "benches/misc.rs", "rank": 90, "score": 57490.34046429962 }, { "content": "#[bench]\n\nfn dist_iter(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_entropy();\n\n let distr = Normal::new(-2.71828, 3.14159);\n\n let mut iter = distr.sample_iter(&mut rng);\n\n\n\n b.iter(|| {\n\n let mut accum = 0.0;\n\n for _ in 0..::RAND_BENCH_N {\n\n accum += iter.next().unwrap();\n\n }\n\n accum\n\n });\n\n b.bytes = size_of::<f64>() as u64 * ::RAND_BENCH_N;\n\n}\n", "file_path": "benches/distributions.rs", "rank": 91, "score": 57490.34046429962 }, { "content": "#[bench]\n\nfn gen_1k_gen_array(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n // max supported array length is 32!\n\n let v: [[u64; 32]; 4] = rng.gen();\n\n v\n\n });\n\n b.bytes = 1024;\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 92, "score": 56330.79938821119 }, { "content": "#[bench]\n\nfn misc_bernoulli_var(b: &mut Bencher) {\n\n let mut rng = StdRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let mut accum = true;\n\n let mut p = 0.18;\n\n for _ in 0..::RAND_BENCH_N {\n\n let d = rand::distributions::Bernoulli::new(p);\n\n accum ^= rng.sample(d);\n\n p += 0.0001;\n\n }\n\n accum\n\n })\n\n}\n\n\n\nmacro_rules! 
sample_binomial {\n\n ($name:ident, $n:expr, $p:expr) => {\n\n #[bench]\n\n fn $name(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n let (n, p) = ($n, $p);\n", "file_path": "benches/misc.rs", "rank": 93, "score": 56330.79938821119 }, { "content": "#[bench]\n\nfn gen_1k_iter_repeat(b: &mut Bencher) {\n\n use std::iter;\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let v: Vec<u64> = iter::repeat(()).map(|()| rng.gen()).take(128).collect();\n\n v\n\n });\n\n b.bytes = 1024;\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 94, "score": 56330.79938821119 }, { "content": "#[bench]\n\nfn misc_bernoulli_const(b: &mut Bencher) {\n\n let mut rng = StdRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let d = rand::distributions::Bernoulli::new(0.18);\n\n let mut accum = true;\n\n for _ in 0..::RAND_BENCH_N {\n\n accum ^= rng.sample(d);\n\n }\n\n accum\n\n })\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 95, "score": 56330.79938821119 }, { "content": "#[bench]\n\nfn reseeding_hc128_bytes(b: &mut Bencher) {\n\n let mut rng = ReseedingRng::new(Hc128Core::from_entropy(),\n\n RESEEDING_THRESHOLD,\n\n EntropyRng::new());\n\n let mut buf = [0u8; BYTES_LEN];\n\n b.iter(|| {\n\n for _ in 0..RAND_BENCH_N {\n\n rng.fill_bytes(&mut buf);\n\n black_box(buf);\n\n }\n\n });\n\n b.bytes = BYTES_LEN as u64 * RAND_BENCH_N;\n\n}\n\n\n\nmacro_rules! 
reseeding_uint {\n\n ($fnn:ident, $ty:ty) => {\n\n #[bench]\n\n fn $fnn(b: &mut Bencher) {\n\n let mut rng = ReseedingRng::new(Hc128Core::from_entropy(),\n\n RESEEDING_THRESHOLD,\n", "file_path": "benches/generators.rs", "rank": 96, "score": 56330.79938821119 }, { "content": "#[bench]\n\nfn seq_iter_choose_from_1000(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let x : &mut [usize] = &mut [1; 1000];\n\n for i in 0..1000 {\n\n x[i] = i;\n\n }\n\n b.iter(|| {\n\n let mut s = 0;\n\n for _ in 0..RAND_BENCH_N {\n\n s += x.iter().choose(&mut rng).unwrap();\n\n }\n\n s\n\n });\n\n b.bytes = size_of::<usize>() as u64 * ::RAND_BENCH_N;\n\n}\n\n\n", "file_path": "benches/seq.rs", "rank": 97, "score": 56330.79938821119 }, { "content": "#[bench]\n\nfn seq_slice_choose_1_of_1000(b: &mut Bencher) {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let x : &mut [usize] = &mut [1; 1000];\n\n for i in 0..1000 {\n\n x[i] = i;\n\n }\n\n b.iter(|| {\n\n let mut s = 0;\n\n for _ in 0..RAND_BENCH_N {\n\n s += x.choose(&mut rng).unwrap();\n\n }\n\n s\n\n });\n\n b.bytes = size_of::<usize>() as u64 * ::RAND_BENCH_N;\n\n}\n\n\n\nmacro_rules! seq_slice_choose_multiple {\n\n ($name:ident, $amount:expr, $length:expr) => {\n\n #[bench]\n\n fn $name(b: &mut Bencher) {\n", "file_path": "benches/seq.rs", "rank": 98, "score": 56330.79938821119 }, { "content": "#[bench]\n\nfn gen_1k_sample_iter(b: &mut Bencher) {\n\n use rand::distributions::{Distribution, Standard};\n\n let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();\n\n b.iter(|| {\n\n let v: Vec<u64> = Standard.sample_iter(&mut rng).take(128).collect();\n\n v\n\n });\n\n b.bytes = 1024;\n\n}\n\n\n", "file_path": "benches/misc.rs", "rank": 99, "score": 56330.79938821119 } ]
Rust
src/validator/lockbox.rs
Cognoscan/fog_pack
7b3af246faa851bfc2aa09cc186ff2332124e791
use super::*; use crate::element::*; use crate::error::{Error, Result}; use serde::{Deserialize, Serialize}; #[inline] fn is_false(v: &bool) -> bool { !v } #[inline] fn u32_is_zero(v: &u32) -> bool { *v == 0 } #[inline] fn u32_is_max(v: &u32) -> bool { *v == u32::MAX } macro_rules! lockbox_validator { ($t: ty, $e: ident, $v: ident, $link:expr, $name:expr) => { #[doc = "Validator for a [`"] #[doc = $name] #[doc = "`]["] #[doc = $link] #[doc = "].\n\n"] #[doc = "This validator will only pass a "] #[doc = $name] #[doc = " value. Validation passes if:\n\n"] #[doc = "- The number of bytes in the lockbox is less than or equal to `max_len`\n"] #[doc = "- The number of bytes in the lockbox is greater than or equal to `min_len`\n"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(deny_unknown_fields, default)] pub struct $v { #[serde(skip_serializing_if = "String::is_empty")] pub comment: String, #[serde(skip_serializing_if = "u32_is_max")] pub max_len: u32, #[serde(skip_serializing_if = "u32_is_zero")] pub min_len: u32, #[serde(skip_serializing_if = "is_false")] pub size: bool, } impl std::default::Default for $v { fn default() -> Self { Self { comment: String::new(), max_len: u32::MAX, min_len: u32::MIN, size: false, } } } impl $v { pub fn new() -> Self { Self::default() } pub fn comment(mut self, comment: impl Into<String>) -> Self { self.comment = comment.into(); self } pub fn max_len(mut self, max_len: u32) -> Self { self.max_len = max_len; self } pub fn min_len(mut self, min_len: u32) -> Self { self.min_len = min_len; self } pub fn size(mut self, size: bool) -> Self { self.size = size; self } pub fn build(self) -> Validator { Validator::$e(self) } pub(crate) fn validate(&self, parser: &mut Parser) -> Result<()> { let elem = parser .next() .ok_or_else(|| Error::FailValidate(concat!("Expected a ",$name).to_string()))??; let elem = if let Element::$e(v) = elem { v } else { return Err(Error::FailValidate(format!( concat!("Expected ", $name, ", got {}"), 
elem.name() ))); }; let len = elem.as_bytes().len() as u32; if len > self.max_len { return Err(Error::FailValidate( concat!($name, " is longer than max_len").to_string() )); } if len < self.min_len { return Err(Error::FailValidate( concat!($name, " is shorter than min_len").to_string() )); } Ok(()) } fn query_check_self(&self, other: &Self) -> bool { self.size || (u32_is_max(&other.max_len) && u32_is_zero(&other.min_len)) } pub(crate) fn query_check(&self, other: &Validator) -> bool { match other { Validator::$e(other) => self.query_check_self(other), Validator::Multi(list) => list.iter().all(|other| match other { Validator::$e(other) => self.query_check_self(other), _ => false, }), Validator::Any => true, _ => false, } } } }; ($t: ty, $e: ident, $v: ident) => { lockbox_validator!($t, $e, $v, concat!("fog_crypto::lockbox::", stringify!($t)), stringify!($t)); } } lockbox_validator!(DataLockbox, DataLockbox, DataLockboxValidator); lockbox_validator!(IdentityLockbox, IdentityLockbox, IdentityLockboxValidator); lockbox_validator!(StreamLockbox, StreamLockbox, StreamLockboxValidator); lockbox_validator!(LockLockbox, LockLockbox, LockLockboxValidator);
use super::*; use crate::element::*; use crate::error::{Error, Result}; use serde::{Deserialize, Serialize}; #[inline] fn is_false(v: &bool) -> bool { !v } #[inline] fn u32_is_zero(v: &u32) -> bool { *v == 0 } #[inline] fn u32_is_max(v: &u32) -> bool { *v == u32::MAX } macro_rules! lockbox_validator { ($t: ty, $e: ident, $v: ident, $link:expr, $name:expr) => { #[doc = "Validator for a [`"] #[doc = $name] #[doc = "`]["] #[doc = $link] #[doc = "].\n\n"] #[doc = "This validator will only pass a "] #[doc = $name] #[doc = " value. Validation passes if:\n\n"] #[doc = "- The number of bytes in the lockbox is less than or equal to `max_len`\n"] #[doc = "- The number of bytes in the lockbox is greater than or equal to `min_len`\n"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(deny_unknown_fields, default)] pub struct $v { #[serde(skip_serializing_if = "String::is_empty")] pub comment: String, #[serde(skip_serializing_if = "u32_is_max")] pub max_len: u32, #[serde(skip_serializing_if = "u32_is_zero")] pub min_len: u32, #[serde(skip_serializing_if = "is_false")] pub size: bool, } impl std::default::Default for $v { fn default() -> Self { Self { comment: String::new(), max_len: u32::MAX, min_len: u32::MIN, size: false, } } } impl $v { pub fn new() -> Self { Self::default() } pub fn comment(mut self, comment: impl Into<String>) -> Self { self.comment = comment.into(); self } pub fn max_len(mut self, max_len: u32) -> Self { self.max_len = max_len; self } pub fn min_len(mut self, min_len: u32) -> Self { self.min_len = min_len; self } pub fn size(mut self, size: bool) -> Self { self.size = size; self } pub fn build(self) -> Validator { Validator::$e(self) } pub(crate) fn validate(&self, parser: &mut Parser) -> Result<()> { let elem = parser .next() .ok_or_else(|| Error::FailValidate(concat!("Expected a ",$name).to_string()))??; let elem = if let Element::$e(v) = elem { v } else { return Err(Error::FailValidate(format!( concat!("Expected ", $name, ", got {}"), 
elem.name() ))); }; let len = elem.as_bytes().len() as u32; if len > self.max_len { return Err(Error::FailValidate( concat!($name, " is longer than max_len").to_string() )); } if len < self.min_len { return Err(Error::FailValidate( concat!($name, " is shorter than min_len").to_string() )); } Ok(()) } fn query_check_self(&self, other: &Self) -> bool { self.size || (u32_is_max(&other.max_le
r().all(|other| match other { Validator::$e(other) => self.query_check_self(other), _ => false, }), Validator::Any => true, _ => false, } } } }; ($t: ty, $e: ident, $v: ident) => { lockbox_validator!($t, $e, $v, concat!("fog_crypto::lockbox::", stringify!($t)), stringify!($t)); } } lockbox_validator!(DataLockbox, DataLockbox, DataLockboxValidator); lockbox_validator!(IdentityLockbox, IdentityLockbox, IdentityLockboxValidator); lockbox_validator!(StreamLockbox, StreamLockbox, StreamLockboxValidator); lockbox_validator!(LockLockbox, LockLockbox, LockLockboxValidator);
n) && u32_is_zero(&other.min_len)) } pub(crate) fn query_check(&self, other: &Validator) -> bool { match other { Validator::$e(other) => self.query_check_self(other), Validator::Multi(list) => list.ite
random
[ { "content": "/// Read raw lockbox data out from a buffer\n\npub fn read_raw_lockbox(buf: &mut &[u8], len: usize) -> crate::Result<Lockbox> {\n\n Ok(Lockbox::decode(len, buf)?)\n\n}\n\n\n\n\n", "file_path": "old/decode.rs", "rank": 2, "score": 338446.3248757994 }, { "content": "/// General function for reading a field-value map from a buffer. Checks to make \n\n/// sure the keys are unique, valid UTF-8 Strings in lexicographic order.\n\npub fn read_to_map(buf: &mut &[u8], len: usize) -> crate::Result<BTreeMap<String, Value>> {\n\n\n\n let mut map: BTreeMap<String,Value> = BTreeMap::new();\n\n object_iterate(buf, len, |field, buf| {\n\n let val = read_value(buf)?;\n\n map.insert(field.to_string(), val);\n\n Ok(())\n\n })?;\n\n Ok(map)\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 3, "score": 329550.7416311451 }, { "content": "fn read_any(parser: &mut Parser) -> Result<()> {\n\n fn get_elem<'a>(parser: &mut Parser<'a>) -> Result<Element<'a>> {\n\n parser\n\n .next()\n\n .ok_or_else(|| Error::FailValidate(\"expected another value\".to_string()))?\n\n }\n\n let elem = get_elem(parser)?;\n\n match elem {\n\n Element::Map(len) => {\n\n let mut last_key = None;\n\n for _ in 0..len {\n\n if let Element::Str(key) = get_elem(parser)? 
{\n\n if let Some(last_key) = last_key {\n\n if key <= last_key {\n\n return Err(Error::FailValidate(format!(\n\n \"map keys are unordered: {} follows {}\",\n\n key, last_key\n\n )));\n\n }\n\n }\n", "file_path": "src/validator/mod.rs", "rank": 4, "score": 319454.287469927 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/map.rs", "rank": 5, "score": 317861.3906274191 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/str.rs", "rank": 6, "score": 317861.3906274191 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n", "file_path": "src/validator/str.rs", "rank": 7, "score": 317861.39062741917 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n\n/// Validator for byte sequences.\n\n///\n\n/// This validator type will only pass binary values (a sequence of bytes). A binary sequence can\n\n/// also be treated as a little-endian arbitrary-length unsigned integer. 
Validation passes if:\n\n///\n\n/// - The bits set in `bits_clr` are cleared in the byte sequence.\n\n/// - The bits set in `bits_set` are set in the byte sequence.\n\n/// - If `max` has 1 or more bytes, the value is less than the maximum in `max`, or equal to it if\n\n/// `ex_max` is not set to true.\n\n/// - The value is greater than the minimum in `min`, or equal to it if `ex_min` is not set to true.\n\n/// - The value's length in bytes is less than or equal to the value in `max_len`.\n\n/// - The value's length in bytes is greater than or equal to the value in `min_len`.\n\n/// - If the `in` list is not empty, the value must be among the values in the list.\n\n/// - The value must not be among the values in the `nin` list.\n\n///\n\n/// # Defaults\n", "file_path": "src/validator/bin.rs", "rank": 8, "score": 317861.39062741917 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n\n/// Validator for arrays.\n\n///\n\n/// This validator type will only pass array values. Validation passes if:\n\n///\n\n/// - If the `in` list is not empty, the array must be among the arrays in the list.\n\n/// - The array must not be among the arrays in the `nin` list.\n\n/// - The arrays's length is less than or equal to the value in `max_len`.\n\n/// - The arrays's length is greater than or equal to the value in `min_len`.\n\n/// - If `unique` is true, the array items are all unique.\n\n/// - For each validator in the `contains` list, at least one item in the array passes.\n\n/// - Each item in the array is checked with a validator at the same index in the `prefix` array.\n\n/// All validators must pass. If there is no validator at the same index, the validator in\n\n/// `items` must pass. 
If a validator is not used, it passes automatially.\n\n///\n\n/// # Defaults\n\n///\n", "file_path": "src/validator/array.rs", "rank": 9, "score": 317861.3906274191 }, { "content": "#[inline]\n\nfn u32_is_max(v: &u32) -> bool {\n\n *v == u32::MAX\n\n}\n\n\n", "file_path": "src/validator/map.rs", "rank": 10, "score": 317861.3906274191 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/array.rs", "rank": 11, "score": 317861.3906274191 }, { "content": "#[inline]\n\nfn u32_is_zero(v: &u32) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/bin.rs", "rank": 12, "score": 317861.39062741917 }, { "content": "/// Attempt to copy a string from a fogpack data structure. Fails if string wasn't present/valid.\n\npub fn read_string(buf: &mut &[u8]) -> crate::Result<String> {\n\n Ok(read_str(buf)?.to_string())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 13, "score": 306269.0547389144 }, { "content": "/// Attempt to read a `Lockbox`.\n\npub fn read_lockbox(buf: &mut &[u8]) -> crate::Result<Lockbox> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Lockbox(len) = marker {\n\n read_raw_lockbox(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected Lockbox\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 14, "score": 306210.62344122294 }, { "content": "/// Attempt to read a u32 from a fogpack data structure. 
Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u32.\n\npub fn read_u32(buf: &mut &[u8]) -> crate::Result<u32> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as u32\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 15, "score": 306170.9296886973 }, { "content": "pub fn read_bool(buf: &mut &[u8]) -> crate::Result<bool> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Boolean(v) = marker {\n\n Ok(v)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected boolean\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 16, "score": 305241.903862121 }, { "content": "/// Read raw Identity out from a buffer\n\npub fn read_raw_id(buf: &mut &[u8], len: usize) -> crate::Result<Identity> {\n\n let fail_len = buf.len();\n\n let id = Identity::decode(buf)?;\n\n if id.size() != len {\n\n Err(Error::BadEncode(fail_len, \"Identity type has invalid size\"))\n\n }\n\n else {\n\n Ok(id)\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 17, "score": 300516.91932060634 }, { "content": "// Can fail due to bad public key\n\npub fn lockbox_from_identity(id: &FullIdentity, mut message: Vec<u8>) -> Result<(Lockbox, FullStreamKey),CryptoError> {\n\n let version = id.get_version();\n\n if version != 1 {\n\n memzero(&mut message[..]); // Must assume data is sensitive and zero it out before failing\n\n return Err(CryptoError::UnsupportedVersion);\n\n }\n\n let nonce = Nonce::new();\n\n let mut esk: SecretCryptKey = Default::default();\n\n let mut epk: PublicCryptKey = Default::default();\n\n crypt_keypair(&mut epk, &mut esk);\n\n let k = id.calc_stream_key(&esk)?;\n\n let k = FullStreamKey::from_secret(k);\n\n let type_id = LockType::Identity((id.get_id(),epk));\n\n\n\n message.reserve_exact(Tag::len()); // Need 
exactly enough to append the tag\n\n let tag = aead_encrypt(&mut message[..], &[], &nonce, &k.get_key());\n\n message.extend_from_slice(&tag.0);\n\n Ok((Lockbox {\n\n version,\n\n type_id,\n\n nonce,\n\n ciphertext: message\n\n }, k))\n\n}\n\n\n", "file_path": "old/crypto/lockbox.rs", "rank": 18, "score": 298120.33772077085 }, { "content": "/// Attempt to read an object as `Value`.\n\npub fn read_object(buf: &mut &[u8]) -> crate::Result<BTreeMap<String, Value>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Object(len) = marker {\n\n read_to_map(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected object\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 19, "score": 295244.04549259366 }, { "content": "pub fn get_raw_array(raw: &mut &[u8], len: usize) -> crate::Result<Box<[u8]>> {\n\n let start: &[u8] = raw;\n\n for _ in 0..len {\n\n verify_value(raw)?;\n\n }\n\n let (array, _) = start.split_at(start.len()-raw.len());\n\n Ok(array.to_vec().into_boxed_slice())\n\n}\n\n\n\n\n\n\n", "file_path": "old/validator/array.rs", "rank": 20, "score": 284889.0288824026 }, { "content": "/// Serialize an element onto a byte vector. 
Doesn't check if Array & Map structures make\n\n/// sense, just writes elements out.\n\npub fn serialize_elem(buf: &mut Vec<u8>, elem: Element) {\n\n use self::Element::*;\n\n match elem {\n\n Null => buf.push(Marker::Null.into()),\n\n Bool(v) => buf.push(if v { Marker::True } else { Marker::False }.into()),\n\n Int(v) => match integer::get_int_internal(&v) {\n\n integer::IntPriv::PosInt(v) => {\n\n if v <= 127 {\n\n buf.push(Marker::PosFixInt(v as u8).into());\n\n } else if v <= u8::MAX as u64 {\n\n buf.push(Marker::UInt8.into());\n\n buf.push(v as u8);\n\n } else if v <= u16::MAX as u64 {\n\n buf.push(Marker::UInt16.into());\n\n buf.extend_from_slice(&(v as u16).to_le_bytes());\n\n } else if v <= u32::MAX as u64 {\n\n buf.push(Marker::UInt32.into());\n\n buf.extend_from_slice(&(v as u32).to_le_bytes());\n\n } else {\n\n buf.push(Marker::UInt64.into());\n", "file_path": "src/element.rs", "rank": 21, "score": 281028.7126045275 }, { "content": "/// Read a negative integer straight out of the stream. The size of the integer should be known from the \n\n/// fogpack marker that was used. If the marker contained the integer, it should be included as `v`.\n\npub fn read_neg_int(buf: &mut &[u8], len: usize, v: i8) -> crate::Result<Integer> {\n\n let fail_len = buf.len();\n\n match len {\n\n 0 => Ok(v.into()),\n\n 1 => {\n\n let v = buf.read_i8()?;\n\n if v < -32 {\n\n Ok(v.into())\n\n }\n\n else if v >= 0 {\n\n Err(not_negative(fail_len))\n\n }\n\n else {\n\n Err(not_shortest(fail_len))\n\n }\n\n },\n\n 2 => {\n\n let v = buf.read_i16::<BigEndian>()?;\n\n if v < (std::i8::MIN as i16) {\n\n Ok(v.into())\n", "file_path": "old/decode.rs", "rank": 22, "score": 280092.46960965265 }, { "content": "/// Read a positive integer straight out of the stream. The size of the integer should be known from the \n\n/// fogpack marker that was used. 
If the marker contained the integer, it should be included as `v`.\n\npub fn read_pos_int(buf: &mut &[u8], len: usize, v: u8) -> crate::Result<Integer> {\n\n let fail_len = buf.len();\n\n match len {\n\n 0 => Ok(v.into()),\n\n 1 => {\n\n let v = buf.read_u8()?;\n\n if v > 127 {\n\n Ok(v.into())\n\n }\n\n else {\n\n Err(not_shortest(fail_len))\n\n }\n\n },\n\n 2 => {\n\n let v = buf.read_u16::<BigEndian>()?;\n\n if v > (std::u8::MAX as u16) {\n\n Ok(v.into())\n\n }\n\n else {\n\n Err(not_shortest(fail_len))\n", "file_path": "old/decode.rs", "rank": 23, "score": 280092.46960965265 }, { "content": "/// Decode a MessagePack value. Decoding will fail if the value isn't in \n\n/// condense-db canonical form. That is:\n\n/// - All types are encoded in as few bytes as possible\n\n/// - Positive integers are always encoded using UInt types\n\n/// - Map types always have unique strings as keys\n\n/// - Maps are ordered lexicographically\n\n/// - Strings are valid UTF-8\n\npub fn read_value(buf: &mut &[u8]) -> crate::Result<Value> {\n\n let marker = read_marker(buf)?;\n\n Ok(match marker {\n\n MarkerType::Null => Value::Null,\n\n MarkerType::Boolean(v) => Value::Boolean(v),\n\n MarkerType::NegInt((len, v)) => Value::Integer(read_neg_int(buf, len, v)?),\n\n MarkerType::PosInt((len, v)) => Value::Integer(read_pos_int(buf, len, v)?),\n\n MarkerType::String(len) => Value::String(read_raw_str(buf, len)?.to_string()),\n\n MarkerType::F32 => Value::F32(buf.read_f32::<BigEndian>()?),\n\n MarkerType::F64 => Value::F64(buf.read_f64::<BigEndian>()?),\n\n MarkerType::Binary(len) => Value::Binary(read_raw_bin(buf, len)?.to_vec()),\n\n MarkerType::Array(len) => {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value(buf)?);\n\n }\n\n Value::Array(v)\n\n },\n\n MarkerType::Object(len) => Value::Object(read_to_map(buf, len)?),\n\n MarkerType::Hash(len) => Value::Hash(read_raw_hash(buf, len)?),\n\n MarkerType::Identity(len) => Value::Identity(read_raw_id(buf, 
len)?),\n\n MarkerType::Lockbox(len) => Value::Lockbox(read_raw_lockbox(buf, len)?),\n\n MarkerType::Timestamp(len) => Value::Timestamp(read_raw_time(buf, len)?),\n\n })\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 24, "score": 278676.6268154724 }, { "content": "/// General function for verifying a field-value map in a buffer. Makes sure the keys are unique, \n\n/// valid UTF-8 Strings in lexicographic order.\n\npub fn verify_map(buf: &mut &[u8], len: usize) -> crate::Result<usize> {\n\n let length = buf.len();\n\n object_iterate(buf, len, |_, buf| { verify_value(buf)?; Ok(()) })?;\n\n Ok(length - buf.len())\n\n}\n\n\n\n\n", "file_path": "old/decode.rs", "rank": 25, "score": 274168.26513151644 }, { "content": "/// Consume the lockbox and spit out the decrypted data\n\npub fn decrypt_lockbox(k: &FullStreamKey, mut lock: Lockbox) -> Result<Vec<u8>, CryptoError> {\n\n let m_len = lock.ciphertext.len() - Tag::len();\n\n let success = {\n\n let (mut message, tag) = lock.ciphertext.split_at_mut(m_len);\n\n aead_decrypt(\n\n &mut message,\n\n &[],\n\n &tag,\n\n &lock.nonce,\n\n &k.get_key()\n\n )\n\n };\n\n if success {\n\n lock.ciphertext.truncate(m_len);\n\n Ok(lock.ciphertext) // Value is moved, so plaintext is only in the Result\n\n }\n\n else {\n\n Err(CryptoError::DecryptFailed)\n\n }\n\n}\n", "file_path": "old/crypto/lockbox.rs", "rank": 26, "score": 273398.465466682 }, { "content": "pub fn lockbox_from_stream(k: &FullStreamKey, mut message: Vec<u8>) -> Result<Lockbox, CryptoError> {\n\n let version = k.get_version();\n\n if version != 1 { \n\n memzero(&mut message[..]); // Must assume data is sensitive and zero it out before failing\n\n return Err(CryptoError::UnsupportedVersion);\n\n }\n\n let nonce = Nonce::new();\n\n let raw_key = k.get_key();\n\n let type_id = LockType::Stream(k.get_id());\n\n\n\n message.reserve_exact(Tag::len()); // Need exactly enough to append the tag\n\n let tag = aead_encrypt(&mut message[..], &[], &nonce, &raw_key);\n\n 
message.extend_from_slice(&tag.0);\n\n Ok(Lockbox {\n\n version,\n\n type_id,\n\n nonce,\n\n ciphertext: message\n\n })\n\n}\n\n\n", "file_path": "old/crypto/lockbox.rs", "rank": 27, "score": 273393.10177484795 }, { "content": "/// Hashes a password according to a given PasswordConfig, returning a usable SecretKey.\n\n/// The password string is zeroed out no matter what.\n\npub fn password_to_key(mut password: String, config: &PasswordConfig) -> Result<SecretKey, ()> {\n\n let mut key: SecretKey = Default::default();\n\n let result = unsafe {\n\n libsodium_sys::crypto_pwhash(\n\n key.0.as_mut_ptr(),\n\n key.0.len() as c_ulonglong,\n\n password.as_ptr() as *const _,\n\n password.len() as c_ulonglong,\n\n config.salt.as_ptr(),\n\n config.ops_limit as c_ulonglong,\n\n config.mem_limit,\n\n config.alg\n\n )\n\n };\n\n unsafe { \n\n let mut pw = password.as_bytes_mut();\n\n memzero(&mut pw);\n\n };\n\n if result >= 0 {\n\n Ok(key)\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 28, "score": 269877.6708918759 }, { "content": "/// Read raw Hash out from a buffer\n\npub fn read_raw_hash(buf: &mut &[u8], len: usize) -> crate::Result<Hash> {\n\n let fail_len = buf.len();\n\n let hash = Hash::decode(buf)?;\n\n if hash.size() != len {\n\n Err(Error::BadEncode(fail_len, \"Hash type has invalid size\"))\n\n }\n\n else {\n\n Ok(hash)\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 29, "score": 269576.6790281087 }, { "content": "/// Read raw Timestamp out from a buffer\n\npub fn read_raw_time(buf: &mut &[u8], len: usize) -> crate::Result<Timestamp> {\n\n let fail_len = buf.len();\n\n match len {\n\n 4 => {\n\n let sec = buf.read_u32::<BigEndian>()?;\n\n Ok(Timestamp::from_sec(sec as i64))\n\n },\n\n 8 => {\n\n let raw_time = buf.read_u64::<BigEndian>()?;\n\n let sec = (raw_time & 0x0003_FFFF_FFFFu64) as i64;\n\n let nano = (raw_time >> 34) as u32;\n\n Ok(Timestamp::from_raw(sec,nano).ok_or(Error::BadEncode(fail_len, \"Timestamp 
nanoseconds is too big\"))?)\n\n },\n\n 12 => {\n\n let nano = buf.read_u32::<BigEndian>()?;\n\n let sec = buf.read_i64::<BigEndian>()?;\n\n Ok(Timestamp::from_raw(sec,nano).ok_or(Error::BadEncode(fail_len, \"Timestamp nanoseconds is too big\"))?)\n\n },\n\n _ => Err(Error::BadEncode(fail_len, \"Timestamp type has invalid size\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 30, "score": 269576.6790281087 }, { "content": "/// Step through every field/value pair in an object\n\npub fn object_iterate<'a, F>(buf: &mut &'a [u8], len: usize, mut f: F) -> crate::Result<()>\n\n where F: FnMut(&'a str, &mut &'a [u8]) -> crate::Result<()>\n\n{\n\n if len == 0 { return Ok(()); }\n\n let mut old_field = read_str(buf)?;\n\n f(old_field, buf)?;\n\n let mut field: &str;\n\n for _ in 1..len {\n\n let fail_len = buf.len();\n\n field = read_str(buf)?;\n\n match old_field.cmp(&field) {\n\n Ordering::Less => {\n\n // old_field is lower in order. This is correct\n\n f(field, buf)?;\n\n },\n\n Ordering::Equal => {\n\n return Err(Error::BadEncode(fail_len, \"Object has non-unique field\"));\n\n },\n\n Ordering::Greater => {\n\n return Err(Error::BadEncode(fail_len, \"Object fields not in lexicographic order\"));\n\n },\n\n }\n\n old_field = field;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 31, "score": 268914.492678285 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n\n/// Validator for a cryptographic [`Identity`][crate::Identity].\n\n///\n\n/// This validator will only pass an Identity value. Validation passes if:\n\n///\n\n/// - If the `in` list is not empty, the Identity must be among the ones in the list.\n\n/// - The Identity must not be among the ones in the `nin` list.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. 
The defaults for\n\n/// each field are:\n\n///\n\n/// - comment: \"\"\n\n/// - in_list: empty\n\n/// - nin_list: empty\n\n/// - query: false\n", "file_path": "src/validator/identity.rs", "rank": 32, "score": 267069.66329855897 }, { "content": "/// Converts a mutable slice of bytes to a mutable string slice. Works exactly like \n\n/// `std::str::from_utf8_mut` except that it counts the number of unicode code points.\n\npub fn from_utf8_mut(v: &mut [u8]) -> Result<(usize, &mut str), Utf8Error> {\n\n let count = run_utf8_validation(v)?;\n\n Ok((count, unsafe { str::from_utf8_unchecked_mut(v) }))\n\n}\n\n\n\n// use truncation to fit u64 into usize\n\nconst NONASCII_MASK: usize = 0x80808080_80808080u64 as usize;\n\n\n\n/// Returns `true` if any byte in the word `x` is nonascii (>= 128).\n", "file_path": "old/str_char.rs", "rank": 34, "score": 266511.8704244697 }, { "content": "/// Attempt to read an `Identity`.\n\npub fn read_id(buf: &mut &[u8]) -> crate::Result<Identity> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Identity(len) = marker {\n\n read_raw_id(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected Identity\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 35, "score": 266381.85446655325 }, { "content": "/// Verify a MessagePack value and return the number of bytes in it. Fails if the value isn't in \n\n/// condense-db canonical form. 
That is:\n\n/// - All types are encoded in as few bytes as possible\n\n/// - Positive integers are always encoded using UInt types\n\n/// - Map types always have unique strings as keys\n\n/// - Maps are ordered lexicographically\n\n/// - Strings are valid UTF-8\n\npub fn verify_value(buf: &mut &[u8]) -> crate::Result<usize> {\n\n let length = buf.len();\n\n let marker = read_marker(buf)?;\n\n match marker {\n\n MarkerType::NegInt((len, v)) => { read_neg_int(buf, len, v)?; },\n\n MarkerType::PosInt((len, v)) => { read_pos_int(buf, len, v)?; },\n\n MarkerType::String(len) => { read_raw_str(buf, len)?; },\n\n MarkerType::F32 => { buf.read_f32::<BigEndian>()?; },\n\n MarkerType::F64 => { buf.read_f64::<BigEndian>()?; },\n\n MarkerType::Binary(len) => { read_raw_bin(buf, len)?; },\n\n MarkerType::Array(len) => {\n\n for _i in 0..len {\n\n verify_value(buf)?;\n\n }\n\n },\n\n MarkerType::Object(len) => { verify_map(buf, len)?; },\n\n MarkerType::Hash(len) => { read_raw_hash(buf, len)?; },\n\n MarkerType::Identity(len) => { read_raw_id(buf, len)?; },\n\n MarkerType::Lockbox(len) => { read_raw_lockbox(buf, len)?; },\n\n MarkerType::Timestamp(len) => { read_raw_time(buf, len)?; },\n\n _ => (),\n\n }\n\n Ok(length - buf.len())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 36, "score": 266063.7400988502 }, { "content": "/// Decode a MessagePack value without copying binary data or strings. Decoding will fail if the \n\n/// value isn't in condense-db canonical form. 
That is:\n\n/// - All types are encoded in as few bytes as possible\n\n/// - Positive integers are always encoded using UInt types\n\n/// - Map types always have unique strings as keys\n\n/// - Maps are ordered lexicographically\n\n/// - Strings are valid UTF-8\n\npub fn read_value_ref<'a>(buf: &mut &'a [u8]) -> crate::Result<ValueRef<'a>> {\n\n let marker = read_marker(buf)?;\n\n Ok(match marker {\n\n MarkerType::Null => ValueRef::Null,\n\n MarkerType::Boolean(v) => ValueRef::Boolean(v),\n\n MarkerType::NegInt((len, v)) => ValueRef::Integer(read_neg_int(buf, len, v)?),\n\n MarkerType::PosInt((len, v)) => ValueRef::Integer(read_pos_int(buf, len, v)?),\n\n MarkerType::String(len) => ValueRef::String(read_raw_str(buf, len)?),\n\n MarkerType::F32 => ValueRef::F32(buf.read_f32::<BigEndian>()?),\n\n MarkerType::F64 => ValueRef::F64(buf.read_f64::<BigEndian>()?),\n\n MarkerType::Binary(len) => ValueRef::Binary(read_raw_bin(buf, len)?),\n\n MarkerType::Array(len) => {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value_ref(buf)?);\n\n }\n\n ValueRef::Array(v)\n\n },\n\n MarkerType::Object(len) => ValueRef::Object(read_to_map_ref(buf, len)?),\n\n MarkerType::Hash(len) => ValueRef::Hash(read_raw_hash(buf, len)?),\n\n MarkerType::Identity(len) => ValueRef::Identity(read_raw_id(buf, len)?),\n\n MarkerType::Lockbox(len) => ValueRef::Lockbox(read_raw_lockbox(buf, len)?),\n\n MarkerType::Timestamp(len) => ValueRef::Timestamp(read_raw_time(buf, len)?),\n\n })\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 37, "score": 258405.34234050085 }, { "content": "/// General function for referencing a UTF-8 string in a buffer. 
Checks for if the \n\n/// length is greater than remaining bytes in the buffer, or if the bytes \n\n/// received are not valid UTF-8.\n\npub fn read_raw_str<'a>(buf: &mut &'a [u8], len: usize) -> crate::Result<&'a str> {\n\n let fail_len = buf.len();\n\n if buf.len() >= len {\n\n let (data, rem) = buf.split_at(len);\n\n *buf = rem;\n\n let data = std::str::from_utf8(data)\n\n .map_err(|_| Error::BadEncode(fail_len, \"String wasn't valid UTF-8\"))?;\n\n Ok(data)\n\n }\n\n else {\n\n Err(Error::BadEncode(fail_len, \"String length larger than amount of data\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 38, "score": 258183.3334879334 }, { "content": "/// General function for referencing binary data in a buffer. Checks for if the \n\n/// length is greater than remaining bytes in the buffer.\n\npub fn read_raw_bin<'a>(buf: &mut &'a [u8], len: usize) -> crate::Result<&'a [u8]> {\n\n let fail_len = buf.len();\n\n if buf.len() >= len {\n\n let (data, rem) = buf.split_at(len);\n\n *buf = rem;\n\n Ok(data)\n\n }\n\n else {\n\n Err(Error::BadEncode(fail_len, \"Binary length larger than amount of data\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 39, "score": 258173.42853470577 }, { "content": "/// Attempt to read an array as `Value`.\n\npub fn read_array(buf: &mut &[u8]) -> crate::Result<Vec<Value>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Array(len) = marker {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value(buf)?);\n\n }\n\n Ok(v)\n\n }\n\n else {\n\n Err(Error::BadEncode(fail_len, \"Expected array\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 40, "score": 257176.4432470037 }, { "content": "/// General function for referencing a field-value map in a buffer. 
Checks to make \n\n/// sure the keys are unique, valid UTF-8 Strings in lexicographic order.\n\npub fn read_to_map_ref<'a>(buf: &mut &'a [u8], len: usize) -> crate::Result<BTreeMap<&'a str, ValueRef<'a>>> {\n\n let mut map: BTreeMap<&'a str,ValueRef<'a>> = BTreeMap::new();\n\n object_iterate(buf, len, |field, buf| {\n\n let val = read_value_ref(buf)?;\n\n map.insert(field, val);\n\n Ok(())\n\n })?;\n\n Ok(map)\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 41, "score": 256148.48668740946 }, { "content": "/// Attempt to get the schema for a raw document. Fails if the raw byte slice doesn't conform to \n\n/// the right format, or if the hash is invalid.\n\npub fn get_doc_schema(doc: &[u8]) -> Result<Option<Hash>> {\n\n let hash_raw = SplitDoc::split(doc)?.hash_raw;\n\n if hash_raw.is_empty() {\n\n Ok(None)\n\n }\n\n else {\n\n Ok(Some(hash_raw.try_into()?))\n\n }\n\n}\n\n\n\n// Header format:\n\n// 1. Compression Type marker\n\n// 2. If schema is used: one byte indicating length of hash (must be 127 or\n\n// lower), then the schema hash.\n\n// 3. 3-byte length of data\n\n// 4. The data\n\n// 5. The optional signature\n\n//\n\n// If compressed, only the data portion is compressed, and the 3-byte length is updated\n\n// accordingly\n", "file_path": "src/document.rs", "rank": 42, "score": 251219.4804999106 }, { "content": "#[inline]\n\nfn key_validator_is_default(v: &KeyValidator) -> bool {\n\n v.matches.is_none()\n\n && normalize_is_none(&v.normalize)\n\n && u32_is_max(&v.max_len)\n\n && u32_is_zero(&v.min_len)\n\n}\n\n\n\n/// Special validator for the keys in a Map. Used by MapValidator.\n\n///\n\n/// This validator type will only pass UTF-8 strings as map keys. 
Validation passes if:\n\n///\n\n/// - The number of bytes in the string is less than or equal to `max_len`.\n\n/// - The number of bytes in the string is greater than or equal to `min_len`.\n\n/// - If a regular expression is present in `matches`, the possibly-normalized string must match\n\n/// against the expression.\n\n///\n\n/// The `normalize` field sets any Unicode normalization that should be applied to the string. See\n\n/// [`StrValidator`]'s documentation for details.\n\n///\n\n/// # Defaults\n", "file_path": "src/validator/map.rs", "rank": 43, "score": 249023.9926201953 }, { "content": "#[inline]\n\nfn bytes_empty(v: &ByteBuf) -> bool {\n\n v.is_empty()\n\n}\n\n\n", "file_path": "src/validator/bin.rs", "rank": 44, "score": 248432.51838432683 }, { "content": "// Get an object's bytes, after the leading marker has already been parsed\n\nfn get_obj_raw(raw: &mut &[u8], len: usize) -> crate::Result<Box<[u8]>> {\n\n let start: &[u8] = raw;\n\n verify_map(raw, len)?;\n\n let (obj, _) = start.split_at(start.len()-raw.len());\n\n Ok(obj.to_vec().into_boxed_slice())\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use encode;\n\n use value::Value;\n\n use crypto::Hash;\n\n use timestamp::Timestamp;\n\n use super::*;\n\n\n\n fn read_it(raw: &mut &[u8], is_query: bool) -> (usize, Vec<Validator>) {\n\n let mut types = Vec::new();\n\n types.push(Validator::Invalid);\n\n types.push(Validator::Valid);\n", "file_path": "old/validator/object.rs", "rank": 45, "score": 240962.45160749502 }, { "content": "pub fn read_null(buf: &mut &[u8]) -> crate::Result<()> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Null = marker {\n\n Ok(())\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected null\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 46, "score": 240913.9193239516 }, { "content": "fn write_ext_marker(buf: &mut Vec<u8>, len: u32) {\n\n match len {\n\n 1 => buf.push(Marker::FixExt1.into()),\n\n 2 => 
buf.push(Marker::FixExt2.into()),\n\n 4 => buf.push(Marker::FixExt4.into()),\n\n 8 => buf.push(Marker::FixExt8.into()),\n\n 16 => buf.push(Marker::FixExt16.into()),\n\n len if len < (std::u8::MAX as u32) => {\n\n buf.push(Marker::Ext8.into());\n\n buf.push(len as u8);\n\n },\n\n len if len < (std::u16::MAX as u32) => {\n\n buf.push(Marker::Ext16.into());\n\n buf.extend_from_slice(&(len as u16).to_be_bytes());\n\n },\n\n len => {\n\n buf.push(Marker::Ext32.into());\n\n buf.extend_from_slice(&(len as u32).to_be_bytes());\n\n },\n\n };\n", "file_path": "old/encode.rs", "rank": 47, "score": 240544.1832238002 }, { "content": "/// Attempt to read an array as `ValueRef`.\n\npub fn read_array_ref<'a>(buf: &mut &'a [u8]) -> crate::Result<Vec<ValueRef<'a>>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Array(len) = marker {\n\n let mut v = Vec::with_capacity(len);\n\n for _i in 0..len {\n\n v.push(read_value_ref(buf)?);\n\n }\n\n Ok(v)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected array\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 48, "score": 238152.37915309484 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n\n/// Validator for boolean values.\n\n///\n\n/// This validator type will only pass booleans. Validation only passes if the value also\n\n/// meets the `in`/`nin` requirements.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. The defaults for\n\n/// each field are:\n\n/// - comment: \"\"\n\n/// - in_list: empty\n\n/// - nin_list: empty\n\n/// - query: false\n\n///\n\n#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]\n\n#[serde(deny_unknown_fields, default)]\n", "file_path": "src/validator/bool.rs", "rank": 49, "score": 232639.0909911613 }, { "content": "/// Attempt to read a i16 from a fogpack data structure. 
Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i16.\n\npub fn read_i16(buf: &mut &[u8]) -> crate::Result<i16> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as i16\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 50, "score": 232506.6334521514 }, { "content": "/// Attempt to read a F32 from a fogpack data structure. Fails if invalid F32 retrieved.\n\npub fn read_f32(buf: &mut &[u8]) -> crate::Result<f32> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::F32 = marker {\n\n Ok(buf.read_f32::<BigEndian>()?)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected a f32\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 51, "score": 232506.6334521514 }, { "content": "/// Attempt to read a u64 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u64.\n\npub fn read_u64(buf: &mut &[u8]) -> crate::Result<u64> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 52, "score": 232506.6334521514 }, { "content": "/// Attempt to read a i8 from a fogpack data structure. 
Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i8.\n\npub fn read_i8(buf: &mut &[u8]) -> crate::Result<i8> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as i8\"))\n\n}\n\n\n\n\n", "file_path": "old/decode.rs", "rank": 53, "score": 232506.6334521514 }, { "content": "/// Attempt to read a `Timestamp`.\n\npub fn read_time(buf: &mut &[u8]) -> crate::Result<Timestamp> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Timestamp(len) = marker {\n\n read_raw_time(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected Timestamp\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 54, "score": 232506.6334521514 }, { "content": "/// Attempt to read a F32 from a fogpack data structure. Fails if invalid F64 retrieved.\n\npub fn read_f64(buf: &mut &[u8]) -> crate::Result<f64> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::F64 = marker {\n\n Ok(buf.read_f64::<BigEndian>()?)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected a f64\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 55, "score": 232506.6334521514 }, { "content": "/// Attempt to read a u8 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u8.\n\npub fn read_u8(buf: &mut &[u8]) -> crate::Result<u8> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as u8\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 56, "score": 232506.6334521514 }, { "content": "/// Attempt to read a i32 from a fogpack data structure. 
Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i32.\n\npub fn read_i32(buf: &mut &[u8]) -> crate::Result<i32> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as i32\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 57, "score": 232506.6334521514 }, { "content": "/// Attempt to read an integer from a fogpack data structure. Fails if an integer wasn't retrieved.\n\npub fn read_integer(buf: &mut &[u8]) -> crate::Result<Integer> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n match marker {\n\n MarkerType::PosInt((len, v)) => read_pos_int(buf, len, v),\n\n MarkerType::NegInt((len, v)) => read_neg_int(buf, len, v),\n\n _ => Err(Error::FailValidate(fail_len, \"Expected Integer\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 58, "score": 232506.6334521514 }, { "content": "/// Attempt to read a `Hash`.\n\npub fn read_hash(buf: &mut &[u8]) -> crate::Result<Hash> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Hash(len) = marker {\n\n read_raw_hash(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected hash\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 59, "score": 232506.6334521514 }, { "content": "/// Attempt to read a i64 from a fogpack data structure. Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a i64.\n\npub fn read_i64(buf: &mut &[u8]) -> crate::Result<i64> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n int.as_i64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value bigger than i64 maximum\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 60, "score": 232506.6334521514 }, { "content": "/// Attempt to read a u16 from a fogpack data structure. 
Fails if an integer wasn't retrieved, or if \n\n/// the integer isn't a u16.\n\npub fn read_u16(buf: &mut &[u8]) -> crate::Result<u16> {\n\n let fail_len = buf.len();\n\n let int = read_integer(buf)?;\n\n NumCast::from(int.as_u64()\n\n .ok_or(Error::FailValidate(fail_len, \"Value was negative\"))?)\n\n .ok_or(Error::FailValidate(fail_len, \"Value couldn't be represented as u16\"))\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 61, "score": 232506.6334521514 }, { "content": "fn format_string(val: &str, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n if val.starts_with('<') {\n\n write!(f, \"\\\"<{}\\\"\", val)\n\n }\n\n else {\n\n write!(f, \"\\\"{}\\\"\", val)\n\n }\n\n}\n\n\n", "file_path": "old/value.rs", "rank": 62, "score": 230911.37173096515 }, { "content": "/// Computes the FullStreamKey for a lockbox given the needed FullKey. Does not \n\n/// verify that the correct FullKey was provided, but does check the versions\n\npub fn stream_key_from_lockbox(k: &FullKey, lock: &Lockbox) -> Result<FullStreamKey, CryptoError> {\n\n if k.get_version() != lock.get_version() { return Err(CryptoError::DecryptFailed); }\n\n if let LockType::Identity(ref id) = lock.type_id {\n\n let stream = k.calc_stream_key(&id.1)?;\n\n Ok(FullStreamKey::from_secret(stream))\n\n } else {\n\n Err(CryptoError::DecryptFailed)\n\n }\n\n}\n\n\n", "file_path": "old/crypto/lockbox.rs", "rank": 63, "score": 229123.03898122974 }, { "content": "/// Read a fogpack marker, length, and/or extension type from a buffer.\n\npub fn read_marker(buf: &mut &[u8]) -> crate::Result<MarkerType> {\n\n let fail_len = buf.len();\n\n let marker = Marker::from_u8(buf.read_u8()?);\n\n Ok(match marker {\n\n Marker::PosFixInt(val) => MarkerType::PosInt((0,val)),\n\n Marker::FixMap(len) => MarkerType::Object(len as usize),\n\n Marker::FixStr(len) => MarkerType::String(len as usize),\n\n Marker::FixArray(len) => MarkerType::Array(len as usize),\n\n Marker::Nil => MarkerType::Null,\n\n Marker::False => 
MarkerType::Boolean(false),\n\n Marker::True => MarkerType::Boolean(true),\n\n Marker::Bin8 => {\n\n let len = buf.read_u8()? as usize;\n\n MarkerType::Binary(len)\n\n },\n\n Marker::Bin16 => {\n\n let len = buf.read_u16::<BigEndian>()? as usize;\n\n if len <= (std::u8::MAX as usize) { return Err(not_shortest(fail_len)); }\n\n MarkerType::Binary(len)\n\n },\n", "file_path": "old/decode.rs", "rank": 64, "score": 228379.4181022218 }, { "content": "pub fn decompress(dctx: &mut DCtx, max_size: usize, extra_size: usize, buf: &[u8], decode: &mut Vec<u8>) -> crate::Result<()> {\n\n // Decompress the data\n\n // Find the expected size, and fail if it's larger than the maximum allowed size.\n\n let decode_len = decode.len();\n\n let expected_len = get_frame_content_size(buf);\n\n // First check if expected_len is above size on its own\n\n if expected_len >= (max_size as u64) {\n\n return Err(Error::BadSize);\n\n }\n\n if (decode_len+extra_size+(expected_len as usize)) >= max_size {\n\n return Err(Error::BadSize);\n\n }\n\n let expected_len = expected_len as usize;\n\n decode.reserve(expected_len);\n\n unsafe {\n\n decode.set_len(decode_len + expected_len);\n\n let len = decompress_dctx(\n\n dctx,\n\n &mut decode[decode_len..],\n\n buf\n\n ).map_err(|_| Error::FailDecompress)?;\n\n decode.set_len(decode_len + len);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "old/zstd_help.rs", "rank": 65, "score": 227906.132051818 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n", "file_path": "src/validator/str.rs", "rank": 66, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n", "file_path": "src/validator/float64.rs", "rank": 67, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n", "file_path": "src/validator/float32.rs", "rank": 68, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n\nconst 
MIN_TIME: Timestamp = Timestamp::min_value();\n\nconst MAX_TIME: Timestamp = Timestamp::max_value();\n\n\n", "file_path": "src/validator/time.rs", "rank": 69, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n", "file_path": "src/validator/integer.rs", "rank": 70, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n", "file_path": "src/validator/map.rs", "rank": 71, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n", "file_path": "src/validator/bin.rs", "rank": 72, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n", "file_path": "src/validator/hash.rs", "rank": 73, "score": 226838.34419966541 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n", "file_path": "src/validator/array.rs", "rank": 74, "score": 226838.34419966541 }, { "content": "/// Attempt to read binary data to a Vec.\n\npub fn read_vec(buf: &mut &[u8]) -> crate::Result<Vec<u8>> {\n\n Ok(read_bin(buf)?.to_vec())\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 75, "score": 224672.69303287272 }, { "content": "#[inline]\n\nfn validator_is_any(v: &Validator) -> bool {\n\n *v == Validator::Any\n\n}\n\n\n", "file_path": "src/validator/array.rs", "rank": 76, "score": 223756.5465100545 }, { "content": "/// Train a zstd dictionary from a sequence of documents.\n\n///\n\n/// Dictionaries can be limited to a maximum size. On failure, a zstd library error code is \n\n/// returned.\n\n///\n\n/// The zstd documentation recommends around 100 times as many input bytes as the desired \n\n/// dictionary size. It can be useful to check the resulting dictionary for overlearning - just \n\n/// dump the dictionary to a file and look for human-readable strings. 
These can occur when the \n\n/// dictionary is larger than necessary, and begins encoding the randomized portions of the \n\n/// Documents. In the future, this function may become smarter and get better at eliminating \n\n/// low-probability dictionary items.\n\npub fn train_doc_dict(max_size: usize, docs: Vec<Document>) -> Result<Vec<u8>, usize> {\n\n let samples = docs\n\n .iter()\n\n .map(|doc| {\n\n // We can call unwrap below because all Documents should already have vetted that:\n\n // 1) The raw document contains an object\n\n // 2) The object keys are strings\n\n // 3) The empty string field has a hash as the value\n\n let mut buf: &[u8] = &doc.raw_doc()[4..doc.doc_len()];\n\n let obj_len = decode::read_marker(&mut buf).unwrap();\n\n // Marker is always an object, we're just checking to see if it's empty\n\n if let MarkerType::Object(0) = obj_len {\n\n Vec::from(buf)\n\n }\n\n else {\n\n // Document might contain a schema already. Skip over it.\n\n let mut buf2: &[u8] = buf;\n\n let field = decode::read_str(&mut buf2).unwrap();\n\n if !field.is_empty() {\n\n // Wasn't a schema, use the first parsed field along with everything else\n", "file_path": "old/document.rs", "rank": 77, "score": 223199.7412950271 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n\n/// Validator for a cryptographic [`LockId`][crate::LockId].\n\n///\n\n/// This validator will only pass a LockId value. Validation passes if:\n\n///\n\n/// - If the `in` list is not empty, the LockId must be among the ones in the list.\n\n/// - The LockId must not be among the ones in the `nin` list.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. 
The defaults for\n\n/// each field are:\n\n///\n\n/// - comment: \"\"\n\n/// - in_list: empty\n\n/// - nin_list: empty\n\n/// - query: false\n", "file_path": "src/validator/lock_id.rs", "rank": 78, "score": 222770.3185019371 }, { "content": "#[inline]\n\nfn is_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n\n/// Validator for a cryptographic [`StreamId`][crate::StreamId].\n\n///\n\n/// This validator will only pass a StreamId value. Validation passes if:\n\n///\n\n/// - If the `in` list is not empty, the StreamId must be among the ones in the list.\n\n/// - The StreamId must not be among the ones in the `nin` list.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. The defaults for\n\n/// each field are:\n\n///\n\n/// - comment: \"\"\n\n/// - in_list: empty\n\n/// - nin_list: empty\n\n/// - query: false\n", "file_path": "src/validator/stream_id.rs", "rank": 79, "score": 222770.31850193714 }, { "content": "/// Attempt to read a str from a fogpack data structure. 
Fails if str wasn't present/valid.\n\npub fn read_str<'a>(buf: &mut &'a [u8]) -> crate::Result<&'a str> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::String(len) = marker {\n\n read_raw_str(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected a string\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 80, "score": 222174.25249045942 }, { "content": "/// Attempt to read binary data.\n\npub fn read_bin<'a>(buf: &mut &'a [u8]) -> crate::Result<&'a [u8]> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Binary(len) = marker {\n\n read_raw_bin(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected binary data\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 81, "score": 222169.4496910114 }, { "content": "/// Attempt to read an object as `ValueRef`.\n\npub fn read_object_ref<'a>(buf: &mut &'a [u8]) -> crate::Result<BTreeMap<&'a str, ValueRef<'a>>> {\n\n let fail_len = buf.len();\n\n let marker = read_marker(buf)?;\n\n if let MarkerType::Object(len) = marker {\n\n read_to_map_ref(buf, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected object\"))\n\n }\n\n}\n\n\n", "file_path": "old/decode.rs", "rank": 82, "score": 220519.59534053947 }, { "content": "/// Write the MessagePack value out to a Vector. 
This code assumes that all strings, binary data, \n\n/// objects, and arrays are less than 2^32 elements in size.\n\npub fn write_value(buf: &mut Vec<u8>, val: &Value) {\n\n match *val {\n\n\n\n Value::Null => {\n\n buf.push(Marker::Nil.into())\n\n },\n\n\n\n Value::Boolean(val) => {\n\n if val {\n\n buf.push(Marker::True.into())\n\n } else {\n\n buf.push(Marker::False.into())\n\n }\n\n },\n\n\n\n Value::Integer(ref val) => {\n\n match integer::get_int_internal(val) {\n\n integer::IntPriv::PosInt(u) => {\n\n if u <= 127 {\n\n buf.push(Marker::PosFixInt(u as u8).into());\n", "file_path": "old/encode.rs", "rank": 83, "score": 219336.91184665068 }, { "content": "pub fn query_check(s: usize, q: usize, s_types: &[Validator], q_types: &[Validator]) -> bool {\n\n let q_index = q;\n\n let s_index = s;\n\n let s = &s_types[s];\n\n let q = &q_types[q];\n\n\n\n // If other is type multi, verify it against each of these. This logic would otherwise have to \n\n // be in each and every validator.\n\n if let Validator::Multi(q) = q {\n\n q.iter().all(|q| {\n\n query_check(s_index, *q, s_types, q_types)\n\n })\n\n }\n\n else {\n\n match s {\n\n Validator::Invalid => false,\n\n Validator::Valid => false,\n\n Validator::Null => { if let Validator::Null = q { true } else { false } },\n\n Validator::Type(_) => false,\n\n Validator::Boolean(v) => v.query_check(q),\n", "file_path": "old/validator/validator.rs", "rank": 84, "score": 213530.21822036325 }, { "content": "pub fn dict_decompress(dctx: &mut DCtx, dict: &DDict, max_size: usize, extra_size: usize, buf: &[u8], decode: &mut Vec<u8>) -> crate::Result<()> {\n\n // Decompress the data\n\n // Find the expected size, and fail if it's larger than the maximum allowed size.\n\n let decode_len = decode.len();\n\n let expected_len = get_frame_content_size(buf);\n\n if expected_len >= (max_size as u64) {\n\n return Err(Error::BadSize);\n\n }\n\n if (decode_len+extra_size+(expected_len as usize)) >= max_size {\n\n return 
Err(Error::BadSize);\n\n }\n\n let expected_len = expected_len as usize;\n\n decode.reserve(expected_len);\n\n unsafe {\n\n decode.set_len(decode_len + expected_len);\n\n let len = decompress_using_ddict(\n\n dctx,\n\n &mut decode[decode_len..],\n\n buf,\n\n dict\n\n ).map_err(|_| Error::FailDecompress)?;\n\n decode.set_len(decode_len + len);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "old/zstd_help.rs", "rank": 85, "score": 212935.96964831167 }, { "content": "/// Initializes the underlying crypto library and makes all random number generation functions \n\n/// thread-safe. *Must* be called successfully before using the rest of this library.\n\npub fn init() -> Result<(), ()> {\n\n sodium::init()\n\n}\n\n\n\n/// Contains either the Key, StreamKey or data that was in the Lockbox\n\n#[derive(Debug)]\n\npub enum LockboxContent {\n\n Key(Key),\n\n StreamKey(StreamKey),\n\n Data(Vec<u8>),\n\n}\n\n\n", "file_path": "old/crypto/mod.rs", "rank": 86, "score": 210157.9652455692 }, { "content": "/// Initializes the underlying crypto library and makes all random number generation functions \n\n/// thread-safe.\n\npub fn init() -> Result<(), ()> {\n\n if unsafe { libsodium_sys::sodium_init() } >= 0 {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n const BEFORE_NM_BYTES: usize = libsodium_sys::crypto_box_curve25519xchacha20poly1305_BEFORENMBYTES as usize;\n\n\n\n #[test]\n\n fn correct_sizes() {\n\n assert_eq!(BEFORE_NM_BYTES, SECRET_KEY_BYTES);\n\n }\n\n}\n", "file_path": "old/crypto/sodium.rs", "rank": 87, "score": 210153.65405874912 }, { "content": "#[inline]\n\nfn is_nan(v: &f64) -> bool {\n\n v.is_nan()\n\n}\n\n\n\n/// Validator for 64-bit floating-point values.\n\n///\n\n/// This validator will only pass f64 values. 
Validation passes if:\n\n///\n\n/// - If `max` is a number, that the value is less than the maximum in `max`, or equal to it if\n\n/// `ex_max` is not set to true.\n\n/// - If `min` is a number, that the value is greater than the minimum in `min`, or equal to it if\n\n/// `ex_min` is not set to true.\n\n/// - If the `in` list is not empty, the value must be among the values in it. This performs an\n\n/// exact bit-wise match.\n\n/// - The value must not be among the values in the `nin` list. This performas an exact bit-wise\n\n/// match.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. The defaults for\n", "file_path": "src/validator/float64.rs", "rank": 88, "score": 206744.61158527067 }, { "content": "#[inline]\n\nfn is_nan(v: &f32) -> bool {\n\n v.is_nan()\n\n}\n\n\n\n/// Validator for 32-bit floating-point values.\n\n///\n\n/// This validator will only pass f32 values. Validation passes if:\n\n///\n\n/// - If `max` is a number, that the value is less than the maximum in `max`, or equal to it if\n\n/// `ex_max` is not set to true.\n\n/// - If `min` is a number, that the value is greater than the minimum in `min`, or equal to it if\n\n/// `ex_min` is not set to true.\n\n/// - If the `in` list is not empty, the value must be among the values in it. This performs an\n\n/// exact bit-wise match.\n\n/// - The value must not be among the values in the `nin` list. This performas an exact bit-wise\n\n/// match.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. 
The defaults for\n", "file_path": "src/validator/float32.rs", "rank": 89, "score": 206744.61158527067 }, { "content": "pub fn blake2b( hash: &mut [u8; HASH_BYTES], data: &[u8] ) {\n\n if data.len() > ::std::u64::MAX as usize {\n\n panic!(\"Data for hasher is somehow larger than maximum u64 value\");\n\n }\n\n // The below will only fail if we set up this function wrong.\n\n unsafe { \n\n libsodium_sys::crypto_generichash_blake2b(\n\n hash.as_mut_ptr(), HASH_BYTES, \n\n data.as_ptr(), data.len() as u64,\n\n ::std::ptr::null(), 0);\n\n }\n\n}\n\n\n", "file_path": "old/crypto/sodium.rs", "rank": 90, "score": 202950.58747353387 }, { "content": "#[inline]\n\nfn int_is_max(v: &Integer) -> bool {\n\n v.as_u64().map(|v| v == u64::MAX).unwrap_or(false)\n\n}\n\n\n", "file_path": "src/validator/integer.rs", "rank": 91, "score": 202467.9425409966 }, { "content": "#[inline]\n\nfn time_is_max(v: &Timestamp) -> bool {\n\n *v == MAX_TIME\n\n}\n\n\n\n/// Validator for timestamps.\n\n///\n\n/// This validator will only pass timestamps. Validation passes if:\n\n///\n\n/// - If the `in` list is not empty, the timestamp must be among the timestamp in the list.\n\n/// - The timestamp must not be among the timestamp in the `nin` list.\n\n/// - The timestamp is less than the maximum in `max`, or equal to it if `ex_max` is not set to true.\n\n/// - The timestamp is greater than the minimum in `min`, or equal to it if `ex_min` is not set to true.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. The defaults for\n\n/// each field are:\n\n///\n\n/// - comment: \"\"\n\n/// - max: maximum possible timestamp\n", "file_path": "src/validator/time.rs", "rank": 92, "score": 202467.94254099662 }, { "content": "#[inline]\n\nfn int_is_min(v: &Integer) -> bool {\n\n v.as_i64().map(|v| v == i64::MIN).unwrap_or(false)\n\n}\n\n\n\n/// Validator for integer values.\n\n///\n\n/// This validator type will only pass integers. 
Validation passes if:\n\n///\n\n/// - The bits set in `bits_clr` are cleared in the integer\n\n/// - The bits set in `bits_set` are set in the integer\n\n/// - The integer is less than the maximum in `max`, or equal to it if `ex_max` is not set to true.\n\n/// - The integer is greater than the minimum in `min`, or equal to it if `ex_min` is not set to true.\n\n/// - If the `in` list is not empty, the integer must be among the integers in it.\n\n/// - The integer must not be among the integers in the `nin` list.\n\n///\n\n/// # Defaults\n\n///\n\n/// Fields that aren't specified for the validator use their defaults instead. The defaults for\n\n/// each field are:\n\n///\n", "file_path": "src/validator/integer.rs", "rank": 93, "score": 202467.9425409966 }, { "content": "#[inline]\n\nfn time_is_min(v: &Timestamp) -> bool {\n\n *v == MIN_TIME\n\n}\n\n\n", "file_path": "src/validator/time.rs", "rank": 94, "score": 202467.9425409966 }, { "content": "#[inline]\n\nfn u64_is_zero(v: &u64) -> bool {\n\n *v == 0\n\n}\n\n\n", "file_path": "src/validator/integer.rs", "rank": 95, "score": 202467.9425409966 }, { "content": "#[inline]\n\nfn normalize_is_none(v: &Normalize) -> bool {\n\n matches!(v, Normalize::None)\n\n}\n\n\n\n/// Validator for UTF-8 strings.\n\n///\n\n/// This validator type will only pass string values. 
Validation passes if:\n\n///\n\n/// - The value's length in bytes is less than or equal to the value in `max_len`.\n\n/// - The value's length in bytes is greater than or equal to the value in `min_len`.\n\n/// - The value's number of unicode characters is less than or equal to the value in `max_char`.\n\n/// - The value's number of unicode characters is greater than or equal to the value in `min_char`.\n\n/// - If a regular expression is present in `matches`, the possibly-normalized value must match\n\n/// against the expression.\n\n/// - If the `in` list is not empty, the possibly-normalized value must be among the values in the list.\n\n/// - The possibly-normalized value must not be among the values in the `nin` list.\n\n///\n\n/// The `normalize` field may be set to `None`, `NFC`, or `NFKC`, corresponding to Unicode\n\n/// normalization forms. When checked for `in`, `nin`, and `matches`, the value is first put\n\n/// into the selected normalization form, and any `in` and `nin` list strings are normalized as\n", "file_path": "src/validator/str.rs", "rank": 96, "score": 202467.9425409966 }, { "content": "#[inline]\n\nfn normalize_is_none(v: &Normalize) -> bool {\n\n matches!(v, Normalize::None)\n\n}\n\n\n", "file_path": "src/validator/map.rs", "rank": 97, "score": 202467.9425409966 }, { "content": "// Get an object's bytes, without the leading marker\n\nfn get_obj(raw: &mut &[u8]) -> crate::Result<Box<[u8]>> {\n\n let fail_len = raw.len();\n\n if let MarkerType::Object(len) = read_marker(raw)? {\n\n get_obj_raw(raw, len)\n\n }\n\n else {\n\n Err(Error::FailValidate(fail_len, \"Expected objects in `in`/`nin` fields\"))\n\n }\n\n}\n\n\n", "file_path": "old/validator/object.rs", "rank": 98, "score": 202155.50281657476 }, { "content": "/// Converts a slice of bytes to a string slice. 
Works exactly like `std::str::from_utf8` except \n\n/// that it counts the number of unicode code points.\n\npub fn from_utf8(v: &[u8]) -> Result<(usize, &str), Utf8Error> {\n\n let count = run_utf8_validation(v)?;\n\n Ok((count, unsafe { str::from_utf8_unchecked(v) }))\n\n}\n\n\n", "file_path": "old/str_char.rs", "rank": 99, "score": 199707.9394627202 } ]
Rust
ring/src/aead/poly1305.rs
Soptq/phala-blockchain
f2fbd1e62b1b8c2567bfed993ae85a56f227a880
use super::{Tag, TAG_LEN}; use crate::{c, cpu}; pub(super) struct Key { key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features, } pub(super) const BLOCK_LEN: usize = 16; pub(super) const KEY_LEN: usize = 2 * BLOCK_LEN; impl Key { #[inline] pub(super) fn new(key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features) -> Self { Self { key_and_nonce, cpu_features, } } } pub struct Context { state: poly1305_state, #[allow(dead_code)] cpu_features: cpu::Features, } #[repr(C, align(64))] struct poly1305_state([u8; OPAQUE_LEN]); const OPAQUE_LEN: usize = 512; macro_rules! dispatch { ( $features:expr => ( $f:ident | $neon_f:ident ) ( $( $p:ident : $t:ty ),+ ) ( $( $a:expr ),+ ) ) => { match () { #[cfg(all(target_arch = "arm", not(target_vendor = "apple")))] () if cpu::arm::NEON.available($features) => { extern "C" { fn $neon_f( $( $p : $t ),+ ); } unsafe { $neon_f( $( $a ),+ ) } } () => { extern "C" { fn $f( $( $p : $t ),+ ); } unsafe { $f( $( $a ),+ ) } } } } } impl Context { #[inline] pub(super) fn from_key( Key { key_and_nonce, cpu_features, }: Key, ) -> Self { let mut ctx = Self { state: poly1305_state([0u8; OPAQUE_LEN]), cpu_features, }; dispatch!( cpu_features => (GFp_poly1305_init | GFp_poly1305_init_neon) (statep: &mut poly1305_state, key: &[u8; KEY_LEN]) (&mut ctx.state, &key_and_nonce)); ctx } #[inline(always)] pub fn update(&mut self, input: &[u8]) { dispatch!( self.cpu_features => (GFp_poly1305_update | GFp_poly1305_update_neon) (statep: &mut poly1305_state, input: *const u8, in_len: c::size_t) (&mut self.state, input.as_ptr(), input.len())); } pub(super) fn finish(mut self) -> Tag { let mut tag = Tag([0u8; TAG_LEN]); dispatch!( self.cpu_features => (GFp_poly1305_finish | GFp_poly1305_finish_neon) (statep: &mut poly1305_state, mac: &mut [u8; TAG_LEN]) (&mut self.state, &mut tag.0)); tag } } pub(super) fn sign(key: Key, input: &[u8]) -> Tag { let mut ctx = Context::from_key(key); ctx.update(input); ctx.finish() } #[cfg(test)] mod tests { use super::*; use 
crate::test; use core::convert::TryInto; #[test] pub fn test_poly1305() { let cpu_features = cpu::features(); test::run(test_file!("poly1305_test.txt"), |section, test_case| { assert_eq!(section, ""); let key = test_case.consume_bytes("Key"); let key: &[u8; KEY_LEN] = key.as_slice().try_into().unwrap(); let input = test_case.consume_bytes("Input"); let expected_mac = test_case.consume_bytes("MAC"); let key = Key::new(*key, cpu_features); let Tag(actual_mac) = sign(key, &input); assert_eq!(expected_mac, actual_mac.as_ref()); Ok(()) }) } }
use super::{Tag, TAG_LEN}; use crate::{c, cpu}; pub(super) struct Key { key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features, } pub(super) const BLOCK_LEN: usize = 16; pub(super) const KEY_LEN: usize = 2 * BLOCK_LEN; impl Key { #[inline] pub(super) fn new(key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features) -> Self { Self { key_and_nonce, cpu_features, } } } pub struct Context { state: poly1305_state, #[allow(dead_code)] cpu_features: cpu::Features, } #[repr(C, align(64))] struct poly1305_state([u8; OPAQUE_LEN]); const OPAQUE_LEN: usize = 512; macro_rules! dispatch { ( $features:expr => ( $f:ident | $neon_f:ident ) ( $( $p:ident : $t:ty ),+ ) ( $( $a:expr ),+ ) ) => { match () { #[cfg(all(target_arch = "arm", not(target_vendor = "apple")))] () if cpu::arm::NEON.available($features) => { extern "C" { fn $neon_f( $( $p : $t ),+ ); } unsafe { $neon_f( $( $a ),+ ) } } () => { extern "C" { fn $f( $( $p : $t ),+ ); } unsafe { $f( $( $a ),+ ) } } } } } impl Context { #[inline] pub(super) fn from_key( Key { key_and_nonce, cpu_features, }: Key, ) -> Self { let mut ctx = Self { state: poly1305_state([0u8; OPAQUE_LEN]), cpu_features, }; dispatch!( cpu_features => (GFp_poly1305_init | GFp_poly1305_init_neon) (statep: &mut poly1305_state, key: &[u8; KEY_LEN]) (&mut ctx.state, &key_and_nonce)); ctx } #[inline(always)]
pub(super) fn finish(mut self) -> Tag { let mut tag = Tag([0u8; TAG_LEN]); dispatch!( self.cpu_features => (GFp_poly1305_finish | GFp_poly1305_finish_neon) (statep: &mut poly1305_state, mac: &mut [u8; TAG_LEN]) (&mut self.state, &mut tag.0)); tag } } pub(super) fn sign(key: Key, input: &[u8]) -> Tag { let mut ctx = Context::from_key(key); ctx.update(input); ctx.finish() } #[cfg(test)] mod tests { use super::*; use crate::test; use core::convert::TryInto; #[test] pub fn test_poly1305() { let cpu_features = cpu::features(); test::run(test_file!("poly1305_test.txt"), |section, test_case| { assert_eq!(section, ""); let key = test_case.consume_bytes("Key"); let key: &[u8; KEY_LEN] = key.as_slice().try_into().unwrap(); let input = test_case.consume_bytes("Input"); let expected_mac = test_case.consume_bytes("MAC"); let key = Key::new(*key, cpu_features); let Tag(actual_mac) = sign(key, &input); assert_eq!(expected_mac, actual_mac.as_ref()); Ok(()) }) } }
pub fn update(&mut self, input: &[u8]) { dispatch!( self.cpu_features => (GFp_poly1305_update | GFp_poly1305_update_neon) (statep: &mut poly1305_state, input: *const u8, in_len: c::size_t) (&mut self.state, input.as_ptr(), input.len())); }
function_block-full_function
[ { "content": "pub fn shift_partial<F>((in_prefix_len, in_out): (usize, &mut [u8]), transform: F)\n\nwhere\n\n F: FnOnce(&[u8]) -> Block,\n\n{\n\n let (block, in_out_len) = {\n\n let input = &in_out[in_prefix_len..];\n\n let in_out_len = input.len();\n\n if in_out_len == 0 {\n\n return;\n\n }\n\n debug_assert!(in_out_len < BLOCK_LEN);\n\n (transform(input), in_out_len)\n\n };\n\n in_out[..in_out_len].copy_from_slice(&block.as_ref()[..in_out_len]);\n\n}\n", "file_path": "ring/src/aead/shift.rs", "rank": 0, "score": 385522.09376822517 }, { "content": "#[cfg(target_arch = \"x86\")]\n\npub fn shift_full_blocks<F>(in_out: &mut [u8], src: core::ops::RangeFrom<usize>, mut transform: F)\n\nwhere\n\n F: FnMut(&[u8; BLOCK_LEN]) -> Block,\n\n{\n\n use core::convert::TryFrom;\n\n\n\n let in_out_len = in_out[src.clone()].len();\n\n\n\n for i in (0..in_out_len).step_by(BLOCK_LEN) {\n\n let block = {\n\n let input =\n\n <&[u8; BLOCK_LEN]>::try_from(&in_out[(src.start + i)..][..BLOCK_LEN]).unwrap();\n\n transform(input)\n\n };\n\n let output = <&mut [u8; BLOCK_LEN]>::try_from(&mut in_out[i..][..BLOCK_LEN]).unwrap();\n\n *output = *block.as_ref();\n\n }\n\n}\n\n\n", "file_path": "ring/src/aead/shift.rs", "rank": 1, "score": 370165.36740318855 }, { "content": "type GetEntropyFn = unsafe extern \"C\" fn(*mut u8, libc::size_t) -> libc::c_int;\n\n\n", "file_path": "getrandom-sgx/src/macos.rs", "rank": 2, "score": 357529.1393408607 }, { "content": "#[inline]\n\nfn poly1305_update_padded_16(ctx: &mut poly1305::Context, input: &[u8]) {\n\n if input.len() > 0 {\n\n ctx.update(input);\n\n let remainder_len = input.len() % poly1305::BLOCK_LEN;\n\n if remainder_len != 0 {\n\n const ZEROES: [u8; poly1305::BLOCK_LEN] = [0; poly1305::BLOCK_LEN];\n\n ctx.update(&ZEROES[..(poly1305::BLOCK_LEN - remainder_len)])\n\n }\n\n }\n\n}\n\n\n\n// Also used by chacha20_poly1305_openssh.\n\npub(super) fn derive_poly1305_key(chacha_key: &chacha::Key, iv: Iv) -> poly1305::Key {\n\n let mut key_bytes = [0u8; 
poly1305::KEY_LEN];\n\n chacha_key.encrypt_iv_xor_in_place(iv, &mut key_bytes);\n\n poly1305::Key::new(key_bytes, chacha_key.cpu_features())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn max_input_len_test() {\n\n // Errata 4858 at https://www.rfc-editor.org/errata_search.php?rfc=7539.\n\n assert_eq!(super::CHACHA20_POLY1305.max_input_len, 274_877_906_880u64);\n\n }\n\n}\n", "file_path": "ring/src/aead/chacha20_poly1305.rs", "rank": 3, "score": 343827.7508545805 }, { "content": "#[cfg(target_os = \"illumos\")]\n\ntype GetRandomFn = unsafe extern \"C\" fn(*mut u8, libc::size_t, libc::c_uint) -> libc::ssize_t;\n", "file_path": "getrandom-sgx/src/solaris_illumos.rs", "rank": 4, "score": 332976.42203307635 }, { "content": "#[cfg(target_os = \"solaris\")]\n\ntype GetRandomFn = unsafe extern \"C\" fn(*mut u8, libc::size_t, libc::c_uint) -> libc::c_int;\n\n\n", "file_path": "getrandom-sgx/src/solaris_illumos.rs", "rank": 5, "score": 332976.42203307635 }, { "content": "/// Do a Blake2 128-bit hash and place result in `dest`.\n\npub fn blake2_128_into(data: &[u8], dest: &mut [u8; 16]) {\n\n\tdest.copy_from_slice(blake2_rfc::blake2b::blake2b(16, &[], data).as_bytes());\n\n}\n\n\n", "file_path": "pallets/bridge/src/hashing.rs", "rank": 6, "score": 331281.1607571207 }, { "content": "struct DropGuard<F: FnMut()>(F);\n\n\n\nimpl<F: FnMut()> Drop for DropGuard<F> {\n\n fn drop(&mut self) {\n\n self.0()\n\n }\n\n}\n", "file_path": "getrandom-sgx/src/use_file.rs", "rank": 7, "score": 325581.21302145626 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n let fd = get_rng_fd()?;\n\n let read = |buf: &mut [u8]| unsafe { libc::read(fd, buf.as_mut_ptr() as *mut _, buf.len()) };\n\n\n\n if cfg!(target_os = \"emscripten\") {\n\n // `Crypto.getRandomValues` documents `dest` should be at most 65536 bytes.\n\n for chunk in dest.chunks_mut(65536) {\n\n sys_fill_exact(chunk, read)?;\n\n }\n\n } else {\n\n sys_fill_exact(dest, read)?;\n\n }\n\n 
Ok(())\n\n}\n\n\n", "file_path": "getrandom-sgx/src/use_file.rs", "rank": 8, "score": 321800.53063670505 }, { "content": "/// Calculates the HMAC of `data` using the key `key` in one step.\n\n///\n\n/// Use `Context` to calculate HMACs where the input is in multiple parts.\n\n///\n\n/// It is generally not safe to implement HMAC verification by comparing the\n\n/// return value of `sign` to a tag. Use `verify` for verification instead.\n\npub fn sign(key: &Key, data: &[u8]) -> Tag {\n\n let mut ctx = Context::with_key(key);\n\n ctx.update(data);\n\n ctx.sign()\n\n}\n\n\n", "file_path": "ring/src/hmac.rs", "rank": 9, "score": 318389.4550526388 }, { "content": "#[cfg(feature = \"alloc\")]\n\npub fn run<F>(test_file: File, mut f: F)\n\nwhere\n\n F: FnMut(&str, &mut TestCase) -> Result<(), error::Unspecified>,\n\n{\n\n let lines = &mut test_file.contents.lines();\n\n\n\n let mut current_section = String::from(\"\");\n\n let mut failed = false;\n\n\n\n while let Some(mut test_case) = parse_test_case(&mut current_section, lines) {\n\n let result = match f(&current_section, &mut test_case) {\n\n Ok(()) => {\n\n if !test_case\n\n .attributes\n\n .iter()\n\n .any(|&(_, _, consumed)| !consumed)\n\n {\n\n Ok(())\n\n } else {\n\n failed = true;\n", "file_path": "ring/src/test.rs", "rank": 10, "score": 314939.98729463434 }, { "content": "// Decrypts the cipher (with 128 auth tag appended) in-place and returns the message as a slice.\n\npub fn decrypt<'in_out>(iv: &[u8], secret: &[u8], in_out: &'in_out mut [u8]) -> &'in_out mut [u8] {\n\n let mut iv_arr = [0u8; IV_BYTES];\n\n iv_arr.copy_from_slice(&iv[..IV_BYTES]);\n\n let key = load_key(secret);\n\n let nonce = ring::aead::Nonce::assume_unique_for_key(iv_arr);\n\n\n\n key.open_in_place(nonce, ring::aead::Aad::empty(), in_out)\n\n .expect(\"open_in_place failed\")\n\n}\n\n\n\n// TODO: handle error\n", "file_path": "crates/phactory/src/cryptography/aead.rs", "rank": 11, "score": 313933.11618284706 }, { "content": "/// Calculates 
the HMAC of `data` using the signing key `key`, and verifies\n\n/// whether the resultant value equals `tag`, in one step.\n\n///\n\n/// This is logically equivalent to, but more efficient than, constructing a\n\n/// `Key` with the same value as `key` and then using `verify`.\n\n///\n\n/// The verification will be done in constant time to prevent timing attacks.\n\npub fn verify(key: &Key, data: &[u8], tag: &[u8]) -> Result<(), error::Unspecified> {\n\n constant_time::verify_slices_are_equal(sign(key, data).as_ref(), tag)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{hmac, rand};\n\n\n\n // Make sure that `Key::generate` and `verify_with_own_key` aren't\n\n // completely wacky.\n\n #[test]\n\n pub fn hmac_signing_key_coverage() {\n\n let rng = rand::SystemRandom::new();\n\n\n\n const HELLO_WORLD_GOOD: &[u8] = b\"hello, world\";\n\n const HELLO_WORLD_BAD: &[u8] = b\"hello, worle\";\n\n\n\n for algorithm in &[\n\n hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY,\n\n hmac::HMAC_SHA256,\n", "file_path": "ring/src/hmac.rs", "rank": 12, "score": 305049.1924233644 }, { "content": "#[inline(always)]\n\nfn chacha_core(output: &mut [u8; BLOCK_LEN], input: &State) {\n\n let mut x = *input;\n\n\n\n for _ in (0..20).step_by(2) {\n\n quarterround(&mut x, 0, 4, 8, 12);\n\n quarterround(&mut x, 1, 5, 9, 13);\n\n quarterround(&mut x, 2, 6, 10, 14);\n\n quarterround(&mut x, 3, 7, 11, 15);\n\n quarterround(&mut x, 0, 5, 10, 15);\n\n quarterround(&mut x, 1, 6, 11, 12);\n\n quarterround(&mut x, 2, 7, 8, 13);\n\n quarterround(&mut x, 3, 4, 9, 14);\n\n }\n\n\n\n for (x, input) in x.iter_mut().zip(input.iter()) {\n\n *x = x.wrapping_add(*input);\n\n }\n\n\n\n for (output, &x) in ChunksFixedMut::<[u8; 4]>::chunks_fixed_mut(output).zip(x.iter()) {\n\n *output = u32::to_le_bytes(x)\n\n }\n\n}\n\n\n", "file_path": "ring/src/aead/chacha/fallback.rs", "rank": 13, "score": 300492.51428902254 }, { "content": "/// Instantiate all Full RPC extensions.\n\npub fn create_full<C, P, SC, B>(\n\n\tdeps: 
FullDeps<C, P, SC, B>,\n\n) -> Result<jsonrpc_core::IoHandler<sc_rpc_api::Metadata>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n\tC: ProvideRuntimeApi<Block>\n\n\t\t+ HeaderBackend<Block>\n\n\t\t+ AuxStore\n\n\t\t+ HeaderMetadata<Block, Error = BlockChainError>\n\n\t\t+ Sync\n\n\t\t+ Send\n\n\t\t+ 'static,\n\n\tC::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>,\n\n\tC::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>,\n\n\tC::Api: BabeApi<Block>,\n\n\tC::Api: BlockBuilder<Block>,\n\n\tP: TransactionPool + 'static,\n\n\tSC: SelectChain<Block> + 'static,\n\n\tB: sc_client_api::Backend<Block> + Send + Sync + 'static,\n\n\tB::State: sc_client_api::backend::StateBackend<sp_runtime::traits::HashFor<Block>>,\n\n{\n", "file_path": "standalone/rpc/src/lib.rs", "rank": 14, "score": 294321.0574281624 }, { "content": "/// Formats a private key \"prefix||private_key||middle||public_key\" where\n\n/// `template` is \"prefix||middle\" split at position `private_key_index`.\n\nfn wrap_key_(template: &Template, private_key: &[u8], public_key: &[u8], bytes: &mut [u8]) {\n\n let (before_private_key, after_private_key) =\n\n template.bytes.split_at(template.private_key_index);\n\n let private_key_end_index = template.private_key_index + private_key.len();\n\n bytes[..template.private_key_index].copy_from_slice(before_private_key);\n\n bytes[template.private_key_index..private_key_end_index].copy_from_slice(&private_key);\n\n bytes[private_key_end_index..(private_key_end_index + after_private_key.len())]\n\n .copy_from_slice(after_private_key);\n\n bytes[(private_key_end_index + after_private_key.len())..].copy_from_slice(public_key);\n\n}\n", "file_path": "ring/src/pkcs8.rs", "rank": 15, "score": 293322.7637861257 }, { "content": "/// Fill `dest` with random bytes from the system's preferred random number\n\n/// source.\n\n///\n\n/// This function returns an error on any failure, including partial reads. 
We\n\n/// make no guarantees regarding the contents of `dest` on error. If `dest` is\n\n/// empty, `getrandom` immediately returns success, making no calls to the\n\n/// underlying operating system.\n\n///\n\n/// Blocking is possible, at least during early boot; see module documentation.\n\n///\n\n/// In general, `getrandom` will be fast enough for interactive usage, though\n\n/// significantly slower than a user-space CSPRNG; for the latter consider\n\n/// [`rand::thread_rng`](https://docs.rs/rand/*/rand/fn.thread_rng.html).\n\npub fn getrandom(dest: &mut [u8]) -> Result<(), Error> {\n\n if dest.is_empty() {\n\n return Ok(());\n\n }\n\n imp::getrandom_inner(dest)\n\n}\n", "file_path": "getrandom-sgx/src/lib.rs", "rank": 16, "score": 286262.5126775132 }, { "content": "pub fn big_endian_from_limbs(limbs: &[Limb], out: &mut [u8]) {\n\n let num_limbs = limbs.len();\n\n let out_len = out.len();\n\n assert_eq!(out_len, num_limbs * LIMB_BYTES);\n\n for i in 0..num_limbs {\n\n let mut limb = limbs[i];\n\n for j in 0..LIMB_BYTES {\n\n out[((num_limbs - i - 1) * LIMB_BYTES) + (LIMB_BYTES - j - 1)] = (limb & 0xff) as u8;\n\n limb >>= 8;\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\npub type Window = Limb;\n\n\n\n/// Processes `limbs` as a sequence of 5-bit windows, folding the windows from\n\n/// most significant to least significant and returning the accumulated result.\n\n/// The first window will be mapped by `init` to produce the initial value for\n\n/// the accumulator. Then `f` will be called to fold the accumulator and the\n\n/// next window until all windows are processed. 
When the input's bit length\n\n/// isn't divisible by 5, the window passed to `init` will be partial; all\n\n/// windows passed to `fold` will be full.\n\n///\n\n/// This is designed to avoid leaking the contents of `limbs` through side\n\n/// channels as long as `init` and `fold` are side-channel free.\n\n///\n\n/// Panics if `limbs` is empty.\n", "file_path": "ring/src/limb.rs", "rank": 17, "score": 286258.72821670613 }, { "content": "#[inline(always)]\n\nfn quarterround(x: &mut State, a: usize, b: usize, c: usize, d: usize) {\n\n #[inline(always)]\n\n fn step(x: &mut State, a: usize, b: usize, c: usize, rotation: u32) {\n\n x[a] = x[a].wrapping_add(x[b]);\n\n x[c] = (x[c] ^ x[a]).rotate_left(rotation);\n\n }\n\n step(x, a, b, d, 16);\n\n step(x, c, d, b, 12);\n\n step(x, a, b, d, 8);\n\n step(x, c, d, b, 7);\n\n}\n\n\n", "file_path": "ring/src/aead/chacha/fallback.rs", "rank": 18, "score": 283367.2970603888 }, { "content": "// Encrypts the data in-place and appends a 128bit auth tag\n\npub fn encrypt(iv: &IV, secret: &[u8], in_out: &mut Vec<u8>) {\n\n let nonce = ring::aead::Nonce::assume_unique_for_key(*iv);\n\n let key = load_key(secret);\n\n\n\n key.seal_in_place_append_tag(nonce, ring::aead::Aad::empty(), in_out)\n\n .expect(\"seal_in_place_separate_tag failed\");\n\n}\n\n\n", "file_path": "crates/phactory/src/cryptography/aead.rs", "rank": 19, "score": 282961.5872593245 }, { "content": "/// Do a Blake2 128-bit hash and return result.\n\npub fn blake2_128(data: &[u8]) -> [u8; 16] {\n\n\tlet mut r = [0; 16];\n\n\tblake2_128_into(data, &mut r);\n\n\tr\n\n}\n", "file_path": "pallets/bridge/src/hashing.rs", "rank": 20, "score": 281932.8018926466 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n // Prevent overflow of u32\n\n for chunk in dest.chunks_mut(u32::max_value() as usize) {\n\n let ret = unsafe {\n\n BCryptGenRandom(\n\n ptr::null_mut(),\n\n chunk.as_mut_ptr(),\n\n chunk.len() as u32,\n\n 
BCRYPT_USE_SYSTEM_PREFERRED_RNG,\n\n )\n\n };\n\n // NTSTATUS codes use the two highest bits for severity status.\n\n if ret >> 30 == 0b11 {\n\n // We zeroize the highest bit, so the error code will reside\n\n // inside the range designated for OS codes.\n\n let code = ret ^ (1 << 31);\n\n // SAFETY: the second highest bit is always equal to one,\n\n // so it's impossible to get zero. Unfortunately the type\n\n // system does not have a way to express this yet.\n\n let code = unsafe { NonZeroU32::new_unchecked(code) };\n\n return Err(Error::from(code));\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "getrandom-sgx/src/windows.rs", "rank": 21, "score": 281752.0553226169 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n // Apple's documentation guarantees kSecRandomDefault is a synonym for NULL.\n\n let ret = unsafe { SecRandomCopyBytes(null(), dest.len(), dest.as_mut_ptr()) };\n\n if ret == -1 {\n\n Err(Error::IOS_SEC_RANDOM)\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "getrandom-sgx/src/ios.rs", "rank": 22, "score": 281752.05532261694 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n static RNG_INIT: AtomicBool = AtomicBool::new(false);\n\n while !RNG_INIT.load(Relaxed) {\n\n let ret = unsafe { libc::randSecure() };\n\n if ret < 0 {\n\n return Err(Error::VXWORKS_RAND_SECURE);\n\n } else if ret > 0 {\n\n RNG_INIT.store(true, Relaxed);\n\n break;\n\n }\n\n unsafe { libc::usleep(10) };\n\n }\n\n\n\n // Prevent overflow of i32\n\n for chunk in dest.chunks_mut(i32::max_value() as usize) {\n\n let ret = unsafe { libc::randABytes(chunk.as_mut_ptr(), chunk.len() as i32) };\n\n if ret != 0 {\n\n return Err(last_os_error());\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "getrandom-sgx/src/vxworks.rs", "rank": 23, "score": 281752.0553226169 }, { "content": "#[allow(dead_code)]\n\npub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n extern \"C\" {\n\n fn __getrandom_custom(dest: *mut u8, 
len: usize) -> u32;\n\n }\n\n let ret = unsafe { __getrandom_custom(dest.as_mut_ptr(), dest.len()) };\n\n match NonZeroU32::new(ret) {\n\n None => Ok(()),\n\n Some(code) => Err(Error::from(code)),\n\n }\n\n}\n", "file_path": "getrandom-sgx/src/custom.rs", "rank": 24, "score": 281752.0553226169 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n static GETENTROPY: Weak = unsafe { Weak::new(\"getentropy\\0\") };\n\n if let Some(fptr) = GETENTROPY.ptr() {\n\n let func: GetEntropyFn = unsafe { mem::transmute(fptr) };\n\n for chunk in dest.chunks_mut(256) {\n\n let ret = unsafe { func(chunk.as_mut_ptr(), chunk.len()) };\n\n if ret != 0 {\n\n return Err(last_os_error());\n\n }\n\n }\n\n Ok(())\n\n } else {\n\n // We fallback to reading from /dev/random instead of SecRandomCopyBytes\n\n // to avoid high startup costs and linking the Security framework.\n\n use_file::getrandom_inner(dest)\n\n }\n\n}\n", "file_path": "getrandom-sgx/src/macos.rs", "rank": 25, "score": 281752.05532261694 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n if !is_rdrand_supported() {\n\n return Err(Error::NO_RDRAND);\n\n }\n\n\n\n // SAFETY: After this point, rdrand is supported, so calling the rdrand\n\n // functions is not undefined behavior.\n\n unsafe { rdrand_exact(dest) }\n\n}\n\n\n\n#[target_feature(enable = \"rdrand\")]\n\nunsafe fn rdrand_exact(dest: &mut [u8]) -> Result<(), Error> {\n\n // We use chunks_exact_mut instead of chunks_mut as it allows almost all\n\n // calls to memcpy to be elided by the compiler.\n\n let mut chunks = dest.chunks_exact_mut(WORD_SIZE);\n\n for chunk in chunks.by_ref() {\n\n chunk.copy_from_slice(&rdrand()?);\n\n }\n\n\n\n let tail = chunks.into_remainder();\n\n let n = tail.len();\n\n if n > 0 {\n\n tail.copy_from_slice(&rdrand()?[..n]);\n\n }\n\n Ok(())\n\n}\n", "file_path": "getrandom-sgx/src/rdrand.rs", "rank": 26, "score": 281752.05532261694 }, { "content": "pub fn getrandom_inner(dest: 
&mut [u8]) -> Result<(), Error> {\n\n unsafe { zx_cprng_draw(dest.as_mut_ptr(), dest.len()) }\n\n Ok(())\n\n}\n", "file_path": "getrandom-sgx/src/fuchsia.rs", "rank": 27, "score": 281752.05532261694 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n for chunk in dest.chunks_mut(256) {\n\n let ret = unsafe { libc::getentropy(chunk.as_mut_ptr() as *mut libc::c_void, chunk.len()) };\n\n if ret == -1 {\n\n return Err(last_os_error());\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "getrandom-sgx/src/openbsd.rs", "rank": 28, "score": 281752.05532261694 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n static GETRANDOM: Weak = unsafe { Weak::new(\"getrandom\\0\") };\n\n type GetRandomFn = unsafe extern \"C\" fn(*mut u8, libc::size_t, libc::c_uint) -> libc::ssize_t;\n\n\n\n if let Some(fptr) = GETRANDOM.ptr() {\n\n let func: GetRandomFn = unsafe { core::mem::transmute(fptr) };\n\n return sys_fill_exact(dest, |buf| unsafe { func(buf.as_mut_ptr(), buf.len(), 0) });\n\n } else {\n\n use_file::getrandom_inner(dest)\n\n }\n\n}\n", "file_path": "getrandom-sgx/src/dragonfly.rs", "rank": 29, "score": 281752.05532261694 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n unsafe { random_get(dest.as_mut_ptr(), dest.len()) }.map_err(|e: wasi::Error| {\n\n // convert wasi's Error into getrandom's NonZeroU32 error\n\n NonZeroU32::new(e.raw_error() as u32).unwrap().into()\n\n })\n\n}\n", "file_path": "getrandom-sgx/src/wasi.rs", "rank": 30, "score": 281752.0553226169 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n #[cfg(target_os = \"freebsd\")]\n\n {\n\n use crate::util_libc::Weak;\n\n static GETRANDOM: Weak = unsafe { Weak::new(\"getrandom\\0\") };\n\n type GetRandomFn =\n\n unsafe extern \"C\" fn(*mut u8, libc::size_t, libc::c_uint) -> libc::ssize_t;\n\n\n\n if let Some(fptr) = GETRANDOM.ptr() {\n\n let func: GetRandomFn = unsafe { 
core::mem::transmute(fptr) };\n\n return sys_fill_exact(dest, |buf| unsafe { func(buf.as_mut_ptr(), buf.len(), 0) });\n\n }\n\n }\n\n // Both FreeBSD and NetBSD will only return up to 256 bytes at a time, and\n\n // older NetBSD kernels will fail on longer buffers.\n\n for chunk in dest.chunks_mut(256) {\n\n sys_fill_exact(chunk, kern_arnd)?\n\n }\n\n Ok(())\n\n}\n", "file_path": "getrandom-sgx/src/bsd_arandom.rs", "rank": 31, "score": 277459.6055810433 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n static HAS_GETRANDOM: LazyBool = LazyBool::new();\n\n if HAS_GETRANDOM.unsync_init(is_getrandom_available) {\n\n sys_fill_exact(dest, |buf| unsafe {\n\n getrandom(buf.as_mut_ptr() as *mut libc::c_void, buf.len(), 0)\n\n })\n\n } else {\n\n use_file::getrandom_inner(dest)\n\n }\n\n}\n\n\n", "file_path": "getrandom-sgx/src/linux_android.rs", "rank": 32, "score": 277459.6055810433 }, { "content": "#[allow(deprecated)]\n\npub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n\n\n // sgx_read_rand cannot take len=0, but this function does\n\n if dest.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n match rsgx_read_rand(dest) {\n\n Ok(()) => Ok(()),\n\n Err(_) => Err(Error::UNSUPPORTED),\n\n }\n\n}\n\n\n\n//#[inline(always)]\n\n//pub fn error_msg_inner(_: NonZeroU32) -> Option<&'static str> { None }\n", "file_path": "getrandom-sgx/src/mesalock_sgx.rs", "rank": 33, "score": 277459.6055810433 }, { "content": "pub fn getrandom_inner(dest: &mut [u8]) -> Result<(), Error> {\n\n static GETRANDOM: Weak = unsafe { Weak::new(\"getrandom\\0\") };\n\n if let Some(fptr) = GETRANDOM.ptr() {\n\n let func: GetRandomFn = unsafe { mem::transmute(fptr) };\n\n // 256 bytes is the lowest common denominator across all the Solaris\n\n // derived platforms for atomically obtaining random data.\n\n for chunk in dest.chunks_mut(256) {\n\n sys_fill_exact(chunk, |buf| unsafe {\n\n func(buf.as_mut_ptr(), buf.len(), 0) as libc::ssize_t\n\n })?\n\n }\n\n 
Ok(())\n\n } else {\n\n use_file::getrandom_inner(dest)\n\n }\n\n}\n", "file_path": "getrandom-sgx/src/solaris_illumos.rs", "rank": 34, "score": 277459.6055810433 }, { "content": "fn chacha20_init(key: &[u8], cpu_features: cpu::Features) -> Result<KeyInner, error::Unspecified> {\n\n let chacha20_key: [u8; chacha::KEY_LEN] = key.try_into()?;\n\n Ok(KeyInner::ChaCha20(chacha::Key::new(\n\n chacha20_key,\n\n cpu_features,\n\n )))\n\n}\n\n\n", "file_path": "ring/src/aead/quic.rs", "rank": 35, "score": 276191.62809214235 }, { "content": "fn aes_init_256(key: &[u8], cpu_features: cpu::Features) -> Result<KeyInner, error::Unspecified> {\n\n let aes_key = aes::Key::new(key, aes::Variant::AES_256, cpu_features)?;\n\n Ok(KeyInner::Aes(aes_key))\n\n}\n\n\n", "file_path": "ring/src/aead/quic.rs", "rank": 36, "score": 276191.62809214235 }, { "content": "fn aes_init_128(key: &[u8], cpu_features: cpu::Features) -> Result<KeyInner, error::Unspecified> {\n\n let aes_key = aes::Key::new(key, aes::Variant::AES_128, cpu_features)?;\n\n Ok(KeyInner::Aes(aes_key))\n\n}\n\n\n", "file_path": "ring/src/aead/quic.rs", "rank": 37, "score": 276191.62809214235 }, { "content": "pub fn ecdh_pubkey(i: u8) -> EcdhPublicKey {\n\n\tlet mut raw = [0u8; 32];\n\n\traw[31] = i;\n\n\traw[30] = 1; // distinguish with the genesis config\n\n\tEcdhPublicKey(raw)\n\n}\n\n\n", "file_path": "pallets/phala/src/mock.rs", "rank": 38, "score": 272353.55386197055 }, { "content": "pub fn worker_pubkey(i: u8) -> WorkerPublicKey {\n\n\tlet mut raw = [0u8; 32];\n\n\traw[31] = i;\n\n\traw[30] = 1; // distinguish with the genesis config\n\n\tWorkerPublicKey::from_raw(raw)\n\n}\n", "file_path": "pallets/phala/src/mock.rs", "rank": 39, "score": 272353.55386197055 }, { "content": "fn init_128(key: &[u8], cpu_features: cpu::Features) -> Result<aead::KeyInner, error::Unspecified> {\n\n init(key, aes::Variant::AES_128, cpu_features)\n\n}\n\n\n", "file_path": "ring/src/aead/aes_gcm.rs", "rank": 40, "score": 268254.99513517343 }, 
{ "content": "fn init_256(key: &[u8], cpu_features: cpu::Features) -> Result<aead::KeyInner, error::Unspecified> {\n\n init(key, aes::Variant::AES_256, cpu_features)\n\n}\n\n\n", "file_path": "ring/src/aead/aes_gcm.rs", "rank": 41, "score": 268254.99513517343 }, { "content": "pub fn eddsa_digest(signature_r: &[u8], public_key: &[u8], msg: &[u8]) -> digest::Digest {\n\n let mut ctx = digest::Context::new(&digest::SHA512);\n\n ctx.update(signature_r);\n\n ctx.update(public_key);\n\n ctx.update(msg);\n\n ctx.finish()\n\n}\n", "file_path": "ring/src/ec/curve25519/ed25519.rs", "rank": 42, "score": 266066.9218420593 }, { "content": "#[inline]\n\nfn with_swapped_xi(Xi(xi): &mut Xi, f: impl FnOnce(&mut [u64; 2])) {\n\n let unswapped: [u64; 2] = (*xi).into();\n\n let mut swapped: [u64; 2] = [unswapped[1], unswapped[0]];\n\n f(&mut swapped);\n\n *xi = Block::from([swapped[1], swapped[0]])\n\n}\n", "file_path": "ring/src/aead/gcm/gcm_nohw.rs", "rank": 43, "score": 265817.1746241769 }, { "content": "fn aes_gcm_seal(key: &aead::KeyInner, nonce: Nonce, aad: Aad<&[u8]>, in_out: &mut [u8]) -> Tag {\n\n let Key { aes_key, gcm_key } = match key {\n\n aead::KeyInner::AesGcm(key) => key,\n\n _ => unreachable!(),\n\n };\n\n\n\n let mut ctr = Counter::one(nonce);\n\n let tag_iv = ctr.increment();\n\n\n\n let total_in_out_len = in_out.len();\n\n let aad_len = aad.0.len();\n\n let mut auth = gcm::Context::new(gcm_key, aad);\n\n\n\n #[cfg(target_arch = \"x86_64\")]\n\n let in_out = {\n\n if !aes_key.is_aes_hw() || !auth.is_avx2() {\n\n in_out\n\n } else {\n\n use crate::c;\n\n extern \"C\" {\n", "file_path": "ring/src/aead/aes_gcm.rs", "rank": 44, "score": 265096.9696163923 }, { "content": "// Encrypts the data in-place and appends a 128bit auth tag\n\npub fn encrypt(iv: &IV, secret: &[u8], in_out: &mut Vec<u8>) -> Result<(), CryptoError> {\n\n let nonce = ring::aead::Nonce::assume_unique_for_key(*iv);\n\n let key = load_key(secret)?;\n\n\n\n key.0\n\n .seal_in_place_append_tag(nonce, 
ring::aead::Aad::empty(), in_out)\n\n .map_err(|_| CryptoError::AeadEncryptError)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/phala-crypto/src/aead.rs", "rank": 45, "score": 262783.4939778498 }, { "content": "pub fn small_nonnegative_integer(input: &mut untrusted::Reader) -> Result<u8, Error> {\n\n ring::io::der::small_nonnegative_integer(input).map_err(|_| Error::BadDer)\n\n}\n\n\n", "file_path": "webpki/src/der.rs", "rank": 46, "score": 261033.67384375838 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn uniform_keypair_strategy<Priv, Pub>() -> impl Strategy<Value = KeyPair<Priv, Pub>>\n\nwhere\n\n Pub: Serialize + for<'a> From<&'a Priv>,\n\n Priv: Serialize + Uniform,\n\n{\n\n // The no_shrink is because keypairs should be fixed -- shrinking would cause a different\n\n // keypair to be generated, which appears to not be very useful.\n\n any::<[u8; 32]>()\n\n .prop_map(|seed| {\n\n let mut rng = StdRng::from_seed(seed);\n\n KeyPair::<Priv, Pub>::generate(&mut rng)\n\n })\n\n .no_shrink()\n\n}\n\n\n\n/// This struct provides a means of testing signing and verification through\n\n/// BCS serialization and domain separation\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct TestDiemCrypto(pub String);\n", "file_path": "diem/crypto/crypto/src/test_utils.rs", "rank": 47, "score": 259290.48396431192 }, { "content": "/// Rotate the sender's authentication key to `new_key`.\n\n/// `new_key` should be a 256 bit sha3 hash of an ed25519 public key.\n\n/// * Aborts with `LibraAccount::EKEY_ROTATION_CAPABILITY_ALREADY_EXTRACTED` if the `KeyRotationCapability` for `account` has already been extracted.\n\n/// * Aborts with `LibraAccount::EMALFORMED_AUTHENTICATION_KEY` if the length of `new_key` != 32.\n\npub fn encode_rotate_authentication_key_script(new_key: Vec<u8>) -> Script {\n\n Script::new(\n\n ROTATE_AUTHENTICATION_KEY_CODE.to_vec(),\n\n vec![],\n\n vec![TransactionArgument::U8Vector(new_key)],\n\n 
)\n\n}\n\n\n", "file_path": "diem/client/transaction-builder/src/stdlib.rs", "rank": 48, "score": 256995.70995722438 }, { "content": "fn mix_key(ck: &mut Vec<u8>, dh_output: &[u8]) -> Result<Vec<u8>, NoiseError> {\n\n let (new_ck, k) = hkdf(ck, Some(dh_output))?;\n\n *ck = new_ck;\n\n Ok(k)\n\n}\n\n\n\n//\n\n// Noise implementation\n\n// --------------------\n\n//\n\n\n\n/// A key holder structure used for both initiators and responders.\n\n#[derive(Debug)]\n\npub struct NoiseConfig {\n\n private_key: x25519::PrivateKey,\n\n public_key: x25519::PublicKey,\n\n}\n\n\n\n/// Refer to the Noise protocol framework specification in order to understand these fields.\n\n#[cfg_attr(test, derive(Clone))]\n", "file_path": "diem/crypto/crypto/src/noise.rs", "rank": 49, "score": 256669.68461494622 }, { "content": "fn derive_block(secret: &hmac::Key, iterations: NonZeroU32, salt: &[u8], idx: u32, out: &mut [u8]) {\n\n let mut ctx = hmac::Context::with_key(secret);\n\n ctx.update(salt);\n\n ctx.update(&u32::to_be_bytes(idx));\n\n\n\n let mut u = ctx.sign();\n\n\n\n let mut remaining: u32 = iterations.into();\n\n loop {\n\n for i in 0..out.len() {\n\n out[i] ^= u.as_ref()[i];\n\n }\n\n\n\n if remaining == 1 {\n\n break;\n\n }\n\n remaining -= 1;\n\n\n\n u = hmac::sign(secret, u.as_ref());\n\n }\n\n}\n\n\n", "file_path": "ring/src/pbkdf2.rs", "rank": 50, "score": 256154.90894839336 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn keypair_strategy() -> impl Strategy<Value = KeyPair<PrivateKey, PublicKey>> {\n\n test_utils::uniform_keypair_strategy::<PrivateKey, PublicKey>()\n\n}\n", "file_path": "diem/crypto/crypto/src/x25519.rs", "rank": 52, "score": 254727.91805580407 }, { "content": "pub fn deopaque_query<T>(mut data: &[u8]) -> Result<T, ContractQueryError>\n\nwhere\n\n T: Decode + Debug,\n\n{\n\n Decode::decode(&mut data).or(Err(ContractQueryError::DecodeError))\n\n}\n\n\n\n#[derive(Debug, Error)]\n\n#[error(\"{:?}\", 
self)]\n\n#[allow(clippy::enum_variant_names)]\n\npub enum Error {\n\n IoError(#[from] anyhow::Error),\n\n DecodeError(#[from] CodecError),\n\n PersistentRuntimeNotFound,\n\n}\n", "file_path": "crates/phactory/src/types.rs", "rank": 53, "score": 253392.2308688286 }, { "content": "/// Derives a secret key for symmetric encryption without a KDF\n\n///\n\n/// `pk` must be in compressed version.\n\npub fn agree(sk: &EcdhKey, pk: &[u8]) -> Result<Vec<u8>, CryptoError> {\n\n // The first 32 bytes holds the canonical private key\n\n let mut key = [0u8; 32];\n\n key.copy_from_slice(&sk.secret()[0..32]);\n\n let key = Scalar::from_canonical_bytes(key).expect(\"This should never fail with correct seed\");\n\n let public = PublicKey::from_bytes(pk).or(Err(CryptoError::EcdhInvalidPublicKey))?;\n\n Ok((key * public.as_point()).compress().0.to_vec())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn generate_key() -> EcdhKey {\n\n use rand::RngCore;\n\n let mut rng = rand::thread_rng();\n\n let mut seed: Seed = [0_u8; MINI_SECRET_KEY_LENGTH];\n\n\n\n rng.fill_bytes(&mut seed);\n\n\n", "file_path": "crates/phala-crypto/src/ecdh.rs", "rank": 54, "score": 250412.53688175624 }, { "content": "/// (1) Rotate the public key stored in `account`'s `SharedEd25519PublicKey` resource to\n\n/// `new_public_key`\n\n/// (2) Rotate the authentication key using the capability stored in `account`'s\n\n/// `SharedEd25519PublicKey` to a new value derived from `new_public_key`\n\n/// Aborts if `account` does not have a `SharedEd25519PublicKey` resource.\n\n/// Aborts if the length of `new_public_key` is not 32.\n\npub fn encode_rotate_shared_ed25519_public_key_script(public_key: Vec<u8>) -> Script {\n\n Script::new(\n\n ROTATE_SHARED_ED25519_PUBLIC_KEY_CODE.to_vec(),\n\n vec![],\n\n vec![TransactionArgument::U8Vector(public_key)],\n\n )\n\n}\n\n\n", "file_path": "diem/client/transaction-builder/src/stdlib.rs", "rank": 55, "score": 250369.52393284248 }, { "content": "/// (1) Rotate 
the authentication key of the sender to `public_key`\n\n/// (2) Publish a resource containing a 32-byte ed25519 public key and the rotation capability\n\n/// of the sender under the sender's address.\n\n/// Aborts if the sender already has a `SharedEd25519PublicKey` resource.\n\n/// Aborts if the length of `new_public_key` is not 32.\n\npub fn encode_publish_shared_ed25519_public_key_script(public_key: Vec<u8>) -> Script {\n\n Script::new(\n\n PUBLISH_SHARED_ED25519_PUBLIC_KEY_CODE.to_vec(),\n\n vec![],\n\n vec![TransactionArgument::U8Vector(public_key)],\n\n )\n\n}\n\n\n", "file_path": "diem/client/transaction-builder/src/stdlib.rs", "rank": 56, "score": 250364.83903010676 }, { "content": "#[inline]\n\npub fn small_nonnegative_integer(input: &mut untrusted::Reader) -> Result<u8, error::Unspecified> {\n\n let value = nonnegative_integer(input, 0)?;\n\n value.read_all(error::Unspecified, |input| {\n\n let r = input.read_byte()?;\n\n Ok(r)\n\n })\n\n}\n\n\n", "file_path": "ring/src/io/der.rs", "rank": 57, "score": 248295.48058025588 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn keypair_strategy() -> impl Strategy<Value = KeyPair<Ed25519PrivateKey, Ed25519PublicKey>> {\n\n test_utils::uniform_keypair_strategy::<Ed25519PrivateKey, Ed25519PublicKey>()\n\n}\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nuse proptest::prelude::*;\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nimpl proptest::arbitrary::Arbitrary for Ed25519PublicKey {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {\n\n crate::test_utils::uniform_keypair_strategy::<Ed25519PrivateKey, Ed25519PublicKey>()\n\n .prop_map(|v| v.public_key)\n\n .boxed()\n\n }\n\n}\n", "file_path": "diem/crypto/crypto/src/ed25519.rs", "rank": 58, "score": 248291.77067871872 }, { "content": "fn generate_seal_key() -> [u8; 16] {\n\n let key_request = sgx_key_request_t {\n\n key_name: SGX_KEYSELECT_SEAL,\n\n 
key_policy: SGX_KEYPOLICY_MRSIGNER,\n\n isv_svn: 0_u16,\n\n reserved1: 0_u16,\n\n cpu_svn: sgx_cpu_svn_t { svn: [0_u8; 16] },\n\n attribute_mask: sgx_attributes_t { flags: 0, xfrm: 0 },\n\n key_id: sgx_key_id_t::default(),\n\n misc_mask: 0,\n\n config_svn: 0_u16,\n\n reserved2: [0_u8; SGX_KEY_REQUEST_RESERVED2_BYTES],\n\n };\n\n let seal_key = rsgx_get_align_key(&key_request).unwrap_or_default();\n\n seal_key.key\n\n}\n\n\n\n#[global_allocator]\n\nstatic ALLOCATOR: StatSizeAllocator<System> = StatSizeAllocator::new(System);\n\n\n", "file_path": "standalone/pruntime/enclave/src/pal_sgx.rs", "rank": 59, "score": 247519.0402847125 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn random_serializable_struct() -> impl Strategy<Value = TestDiemCrypto> {\n\n (String::arbitrary()).prop_map(TestDiemCrypto).no_shrink()\n\n}\n", "file_path": "diem/crypto/crypto/src/test_utils.rs", "rank": 60, "score": 245614.249172672 }, { "content": "fn verify(key: poly1305::Key, msg: &[u8], tag: &[u8; TAG_LEN]) -> Result<(), error::Unspecified> {\n\n let Tag(calculated_tag) = poly1305::sign(key, msg);\n\n constant_time::verify_slices_are_equal(calculated_tag.as_ref(), tag)\n\n}\n", "file_path": "ring/src/aead/chacha20_poly1305_openssh.rs", "rank": 61, "score": 245063.15177356557 }, { "content": "/// Sets up `n` workers starting from 1, registered and benchmarked. 
All owned by account1.\n\npub fn setup_workers(n: u8) {\n\n\tuse frame_support::assert_ok;\n\n\tfor i in 1..=n {\n\n\t\tlet worker = worker_pubkey(i);\n\n\t\tassert_ok!(PhalaRegistry::force_register_worker(\n\n\t\t\tOrigin::root(),\n\n\t\t\tworker.clone(),\n\n\t\t\tecdh_pubkey(1),\n\n\t\t\tSome(1)\n\n\t\t));\n\n\t\tPhalaRegistry::internal_set_benchmark(&worker, Some(1));\n\n\t}\n\n}\n\n\n", "file_path": "pallets/phala/src/mock.rs", "rank": 62, "score": 244777.20968109986 }, { "content": "fn finish(mut auth: poly1305::Context, aad_len: usize, in_out_len: usize) -> Tag {\n\n auth.update(\n\n [\n\n LittleEndian::from(polyfill::u64_from_usize(aad_len)),\n\n LittleEndian::from(polyfill::u64_from_usize(in_out_len)),\n\n ]\n\n .as_byte_array(),\n\n );\n\n auth.finish()\n\n}\n\n\n\npub type Key = chacha::Key;\n\n\n\n// Keep in sync with BoringSSL's `chacha20_poly1305_open_data` and\n\n// `chacha20_poly1305_seal_data`.\n\n#[repr(C)]\n\n#[cfg(target_arch = \"x86_64\")]\n\nunion InOut<T>\n\nwhere\n\n T: Copy,\n\n{\n\n input: T,\n\n out: Out,\n\n}\n\n\n\n// It isn't obvious whether the assembly code works for tags that aren't\n\n// 16-byte aligned. 
In practice it will always be 16-byte aligned because it\n\n// is embedded in a union where the other member of the union is 16-byte\n\n// aligned.\n", "file_path": "ring/src/aead/chacha20_poly1305.rs", "rank": 63, "score": 243012.83637178843 }, { "content": "fn x25519(private_key: &[u8], public_key: &[u8]) -> Vec<u8> {\n\n x25519_(private_key, public_key).unwrap()\n\n}\n\n\n", "file_path": "ring/tests/agreement_tests.rs", "rank": 64, "score": 239697.24293106777 }, { "content": "fn mix_hash(h: &mut Vec<u8>, data: &[u8]) {\n\n h.extend_from_slice(data);\n\n *h = hash(h);\n\n}\n\n\n", "file_path": "diem/crypto/crypto/src/noise.rs", "rank": 65, "score": 238821.0721077752 }, { "content": "/// Rotate `account`'s base URL to `new_url` and its compliance public key to `new_key`.\n\n/// Aborts if `account` is not a ParentVASP or DesignatedDealer\n\n/// Aborts if `new_key` is not a well-formed public key\n\npub fn encode_rotate_dual_attestation_info_script(new_url: Vec<u8>, new_key: Vec<u8>) -> Script {\n\n Script::new(\n\n ROTATE_DUAL_ATTESTATION_INFO_CODE.to_vec(),\n\n vec![],\n\n vec![\n\n TransactionArgument::U8Vector(new_url),\n\n TransactionArgument::U8Vector(new_key),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "diem/client/transaction-builder/src/stdlib.rs", "rank": 66, "score": 238795.17522465112 }, { "content": "#[inline(always)]\n\nfn hex_encode(src: &[u8], dst: &mut [u8]) {\n\n // debug_checked_precondition!(dst.len() == 2 * src.len());\n\n\n\n for (byte, out) in src.iter().zip(dst.chunks_mut(2)) {\n\n let (hi, lo) = byte2hex(*byte);\n\n out[0] = hi;\n\n out[1] = lo;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use proptest::prelude::*;\n\n use std::{str, u8};\n\n\n\n #[test]\n\n fn test_hex_encode() {\n\n let src = [0x12_u8, 0x34, 0xfe, 0xba];\n\n let mut actual = [0u8; 8];\n", "file_path": "diem/common/short-hex-str/src/lib.rs", "rank": 67, "score": 238590.71380220784 }, { "content": "/// Returns the list of siblings in this proof.\n\npub fn 
siblings(&self) -> &[HashValue] {\n\n &self.siblings\n\n}\n\n\n", "file_path": "diem/types/src/proof/definition.rs", "rank": 68, "score": 237469.81141721332 }, { "content": "/// Sets up `n` workers starting from 1, registered and benchmarked, owned by the corresponding\n\n/// accounts.\n\npub fn setup_workers_linked_operators(n: u8) {\n\n\tuse frame_support::assert_ok;\n\n\tfor i in 1..=n {\n\n\t\tlet worker = worker_pubkey(i);\n\n\t\tassert_ok!(PhalaRegistry::force_register_worker(\n\n\t\t\tOrigin::root(),\n\n\t\t\tworker.clone(),\n\n\t\t\tecdh_pubkey(1),\n\n\t\t\tSome(i as _)\n\n\t\t));\n\n\t\tPhalaRegistry::internal_set_benchmark(&worker, Some(1));\n\n\t}\n\n}\n\n\n", "file_path": "pallets/phala/src/mock.rs", "rank": 69, "score": 237108.61947916105 }, { "content": "#[cfg(all(feature = \"std\", feature = \"include-wasm\"))]\n\npub fn wasm_binary_unwrap() -> &'static [u8] {\n\n\tWASM_BINARY.expect(\n\n\t\t\"Development wasm binary is not available. This means the client is built with \\\n\n\t\t `SKIP_WASM_BUILD` flag and it is only usable for production chains. Please rebuild with \\\n\n\t\t the flag disabled.\",\n\n\t)\n\n}\n\n\n\n/// Runtime version.\n\n#[sp_version::runtime_version]\n\npub const VERSION: RuntimeVersion = RuntimeVersion {\n\n\tspec_name: create_runtime_str!(\"phala-node\"),\n\n\timpl_name: create_runtime_str!(\"phala-node\"),\n\n\tauthoring_version: 1,\n\n\t// Per convention: if the runtime behavior changes, increment spec_version\n\n\t// and set impl_version to 0. If only runtime\n\n\t// implementation changes and behavior does not, then leave spec_version as\n\n\t// is and increment impl_version.\n\n\tspec_version: 1,\n\n\timpl_version: 0,\n", "file_path": "standalone/runtime/src/lib.rs", "rank": 70, "score": 237108.61947916105 }, { "content": "/// The wasm runtime code.\n\npub fn compact_code_unwrap() -> &'static [u8] {\n\n\tnode_runtime::WASM_BINARY.expect(\n\n\t\t\"Development wasm binary is not available. 
Testing is only supported with the flag \\\n\n\t\t disabled.\",\n\n\t)\n\n}\n\n\n\nconst GENESIS_HASH: [u8; 32] = [69u8; 32];\n\n\n\nconst TRANSACTION_VERSION: u32 = node_runtime::VERSION.transaction_version;\n\n\n\nconst SPEC_VERSION: u32 = node_runtime::VERSION.spec_version;\n\n\n\nconst HEAP_PAGES: u64 = 20;\n\n\n", "file_path": "standalone/executor/benches/bench.rs", "rank": 71, "score": 237108.61947916105 }, { "content": "fn bitmap_set_bit(input: &mut [u8; BITMAP_NUM_OF_BYTES], index: usize) {\n\n let bucket = index / 8;\n\n // It's always invoked with index < 32, thus there is no need to check range.\n\n let bucket_pos = index - (bucket * 8);\n\n input[bucket] |= 128 >> bucket_pos as u8;\n\n}\n\n\n", "file_path": "diem/crypto/crypto/src/multi_ed25519.rs", "rank": 72, "score": 237075.98281245035 }, { "content": "/// Push a message to a topic accepting optinal secret messages\n\n///\n\n/// Contract commands topic accept osp messages\n\npub fn push_osp_message(payload: Vec<u8>, topic: Vec<u8>, remote_pubkey: Option<EcdhPublicKey>) {\n\n emit_event::<PinkEnvironment, _>(PinkEvent::OspMessage(OspMessage {\n\n message: Message { payload, topic },\n\n remote_pubkey,\n\n }))\n\n}\n\n\n", "file_path": "crates/pink/pink-extension/src/lib.rs", "rank": 73, "score": 231086.77520935546 }, { "content": "#[inline]\n\n#[rustfmt::skip]\n\nfn block_data_order_(mut H: State, M: &[[<W32 as Word>::InputBytes; 16]]) -> State {\n\n for M in M {\n\n // FIPS 180-4 6.1.2 Step 1\n\n let mut W: [W32; ROUNDS] = [W32::ZERO; ROUNDS];\n\n for t in 0..16 {\n\n W[t] = W32::from_be_bytes(M[t]);\n\n }\n\n for t in 16..ROUNDS {\n\n let wt = W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16];\n\n W[t] = rotl(wt, 1);\n\n }\n\n\n\n // FIPS 180-4 6.1.2 Step 2\n\n let [a, b, c, d, e] = H;\n\n\n\n // FIPS 180-4 6.1.2 Step 3 with constants and functions from FIPS 180-4 {4.1.1, 4.2.1}\n\n let W: &[[W32; 20]; 4] = W.chunks_fixed();\n\n let (a, b, c, d, e) = step3(a, b, c, d, e, W[0], Wrapping(0x5a827999), 
ch);\n\n let (a, b, c, d, e) = step3(a, b, c, d, e, W[1], Wrapping(0x6ed9eba1), parity);\n\n let (a, b, c, d, e) = step3(a, b, c, d, e, W[2], Wrapping(0x8f1bbcdc), maj);\n", "file_path": "ring/src/digest/sha1.rs", "rank": 74, "score": 230430.84742487036 }, { "content": "// hack strategy to generate a length from `impl Into<SizeRange>`\n\nfn arb_length(size_range: impl Into<SizeRange>) -> impl Strategy<Value = usize> {\n\n vec(Just(()), size_range).prop_map(|vec| vec.len())\n\n}\n\n\n", "file_path": "diem/types/src/unit_tests/trusted_state_test.rs", "rank": 75, "score": 226188.1009253901 }, { "content": "pub fn generate_iv(nonce: &[u8]) -> IV {\n\n let mut iv: IV = Default::default();\n\n let min_len = min(nonce.len(), iv.len());\n\n iv.copy_from_slice(&nonce[..min_len]);\n\n iv\n\n}\n\n\n", "file_path": "crates/phala-crypto/src/aead.rs", "rank": 76, "score": 225905.8258278739 }, { "content": "fn auth_key_prefix(auth_key: Vec<u8>) -> Vec<u8> {\n\n auth_key[0..16].to_vec()\n\n}\n\n\n\nimpl contracts::NativeContract for Diem {\n\n type Cmd = Command;\n\n type Event = ();\n\n type QReq = Request;\n\n type QResp = Response;\n\n\n\n fn id(&self) -> contracts::ContractId {\n\n contracts::id256(contracts::DIEM)\n\n }\n\n\n\n fn handle_command(\n\n &mut self,\n\n _context: &NativeContext,\n\n origin: MessageOrigin,\n\n cmd: PushCommand<Self::Cmd>,\n\n ) -> TransactionResult {\n", "file_path": "crates/phactory/src/contracts/diem.rs", "rank": 77, "score": 225668.009203168 }, { "content": "/// Seal master key seed with signature to ensure integrity\n\npub fn seal(\n\n sealing_path: String,\n\n master_key: &sr25519::Pair,\n\n identity_key: &sr25519::Pair,\n\n sys: &impl Sealing,\n\n) {\n\n let secret = master_key.dump_secret_key();\n\n let signature = identity_key.sign_data(&secret);\n\n\n\n let data = MasterKeySeal::V1(PersistentMasterKey { secret, signature });\n\n let filepath = master_key_file_path(sealing_path);\n\n info!(\"Seal master key to {}\", 
filepath.as_path().display());\n\n sys.seal_data(filepath, &data.encode())\n\n .expect(\"Seal master key failed\");\n\n}\n\n\n", "file_path": "crates/phactory/src/system/master_key.rs", "rank": 78, "score": 225280.93635557863 }, { "content": "/// Returns `Ok(())` if `a == b` and `Err(error::Unspecified)` otherwise.\n\n/// The comparison of `a` and `b` is done in constant time with respect to the\n\n/// contents of each, but NOT in constant time with respect to the lengths of\n\n/// `a` and `b`.\n\npub fn verify_slices_are_equal(a: &[u8], b: &[u8]) -> Result<(), error::Unspecified> {\n\n if a.len() != b.len() {\n\n return Err(error::Unspecified);\n\n }\n\n let result = unsafe { GFp_memcmp(a.as_ptr(), b.as_ptr(), a.len()) };\n\n match result {\n\n 0 => Ok(()),\n\n _ => Err(error::Unspecified),\n\n }\n\n}\n\n\n\nextern \"C\" {\n\n fn GFp_memcmp(a: *const u8, b: *const u8, len: c::size_t) -> c::int;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{bssl, error};\n\n\n\n #[test]\n\n fn test_constant_time() -> Result<(), error::Unspecified> {\n\n extern \"C\" {\n\n fn bssl_constant_time_test_main() -> bssl::Result;\n\n }\n\n Result::from(unsafe { bssl_constant_time_test_main() })\n\n }\n\n}\n", "file_path": "ring/src/constant_time.rs", "rank": 79, "score": 225219.3762714641 }, { "content": "/// Push a raw message to a topic accepting only vanilla messages\n\n///\n\n/// Most phala system topics accept vanilla messages\n\npub fn push_message(payload: Vec<u8>, topic: Vec<u8>) {\n\n emit_event::<PinkEnvironment, _>(PinkEvent::Message(Message { payload, topic }))\n\n}\n\n\n", "file_path": "crates/pink/pink-extension/src/lib.rs", "rank": 80, "score": 223942.1767421721 }, { "content": "fn chacha20_new_mask(key: &KeyInner, sample: Sample) -> [u8; 5] {\n\n let chacha20_key = match key {\n\n KeyInner::ChaCha20(key) => key,\n\n _ => unreachable!(),\n\n };\n\n\n\n chacha20_key.new_mask(sample)\n\n}\n", "file_path": "ring/src/aead/quic.rs", "rank": 81, "score": 
223086.53742707524 }, { "content": "fn aes_new_mask(key: &KeyInner, sample: Sample) -> [u8; 5] {\n\n let aes_key = match key {\n\n KeyInner::Aes(key) => key,\n\n _ => unreachable!(),\n\n };\n\n\n\n aes_key.new_mask(sample)\n\n}\n\n\n\n/// ChaCha20.\n\npub static CHACHA20: Algorithm = Algorithm {\n\n key_len: chacha::KEY_LEN,\n\n init: chacha20_init,\n\n new_mask: chacha20_new_mask,\n\n id: AlgorithmID::CHACHA20,\n\n};\n\n\n", "file_path": "ring/src/aead/quic.rs", "rank": 82, "score": 223086.53742707524 }, { "content": "#[inline]\n\npub fn agree_ephemeral<B: AsRef<[u8]>, R>(\n\n my_private_key: EphemeralPrivateKey,\n\n peer_public_key: &UnparsedPublicKey<B>,\n\n kdf: impl FnOnce(&[u8]) -> R,\n\n) -> Result<R, error::Unspecified> {\n\n let peer_public_key = UnparsedPublicKey {\n\n algorithm: peer_public_key.algorithm,\n\n bytes: peer_public_key.bytes.as_ref(),\n\n };\n\n agree_ephemeral_(my_private_key, peer_public_key, kdf)\n\n}\n\n\n", "file_path": "ring/src/agreement.rs", "rank": 83, "score": 222946.55729404048 }, { "content": "fn x25519_(private_key: &[u8], public_key: &[u8]) -> Result<Vec<u8>, error::Unspecified> {\n\n let rng = test::rand::FixedSliceRandom { bytes: private_key };\n\n let private_key = agreement::EphemeralPrivateKey::generate(&agreement::X25519, &rng)?;\n\n let public_key = agreement::UnparsedPublicKey::new(&agreement::X25519, public_key);\n\n agreement::agree_ephemeral(private_key, &public_key, |agreed_value| {\n\n Vec::from(agreed_value)\n\n })\n\n}\n\n\n", "file_path": "ring/tests/agreement_tests.rs", "rank": 84, "score": 222579.01841378334 }, { "content": "/// Unseal local master key seed and verify signature\n\n///\n\n/// This function could panic a lot.\n\npub fn try_unseal(\n\n sealing_path: String,\n\n identity_key: &sr25519::Pair,\n\n sys: &impl Sealing,\n\n) -> Option<sr25519::Pair> {\n\n let filepath = master_key_file_path(sealing_path);\n\n info!(\"Unseal master key from {}\", filepath.as_path().display());\n\n let sealed_data = 
match sys\n\n .unseal_data(&filepath)\n\n .expect(\"Unseal master key failed\")\n\n {\n\n Some(data) => data,\n\n None => {\n\n warn!(\"No sealed master key\");\n\n return None;\n\n }\n\n };\n\n\n\n let versioned_data =\n\n MasterKeySeal::decode(&mut &sealed_data[..]).expect(\"Failed to decode sealed master key\");\n", "file_path": "crates/phactory/src/system/master_key.rs", "rank": 85, "score": 221346.52012403964 }, { "content": "pub fn public_from_private(\n\n ops: &PrivateKeyOps,\n\n public_out: &mut [u8],\n\n my_private_key: &ec::Seed,\n\n) -> Result<(), error::Unspecified> {\n\n let elem_and_scalar_bytes = ops.common.num_limbs * LIMB_BYTES;\n\n debug_assert_eq!(public_out.len(), 1 + (2 * elem_and_scalar_bytes));\n\n let my_private_key = private_key_as_scalar(ops, my_private_key);\n\n let my_public_key = ops.point_mul_base(&my_private_key);\n\n public_out[0] = 4; // Uncompressed encoding.\n\n let (x_out, y_out) = (&mut public_out[1..]).split_at_mut(elem_and_scalar_bytes);\n\n\n\n // `big_endian_affine_from_jacobian` verifies that the point is not at\n\n // infinity and is on the curve.\n\n big_endian_affine_from_jacobian(ops, Some(x_out), Some(y_out), &my_public_key)\n\n}\n\n\n", "file_path": "ring/src/ec/suite_b/private_key.rs", "rank": 86, "score": 221340.62507594127 }, { "content": "/// Generates a random scalar in the range [1, n).\n\npub fn random_scalar(\n\n ops: &PrivateKeyOps,\n\n rng: &dyn rand::SecureRandom,\n\n) -> Result<Scalar, error::Unspecified> {\n\n let num_limbs = ops.common.num_limbs;\n\n let mut bytes = [0; ec::SCALAR_MAX_BYTES];\n\n let bytes = &mut bytes[..(num_limbs * LIMB_BYTES)];\n\n generate_private_scalar_bytes(ops, rng, bytes)?;\n\n scalar_from_big_endian_bytes(ops, bytes)\n\n}\n\n\n", "file_path": "ring/src/ec/suite_b/private_key.rs", "rank": 87, "score": 221340.62507594127 }, { "content": "pub fn affine_from_jacobian(\n\n ops: &PrivateKeyOps,\n\n p: &Point,\n\n) -> Result<(Elem<R>, Elem<R>), error::Unspecified> {\n\n let z = 
ops.common.point_z(p);\n\n\n\n // Since we restrict our private key to the range [1, n), the curve has\n\n // prime order, and we verify that the peer's point is on the curve,\n\n // there's no way that the result can be at infinity. But, use `assert!`\n\n // instead of `debug_assert!` anyway\n\n assert!(ops.common.elem_verify_is_not_zero(&z).is_ok());\n\n\n\n let x = ops.common.point_x(p);\n\n let y = ops.common.point_y(p);\n\n\n\n let zz_inv = ops.elem_inverse_squared(&z);\n\n\n\n let x_aff = ops.common.elem_product(&x, &zz_inv);\n\n\n\n // `y_aff` is needed to validate the point is on the curve. It is also\n", "file_path": "ring/src/ec/suite_b/private_key.rs", "rank": 88, "score": 221340.62507594127 }, { "content": "pub fn usize_from_u32(x: u32) -> usize {\n\n x as usize\n\n}\n\n\n\npub mod slice {\n\n // https://github.com/rust-lang/rust/issues/27750\n\n // https://internals.rust-lang.org/t/stabilizing-basic-functions-on-arrays-and-slices/2868\n\n #[inline(always)]\n\n pub fn fill<T>(dest: &mut [T], value: T)\n\n where\n\n T: Copy,\n\n {\n\n for d in dest {\n\n *d = value;\n\n }\n\n }\n\n}\n\n\n\n#[macro_use]\n\nmod chunks_fixed;\n\n\n\npub(crate) mod array_map;\n\n\n\npub use chunks_fixed::*;\n", "file_path": "ring/src/polyfill.rs", "rank": 89, "score": 221245.3769889301 }, { "content": "/// Returns the leaf node in this proof.\n\npub fn leaf(&self) -> Option<SparseMerkleLeafNode> {\n\n self.leaf\n\n}\n\n\n", "file_path": "diem/types/src/proof/definition.rs", "rank": 90, "score": 219650.0525232688 }, { "content": "// Checks events against the latest. A contiguous set of events must be provided. 
They must\n\n// include the most recent event, but do not have to include every past event.\n\npub fn assert_events(mut expected: Vec<Event>) {\n\n\tlet mut actual: Vec<Event> = system::Pallet::<Test>::events()\n\n\t\t.iter()\n\n\t\t.map(|e| e.event.clone())\n\n\t\t.collect();\n\n\n\n\texpected.reverse();\n\n\n\n\tfor evt in expected {\n\n\t\tlet next = actual.pop().expect(\"event expected\");\n\n\t\tassert_eq!(next, evt.into(), \"Events don't match (actual,expected)\");\n\n\t}\n\n}\n", "file_path": "pallets/bridge/src/mock.rs", "rank": 91, "score": 219500.08741717902 }, { "content": "// TODO: delete and use StdlibScript::try_from directly if it's ok to drop the \"_transaction\"?\n\n/// Returns a user friendly mnemonic for the transaction type if the transaction is\n\n/// for a known, white listed, transaction.\n\npub fn get_transaction_name(code: &[u8]) -> String {\n\n StdlibScript::try_from(code).map_or(\"<unknown transaction>\".to_string(), |name| {\n\n format!(\"{}_transaction\", name)\n\n })\n\n}\n", "file_path": "diem/client/transaction-builder/src/misc.rs", "rank": 92, "score": 219322.47226013837 }, { "content": "pub fn new_block_event_key() -> EventKey {\n\n EventKey::new_from_address(&diem_root_address(), 17)\n\n}\n\n\n\n/// The path to the new block event handle under a DiemBlock::BlockMetadata resource.\n\npub static NEW_BLOCK_EVENT_PATH: Lazy<Vec<u8>> = Lazy::new(|| {\n\n let mut path = DiemBlockResource::resource_path();\n\n // it can be anything as long as it's referenced in AccountState::get_event_handle_by_query_path\n\n path.extend_from_slice(b\"/new_block_event/\");\n\n path\n\n});\n\n\n\n#[derive(Deserialize, Serialize)]\n\npub struct DiemBlockResource {\n\n height: u64,\n\n new_block_events: EventHandle,\n\n}\n\n\n\nimpl DiemBlockResource {\n\n pub fn new_block_events(&self) -> &EventHandle {\n", "file_path": "diem/types/src/block_metadata.rs", "rank": 93, "score": 218255.0933642118 }, { "content": "pub fn impl_enum_signingkey(\n\n name: 
&Ident,\n\n public_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n\n\n let mut match_arms_arbitrary = quote! {};\n\n let mut match_struct_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_struct_arms.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign(message)),\n\n });\n\n match_arms_arbitrary.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign_arbitrary_message(message)),\n\n });\n", "file_path": "diem/crypto/crypto-derive/src/unions.rs", "rank": 94, "score": 217689.7631907192 }, { "content": "pub fn impl_enum_signature(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let priv_kt: syn::Type = private_key_type.parse().unwrap();\n\n let pub_kt: syn::Type = public_key_type.parse().unwrap();\n\n let mut res = impl_enum_tryfrom(name, variants);\n\n let to_bytes_arms = match_enum_to_bytes(name, variants);\n\n\n\n let mut match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_arms.extend(quote! {\n\n (#name::#variant_ident(sig), #pub_kt::#variant_ident(pk)) => {\n\n sig.verify_arbitrary_msg(message, pk)\n\n }\n\n })\n", "file_path": "diem/crypto/crypto-derive/src/unions.rs", "rank": 95, "score": 217689.7631907192 }, { "content": "pub fn impl_enum_privatekey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let res = quote! 
{\n\n impl diem_crypto::PrivateKey for #name {\n\n type PublicKeyMaterial = #pkt;\n\n }\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "diem/crypto/crypto-derive/src/unions.rs", "rank": 96, "score": 217689.7631907192 }, { "content": "pub fn impl_enum_publickey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let mut from_match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n from_match_arms.extend(quote! {\n\n #pkt::#variant_ident(key) => #name::#variant_ident(key.into()),\n\n });\n\n }\n\n let mut res = quote! {\n\n impl From<&#pkt> for #name {\n\n fn from(public_key: &#pkt) -> Self {\n\n match public_key {\n\n #from_match_arms\n\n }\n", "file_path": "diem/crypto/crypto-derive/src/unions.rs", "rank": 97, "score": 217689.7631907192 }, { "content": "pub fn impl_enum_verifyingkey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n let res = quote! 
{\n\n impl diem_crypto::VerifyingKey for #name {\n\n type SigningKeyMaterial = #pkt;\n\n type SignatureMaterial = #st;\n\n }\n\n impl diem_crypto::private::Sealed for #name {}\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "diem/crypto/crypto-derive/src/unions.rs", "rank": 98, "score": 217689.7631907192 }, { "content": "// TODO: investigate taking decoder as a reference to reduce generated code\n\n// size.\n\npub fn nested<'a, F, R, E: Copy>(\n\n input: &mut untrusted::Reader<'a>,\n\n tag: Tag,\n\n error: E,\n\n decoder: F,\n\n) -> Result<R, E>\n\nwhere\n\n F: FnOnce(&mut untrusted::Reader<'a>) -> Result<R, E>,\n\n{\n\n let inner = expect_tag_and_get_value(input, tag).map_err(|_| error)?;\n\n inner.read_all(error, decoder)\n\n}\n\n\n", "file_path": "ring/src/io/der.rs", "rank": 99, "score": 217624.2373521676 } ]
Rust
wire_protocol/src/connection.rs
vangork/pravega-client-rust
b57b2ea6eee3aa49d354e19b5fc457d4ccf72e9f
use crate::error::*; use async_trait::async_trait; use pravega_client_shared::PravegaNodeUri; use snafu::ResultExt; use std::fmt; use std::fmt::{Debug, Formatter}; use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::io::{ReadHalf, WriteHalf}; use tokio::net::TcpStream; use tokio_rustls::client::TlsStream; use uuid::Uuid; #[async_trait] pub trait Connection: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>); fn get_endpoint(&self) -> PravegaNodeUri; fn get_uuid(&self) -> Uuid; fn is_valid(&self) -> bool; fn can_recycle(&mut self, recycle: bool); } pub struct TokioConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TcpStream>, pub can_recycle: bool, } #[async_trait] impl Connection for TokioConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>; let write = Box::new(ConnectionWriteHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn 
ConnectionWriteHalf>; (read, write) } fn get_endpoint(&self) -> PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle } } impl Debug for TokioConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } pub struct TlsConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TlsStream<TcpStream>>, pub can_recycle: bool, } #[async_trait] impl Connection for TlsConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint: endpoint.clone(), })?; self.stream .as_mut() .expect("get connection") .flush() .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>; let write = Box::new(ConnectionWriteHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn ConnectionWriteHalf>; (read, write) } fn get_endpoint(&self) -> 
PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle; } } impl Debug for TlsConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } #[async_trait] pub trait ConnectionReadHalf: Send + Sync { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } pub struct ConnectionReadHalfTokio { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TcpStream>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTokio { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub struct ConnectionReadHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTls { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[async_trait] pub trait ConnectionWriteHalf: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } #[derive(Debug)] pub struct ConnectionWriteHalfTokio { uuid: Uuid, 
endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TcpStream>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTokio { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[derive(Debug)] pub struct ConnectionWriteHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTls { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint: endpoint.clone(), })?; writer.flush().await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub trait Validate { fn is_valid(&self) -> bool; } impl Validate for TcpStream { fn is_valid(&self) -> bool { self.peer_addr().map_or_else(|_e| false, |_addr| true) } } impl Validate for TlsStream<TcpStream> { fn is_valid(&self) -> bool { let (io, _session) = self.get_ref(); io.peer_addr().map_or_else(|_e| false, |_addr| true) } }
use crate::error::*; use async_trait::async_trait; use pravega_client_shared::PravegaNodeUri; use snafu::ResultExt; use std::fmt; use std::fmt::{Debug, Formatter}; use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::io::{ReadHalf, WriteHalf}; use tokio::net::TcpStream; use tokio_rustls::client::TlsStream; use uuid::Uuid; #[async_trait] pub trait Connection: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>); fn get_endpoint(&self) -> PravegaNodeUri; fn get_uuid(&self) -> Uuid; fn is_valid(&self) -> bool; fn can_recycle(&mut self, recycle: bool); } pub struct TokioConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TcpStream>, pub can_recycle: bool, } #[async_trait] impl Connection for TokioConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>; let write = Box::new(ConnectionWriteHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn 
ConnectionWriteHalf>; (read, write) } fn get_endpoint(&self) -> PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle } } impl Debug for TokioConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } pub struct TlsConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TlsStream<TcpStream>>, pub can_recycle: bool, } #[async_trait] impl Connection for TlsConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint: endpoint.clone(), })?; self.stream .as_mut() .expect("get connection") .flush() .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>;
(read, write) } fn get_endpoint(&self) -> PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle; } } impl Debug for TlsConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } #[async_trait] pub trait ConnectionReadHalf: Send + Sync { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } pub struct ConnectionReadHalfTokio { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TcpStream>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTokio { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub struct ConnectionReadHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTls { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[async_trait] pub trait ConnectionWriteHalf: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } #[derive(Debug)] pub struct 
ConnectionWriteHalfTokio { uuid: Uuid, endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TcpStream>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTokio { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[derive(Debug)] pub struct ConnectionWriteHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTls { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint: endpoint.clone(), })?; writer.flush().await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub trait Validate { fn is_valid(&self) -> bool; } impl Validate for TcpStream { fn is_valid(&self) -> bool { self.peer_addr().map_or_else(|_e| false, |_addr| true) } } impl Validate for TlsStream<TcpStream> { fn is_valid(&self) -> bool { let (io, _session) = self.get_ref(); io.peer_addr().map_or_else(|_e| false, |_addr| true) } }
let write = Box::new(ConnectionWriteHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn ConnectionWriteHalf>;
assignment_statement
[ { "content": "#[async_trait]\n\npub trait ClientConnection: Send + Sync {\n\n async fn read(&mut self) -> Result<Replies, ClientConnectionError>;\n\n async fn write(&mut self, request: &Requests) -> Result<(), ClientConnectionError>;\n\n fn split(&mut self) -> (ClientConnectionReadHalf, ClientConnectionWriteHalf);\n\n fn get_uuid(&self) -> Uuid;\n\n}\n\n\n\npub struct ClientConnectionImpl<'a> {\n\n pub connection: PooledConnection<'a, Box<dyn Connection>>,\n\n}\n\n\n\npub struct ClientConnectionReadHalf {\n\n read_half: Box<dyn ConnectionReadHalf>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ClientConnectionWriteHalf {\n\n write_half: Box<dyn ConnectionWriteHalf>,\n\n}\n\n\n", "file_path": "wire_protocol/src/client_connection.rs", "rank": 3, "score": 280298.62058918516 }, { "content": "#[async_trait]\n\npub trait ConnectionFactory: Send + Sync {\n\n /// establish_connection will return a Connection future that used to send and read data.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// use pravega_wire_protocol::connection_factory::{ConnectionFactory, ConnectionFactoryConfig};\n\n /// use pravega_client_shared::PravegaNodeUri;\n\n /// use pravega_client_config::connection_type::ConnectionType;\n\n /// use tokio::runtime::Runtime;\n\n ///\n\n /// fn main() {\n\n /// let mut rt = Runtime::new().unwrap();\n\n /// let endpoint = PravegaNodeUri::from(\"localhost:9090\".to_string());\n\n /// let config = ConnectionFactoryConfig::new(ConnectionType::Tokio);\n\n /// let cf = ConnectionFactory::create(config);\n\n /// let connection_future = cf.establish_connection(endpoint);\n\n /// let mut connection = rt.block_on(connection_future).unwrap();\n\n /// }\n\n /// ```\n", "file_path": "wire_protocol/src/connection_factory.rs", "rank": 4, "score": 280298.62058918516 }, { "content": "#[async_trait]\n\npub trait ControllerClient: Send + Sync {\n\n /**\n\n * API to create a scope. 
The future completes with true in the case the scope did not exist\n\n * when the controller executed the operation. In the case of a re-attempt to create the\n\n * same scope, the future completes with false to indicate that the scope existed when the\n\n * controller executed the operation.\n\n */\n\n async fn create_scope(&self, scope: &Scope) -> ResultRetry<bool>;\n\n\n\n /**\n\n * API to check if the scope exists. The future completes with true in case the scope exists\n\n * and a false if it does not exist.\n\n */\n\n async fn check_scope_exists(&self, scope: &Scope) -> ResultRetry<bool>;\n\n\n\n /**\n\n * API to list scopes given a continuation token..\n\n * Use the pravega_controller_client::paginator::list_scopes to paginate over all the scopes.\n\n */\n\n async fn list_scopes(&self, token: &CToken) -> ResultRetry<Option<(Vec<Scope>, CToken)>>;\n", "file_path": "controller-client/src/lib.rs", "rank": 5, "score": 234969.15587164758 }, { "content": "#[async_trait]\n\ntrait Cred: Debug + CredClone + Send + Sync {\n\n async fn get_request_metadata(&self) -> String;\n\n fn is_expired(&self) -> bool;\n\n}\n\n\n", "file_path": "config/src/credentials.rs", "rank": 6, "score": 227423.45729133073 }, { "content": "pub fn retry_sync<O, T, E>(retry_schedule: impl BackoffSchedule, mut operation: O) -> Result<T, RetryError<E>>\n\nwhere\n\n O: FnMut() -> RetryResult<T, E>,\n\n E: Error,\n\n{\n\n retry_internal(retry_schedule, |_| operation())\n\n}\n\n\n", "file_path": "retry/src/retry_sync.rs", "rank": 7, "score": 220567.77423647977 }, { "content": "/// Deserialize the Value into the type T by using cbor deserializer.\n\n/// This method would be used by the user after calling get() of table_synchronizer.\n\npub fn deserialize_from<T>(reader: &[u8]) -> Result<T, serde_cbor::error::Error>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n serde_cbor::de::from_slice(reader)\n\n}\n\n\n\nasync fn conditionally_write<R>(\n\n mut updates_generator: impl FnMut(&mut Update) -> Result<R, 
SynchronizerError>,\n\n table_synchronizer: &mut Synchronizer,\n\n mut retry: i32,\n\n) -> Result<R, SynchronizerError> {\n\n let mut update_result = None;\n\n\n\n while retry > 0 {\n\n let map = table_synchronizer.get_outer_map();\n\n let map_version = table_synchronizer.get_inner_map_version();\n\n\n\n let mut to_update = Update {\n\n map,\n", "file_path": "src/sync/synchronizer.rs", "rank": 8, "score": 215251.66261299103 }, { "content": "/// Serialize the <dyn ValueData> into the Vec<u8> by using cbor serializer.\n\n/// This method would be used by the insert method in table_synchronizer.\n\npub fn serialize(value: &dyn ValueData) -> Result<Vec<u8>, serde_cbor::error::Error> {\n\n let mut vec = Vec::new();\n\n value.serialize_value(&mut CborSerializer::new(&mut vec))?;\n\n Ok(vec)\n\n}\n\n\n", "file_path": "src/sync/synchronizer.rs", "rank": 9, "score": 203975.1045162899 }, { "content": "/// The trait bound for the ValueData\n\npub trait ValueData: ValueSerialize + ValueClone + Debug {}\n\n\n\nimpl<T> ValueData for T where T: 'static + Serialize + DeserializeOwned + Clone + Debug {}\n\n\n", "file_path": "src/sync/synchronizer.rs", "rank": 10, "score": 200076.998559818 }, { "content": "// This benchmark test uses a mock server that replies ok to any requests instantly. 
It involves\n\n// kernel latency.\n\nfn event_stream_read_mock_server(c: &mut Criterion) {\n\n let rt = tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let mut reader = rt.block_on(set_up_event_stream_reader(config));\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start reader with mock server performance testing\");\n\n let mut last_offset: i64 = -1;\n\n c.bench_function(\"read 100KB mock server\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run_reader(&mut reader, &mut last_offset));\n\n });\n\n });\n\n println!(\"reader performance testing finished\");\n\n}\n", "file_path": "benches/benchmark.rs", "rank": 11, "score": 192984.279872268 }, { "content": "// This benchmark test uses a mock connection that replies ok to any requests instantly. 
It does not\n\n// involve kernel latency.\n\nfn event_stream_writer_mock_connection(c: &mut Criterion) {\n\n let rt = tokio::runtime::Runtime::new().unwrap();\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"127.0.0.1:9090\".parse::<SocketAddr>().unwrap())\n\n .mock(true)\n\n .connection_type(ConnectionType::Mock(MockType::Happy))\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock connection performance testing\");\n\n c.bench_function(\"mock connection\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock server connection testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 12, "score": 192754.9240840153 }, { "content": "fn run_byte_stream_read(reader: &mut ByteReader) {\n\n for _i in 0..EVENT_NUM {\n\n let mut read = 0;\n\n let mut buf = vec![0; EVENT_SIZE];\n\n while read != EVENT_SIZE {\n\n let size = reader.read(&mut buf[read..]).expect(\"byte stream read\");\n\n read += size;\n\n }\n\n }\n\n}\n\n\n\ncriterion_group! {\n\n name = event_writer_performance;\n\n config = Criterion::default().sample_size(10);\n\n targets = event_stream_writer_mock_server,event_stream_writer_mock_server_no_block,event_stream_writer_mock_connection,event_stream_writer_mock_connection_no_block\n\n}\n\ncriterion_group! {\n\n name = event_reader_performance;\n\n config = Criterion::default().sample_size(10);\n\n targets = event_stream_read_mock_server\n", "file_path": "benches/benchmark.rs", "rank": 13, "score": 188473.92062228083 }, { "content": "// This benchmark test uses a mock connection that replies ok to any requests instantly. It does not\n\n// involve kernel latency. 
It does not wait for reply.\n\nfn event_stream_writer_mock_connection_no_block(c: &mut Criterion) {\n\n let rt = tokio::runtime::Runtime::new().unwrap();\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"127.0.0.1:9090\".parse::<SocketAddr>().unwrap())\n\n .mock(true)\n\n .connection_type(ConnectionType::Mock(MockType::Happy))\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock connection(no block) performance testing\");\n\n c.bench_function(\"mock connection(no block)\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run_no_block(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock connection(no block) testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 14, "score": 188261.01364825832 }, { "content": "#[async_trait]\n\npub trait Manager {\n\n /// The customized connection must implement Send and Sized marker trait\n\n type Conn: Send + Sized;\n\n\n\n /// Define how to establish the customized connection\n\n async fn establish_connection(&self, endpoint: PravegaNodeUri)\n\n -> Result<Self::Conn, ConnectionPoolError>;\n\n\n\n /// Check whether this connection is still valid. 
This method will be used to filter out\n\n /// invalid connections when putting connection back to the pool\n\n fn is_valid(&self, conn: &Self::Conn) -> bool;\n\n\n\n /// Get the maximum connections in the pool\n\n fn get_max_connections(&self) -> u32;\n\n\n\n fn name(&self) -> String;\n\n}\n\n\n\n/// ConnectionPool creates a pool of connections for reuse.\n\n/// It is thread safe.\n", "file_path": "connection_pool/src/connection_pool.rs", "rank": 15, "score": 186537.24921824667 }, { "content": "///\n\n/// Trait which is used check if the Error is Retryable.\n\n///\n\npub trait Retryable {\n\n fn can_retry(&self) -> bool;\n\n}\n\n\n\n///\n\n/// `wrap_with_async_retry!` macro wraps any arbitrary async function with `pravega_rust_client_retry::retry_async::retry_async`\n\n/// This macro takes two parameters. The first parameter is the Retry policy which implements `trait BackoffSchedule`.\n\n/// The second parameter is the async function that needs to be wrapped within the retry logic.\n\n/// The function invocation will be retried only error returned by the function returns `can_retry()` as true.\n\n///\n\n/// E.g: usage\n\n///\n\n/// ```ignore\n\n/// use pravega_rust_client_retry::retry_policy::RetryWithBackoff;\n\n/// use pravega_rust_client_retry::retry_async::retry_async;\n\n/// use pravega_rust_client_retry::wrap_with_async_retry;\n\n/// //CustomError implements Retrayable trait\n\n/// async fn function_a(param1: &str, param2:u8) -> Result<(), CustomError> {\n\n///\n\n/// }\n", "file_path": "retry/src/retry_result.rs", "rank": 16, "score": 178949.99991189438 }, { "content": "/// Serialize trait helper, we need to serialize the ValueData in Insert struct into Vec<u8>.\n\npub trait ValueSerialize {\n\n fn serialize_value(\n\n &self,\n\n seralizer: &mut CborSerializer<&mut Vec<u8>>,\n\n ) -> Result<(), serde_cbor::error::Error>;\n\n}\n\n\n\nimpl<T> ValueSerialize for T\n\nwhere\n\n T: Serialize,\n\n{\n\n fn serialize_value(\n\n &self,\n\n serializer: &mut 
CborSerializer<&mut Vec<u8>>,\n\n ) -> Result<(), serde_cbor::error::Error> {\n\n self.serialize(serializer)\n\n }\n\n}\n\n\n", "file_path": "src/sync/synchronizer.rs", "rank": 18, "score": 178599.28331547632 }, { "content": "/// Clone trait helper.\n\npub trait ValueClone {\n\n fn clone_box(&self) -> Box<dyn ValueData>;\n\n}\n\n\n\nimpl<T> ValueClone for T\n\nwhere\n\n T: 'static + ValueData + Clone,\n\n{\n\n fn clone_box(&self) -> Box<dyn ValueData> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn ValueData> {\n\n fn clone(&self) -> Self {\n\n self.clone_box()\n\n }\n\n}\n\n\n", "file_path": "src/sync/synchronizer.rs", "rank": 19, "score": 178589.4398006478 }, { "content": "///\n\n///Helper method to iterated over the all the Pravega streams under the provided Scope.\n\n///This method returns a stream of values,Pravega streams, produced asynchronously.\n\n///\n\n/// The below snippets show case the example uses.\n\n/// Sample 1:\n\n///```\n\n/// # use tonic::transport::Channel;\n\n/// # use pravega_controller_client::controller::controller_service_client::ControllerServiceClient;\n\n/// # use pravega_controller_client::ControllerClient;\n\n/// # async fn call_list_stream(controller_client: &dyn ControllerClient) {\n\n/// use pravega_client_shared::Scope;\n\n/// use pravega_client_shared::ScopedStream;\n\n/// use futures::future;\n\n/// use futures::stream::StreamExt;\n\n/// use pravega_controller_client::paginator::list_streams;\n\n/// let stream = list_streams(\n\n/// Scope {\n\n/// name: \"testScope\".to_string(),\n\n/// },\n\n/// controller_client,\n\n/// );\n\n/// // collect all the Streams in a single vector\n\n/// let stream_list:Vec<ScopedStream> = stream.map(|str| str.unwrap()).collect::<Vec<ScopedStream>>().await;\n\n/// # }\n\n/// ```\n\n///\n\n/// Sample 2:\n\n/// ```\n\n/// # use tonic::transport::Channel;\n\n/// # use pravega_controller_client::controller::controller_service_client::ControllerServiceClient;\n\n/// # use 
pravega_controller_client::ControllerClient;\n\n/// # async fn call_list_stream(controller_client: &dyn ControllerClient) {\n\n/// use pravega_client_shared::Scope;\n\n/// use pravega_client_shared::ScopedStream;\n\n/// use futures::future;\n\n/// use futures::stream::StreamExt;\n\n/// use pravega_controller_client::paginator::list_streams;\n\n/// let stream = list_streams(\n\n/// Scope {\n\n/// name: \"testScope\".to_string(),\n\n/// },\n\n/// controller_client,\n\n/// );\n\n/// futures::pin_mut!(stream);\n\n/// let pravega_stream_1 = stream.next().await;\n\n/// let pravega_stream_2 = stream.next().await;\n\n/// // A None is returned at the end of the stream.\n\n/// # }\n\n/// ```\n\n///\n\npub fn list_streams(\n\n scope: Scope,\n\n client: &dyn ControllerClient,\n\n) -> impl Stream<Item = Result<ScopedStream, RetryError<ControllerError>>> + '_ {\n\n struct State {\n\n streams: IntoIter<ScopedStream>,\n\n scope: Scope,\n\n token: CToken,\n\n }\n\n\n\n // Initial state with an empty Continuation token.\n\n let get_next_stream_async = move |mut state: State| async move {\n\n if let Some(element) = state.streams.next() {\n\n Some((Ok(element), state))\n\n } else {\n\n // execute a request to the controller.\n\n info!(\n\n \"Fetch the next set of streams under scope {} using the provided token\",\n\n state.scope\n\n );\n", "file_path": "controller-client/src/paginator.rs", "rank": 20, "score": 172118.16180925863 }, { "content": "///\n\n///Helper method to iterated over the all the Pravega streams under the provided Scope.\n\n///This method returns a stream of values,Pravega streams, produced asynchronously.\n\n///\n\n/// The below snippets show case the example uses.\n\n///\n\n/// Sample 1:\n\n/// ```\n\n/// # use tonic::transport::Channel;\n\n/// # use pravega_controller_client::controller::controller_service_client::ControllerServiceClient;\n\n/// # use pravega_controller_client::ControllerClient;\n\n/// # async fn call_list_stream_for_tag(controller_client: &dyn 
ControllerClient) {\n\n/// use pravega_client_shared::Scope;\n\n/// use pravega_client_shared::ScopedStream;\n\n/// use futures::future;\n\n/// use futures::stream::StreamExt;\n\n/// use pravega_controller_client::paginator::list_streams_for_tag;\n\n/// let stream = list_streams_for_tag(\n\n/// Scope {\n\n/// name: \"testScope\".to_string(),\n\n/// },\n\n/// \"tagx\".to_string(),\n\n/// controller_client,\n\n/// );\n\n/// // collect all the Streams in a single vector\n\n/// let stream_list:Vec<ScopedStream> = stream.map(|str| str.unwrap()).collect::<Vec<ScopedStream>>().await;\n\n/// # }\n\n/// ```\n\n///\n\n/// Sample 2:\n\n/// ```\n\n/// # use tonic::transport::Channel;\n\n/// # use pravega_controller_client::controller::controller_service_client::ControllerServiceClient;\n\n/// # use pravega_controller_client::ControllerClient;\n\n/// # async fn call_list_stream_for_tag(controller_client: &dyn ControllerClient) {\n\n/// use pravega_client_shared::Scope;\n\n/// use pravega_client_shared::ScopedStream;\n\n/// use futures::future;\n\n/// use futures::stream::StreamExt;\n\n/// use pravega_controller_client::paginator::list_streams_for_tag;\n\n/// let stream = list_streams_for_tag(\n\n/// Scope {\n\n/// name: \"testScope\".to_string(),\n\n/// },\n\n/// \"tagx\".to_string(),\n\n/// controller_client,\n\n/// );\n\n/// futures::pin_mut!(stream);\n\n/// let pravega_stream_1 = stream.next().await;\n\n/// let pravega_stream_2 = stream.next().await;\n\n/// // A None is returned at the end of the stream.\n\n/// # }\n\n/// ```\n\n///\n\npub fn list_streams_for_tag(\n\n scope: Scope,\n\n tag: String,\n\n client: &dyn ControllerClient,\n\n) -> impl Stream<Item = Result<ScopedStream, RetryError<ControllerError>>> + '_ {\n\n struct State {\n\n streams: IntoIter<ScopedStream>,\n\n scope: Scope,\n\n tag: String,\n\n token: CToken,\n\n }\n\n\n\n // Initial state with an empty Continuation token.\n\n let get_next_stream_async = move |mut state: State| async move {\n\n if let 
Some(element) = state.streams.next() {\n\n Some((Ok(element), state))\n\n } else {\n\n // execute a request to the controller.\n\n info!(\n\n \"Fetch the next set of streams with tag {} under scope {} using the provided token\",\n", "file_path": "controller-client/src/paginator.rs", "rank": 21, "score": 168311.83027703382 }, { "content": "// ManagedPool maintains a map that maps endpoint to InternalPool.\n\n// The map is a concurrent map named Dashmap, which supports multi-threading with high performance.\n\nstruct ManagedPool<T: Sized + Send> {\n\n map: DashMap<PravegaNodeUri, InternalPool<T>>,\n\n max_connections: u32,\n\n}\n\n\n\nimpl<T: Sized + Send> ManagedPool<T> {\n\n pub fn new(max_connections: u32) -> Self {\n\n let map = DashMap::new();\n\n ManagedPool { map, max_connections }\n\n }\n\n\n\n // add a connection to the internal pool\n\n fn add_connection(&self, endpoint: PravegaNodeUri, connection: InternalConn<T>) {\n\n let mut internal = self.map.entry(endpoint).or_insert_with(InternalPool::new);\n\n if self.max_connections > internal.conns.len() as u32 {\n\n internal.conns.push(connection);\n\n }\n\n }\n\n\n\n // get a connection from the internal pool. 
If there is no available connections, returns an error\n", "file_path": "connection_pool/src/connection_pool.rs", "rank": 22, "score": 165310.90410670414 }, { "content": "fn main() {\n\n println!(\"start event write and read example\");\n\n // assuming Pravega standalone is listening at localhost:9090\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"localhost:9090\")\n\n .build()\n\n .unwrap();\n\n\n\n let client_factory = ClientFactory::new(config);\n\n println!(\"client factory created\");\n\n\n\n client_factory.runtime().block_on(async {\n\n let controller_client = client_factory.controller_client();\n\n\n\n // create a scope\n\n let scope = Scope::from(\"fooScope\".to_owned());\n\n controller_client\n\n .create_scope(&scope)\n\n .await\n\n .expect(\"create scope\");\n", "file_path": "examples/event_write_and_read.rs", "rank": 23, "score": 159317.24111521858 }, { "content": "// This benchmark test uses a mock server that replies ok to any requests instantly. It involves\n\n// kernel latency.\n\nfn event_stream_writer_mock_server(c: &mut Criterion) {\n\n let rt = tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock server performance testing\");\n\n c.bench_function(\"mock server\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock server performance testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 24, "score": 154243.92989445975 }, { "content": "fn byte_stream_reader_mock_server(c: &mut Criterion) {\n\n let rt = 
tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let mut reader = set_up_byte_stream_reader(config, &rt);\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start byte stream reader mock server performance testing\");\n\n c.bench_function(\"byte_stream_reader_mock_server\", |b| {\n\n b.iter(|| {\n\n run_byte_stream_read(&mut reader);\n\n });\n\n });\n\n info!(\"byte stream reader mock server testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 25, "score": 154234.04358047165 }, { "content": "fn apply_deletes_to_localmap(to_delete: &mut Update, table_synchronizer: &mut Synchronizer) {\n\n let mut i = 0;\n\n for delete in to_delete.get_remove_iter() {\n\n let delete_key = Key {\n\n key: delete.inner_key.clone(),\n\n key_version: TableKey::KEY_NO_VERSION,\n\n };\n\n let in_mem_inner_map = table_synchronizer\n\n .in_memory_map\n\n .entry(delete.outer_key.clone())\n\n .or_insert_with(HashMap::new);\n\n in_mem_inner_map.remove(&delete_key);\n\n i += 1;\n\n }\n\n debug!(\"Deletes {} entries in local map \", i);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n", "file_path": "src/sync/synchronizer.rs", "rank": 26, "score": 153938.91383971347 }, { "content": "fn test_read_from_tail_of_stream(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testReadTailScopeRG\".to_owned());\n\n let stream_name = Stream::from(\"testTailRG\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 10;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream =\n\n 
create_scope_stream(client_factory.controller_client(), &scope_name, &stream_name, 1).await;\n\n new_stream\n\n });\n\n\n\n let rg_config = ReaderGroupConfigBuilder::default()\n\n .read_from_tail_of_stream(str)\n\n .build();\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 27, "score": 151532.2030950565 }, { "content": "fn test_read_offline_stream(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testReaderOffline\".to_owned());\n\n let stream_name = Stream::from(\"test1\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 10;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream =\n\n create_scope_stream(client_factory.controller_client(), &scope_name, &stream_name, 1).await;\n\n new_stream\n\n });\n\n\n\n let rg_config = ReaderGroupConfigBuilder::default()\n\n .read_from_head_of_stream(str)\n\n .build();\n", "file_path": "integration_test/src/reader_group_tests.rs", "rank": 28, "score": 151532.2030950565 }, { "content": "fn test_read_from_head_of_stream(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testReadHeadScopeRG\".to_owned());\n\n let stream_name = Stream::from(\"testHeadRG\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 10;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream =\n\n create_scope_stream(client_factory.controller_client(), &scope_name, &stream_name, 4).await;\n\n // write events only if the stream is created.\n\n if new_stream {\n\n // write events\n\n write_events(\n\n scope_name.clone(),\n\n stream_name.clone(),\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 29, "score": 151532.2030950565 }, { "content": "// This 
benchmark test uses a mock server that replies ok to any requests instantly. It involves\n\n// kernel latency. It does not wait for reply.\n\nfn event_stream_writer_mock_server_no_block(c: &mut Criterion) {\n\n let rt = tokio::runtime::Runtime::new().unwrap();\n\n let mock_server = rt.block_on(MockServer::new());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(mock_server.address)\n\n .mock(true)\n\n .build()\n\n .expect(\"creating config\");\n\n let mut writer = rt.block_on(set_up_event_stream_writer(config));\n\n rt.spawn(async { MockServer::run(mock_server).await });\n\n let _ = tracing_subscriber::fmt::try_init();\n\n info!(\"start event stream writer mock server(no block) performance testing\");\n\n c.bench_function(\"mock server(no block)\", |b| {\n\n b.iter(|| {\n\n rt.block_on(run_no_block(&mut writer));\n\n });\n\n });\n\n info!(\"event stream writer mock server(no block) performance testing finished\");\n\n}\n\n\n", "file_path": "benches/benchmark.rs", "rank": 30, "score": 150872.5009535619 }, { "content": "pub fn test_index_stream(config: PravegaStandaloneServiceConfig) {\n\n // spin up Pravega standalone\n\n let scope = Scope::from(\"testScopeIndexStream\".to_owned());\n\n let stream = Stream::from(\"testStreamIndexStream\".to_owned());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.runtime();\n\n handle.block_on(utils::create_scope_stream(\n\n client_factory.controller_client(),\n\n &scope,\n\n &stream,\n\n 1,\n\n ));\n\n\n\n let scoped_stream = ScopedStream { scope, stream };\n", "file_path": "integration_test/src/index_stream_tests.rs", "rank": 31, "score": 148987.0665785747 }, { "content": "// Add a bound `T: Fields` to every type parameter T.\n\nfn add_trait_bounds(mut generics: Generics) -> 
Generics {\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param.bounds.push(parse_quote!(Fields));\n\n }\n\n }\n\n generics\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 32, "score": 147294.66435946216 }, { "content": "pub fn retry_internal<O, T, E>(\n\n retry_schedule: impl BackoffSchedule,\n\n mut operation: O,\n\n) -> Result<T, RetryError<E>>\n\nwhere\n\n O: FnMut(u64) -> RetryResult<T, E>,\n\n E: Error,\n\n{\n\n let mut iterator = retry_schedule;\n\n let mut current_try = 1;\n\n let mut total_delay = Duration::default();\n\n // Must use return(for early return).\n\n loop {\n\n match operation(current_try) {\n\n RetryResult::Success(value) => return Ok(value),\n\n RetryResult::Retry(error) => {\n\n if let Some(delay) = iterator.next() {\n\n sleep(delay);\n\n current_try += 1;\n\n total_delay += delay;\n", "file_path": "retry/src/retry_sync.rs", "rank": 33, "score": 146645.91754695016 }, { "content": "#[proc_macro_derive(Fields)]\n\npub fn derive_fields(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n // Parse the input tokens into a syntax tree.\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n // Used in the quasi-quotation below as `#name`.\n\n let name = input.ident;\n\n\n\n // Add a bound `T: Fields` to every type parameter T.\n\n let generics = add_trait_bounds(input.generics);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n // Generate an expression to get the key value pairs of the struct field.\n\n let expr = key_value_pairs(&input.data);\n\n let expanded = quote! 
{\n\n // The generated impl.\n\n impl #impl_generics pravega_client::index::Fields for #name #ty_generics #where_clause {\n\n fn get_field_values(&self) -> Vec<(&'static str, u64)> {\n\n vec!{#expr}\n\n }\n\n }\n\n };\n\n\n\n // Hand the output tokens back to the compiler.\n\n proc_macro::TokenStream::from(expanded)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 34, "score": 139455.44934709373 }, { "content": "pub trait Value {\n\n fn value(&self) -> u64;\n\n}\n\n\n\nimpl Value for u64 {\n\n fn value(&self) -> u64 {\n\n self.to_owned()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod test {\n\n use super::*;\n\n use crate as pravega_client;\n\n\n\n use pravega_client_macros::Fields;\n\n\n\n #[derive(Fields, Debug, PartialOrd, PartialEq)]\n\n struct FieldsTest {\n\n time: u64,\n", "file_path": "src/index/mod.rs", "rank": 35, "score": 139225.9320647918 }, { "content": "pub trait Fields {\n\n fn get_field_values(&self) -> Vec<(&'static str, u64)>;\n\n}\n\n\n", "file_path": "src/index/mod.rs", "rank": 36, "score": 139225.9320647918 }, { "content": "pub trait Reply {\n\n fn get_request_id(&self) -> i64;\n\n fn is_failure(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\n/*\n\n * bincode serialize and deserialize config\n\n */\n\nlazy_static! 
{\n\n static ref CONFIG: Config = {\n\n let mut config = bincode2::config();\n\n config.big_endian();\n\n config.limit(MAX_WIRECOMMAND_SIZE.into());\n\n config.array_length(LengthOption::U32);\n\n config.string_length(LengthOption::U16);\n\n config\n\n };\n\n}\n", "file_path": "wire_protocol/src/commands.rs", "rank": 37, "score": 136321.22212029647 }, { "content": "pub trait Command {\n\n const TYPE_CODE: i32;\n\n fn write_fields(&self) -> Result<Vec<u8>, CommandError>;\n\n fn read_from(input: &[u8]) -> Result<Self, CommandError>\n\n where\n\n Self: Sized;\n\n}\n\n\n\n/**\n\n * trait for Request\n\n */\n", "file_path": "wire_protocol/src/commands.rs", "rank": 38, "score": 136321.22212029647 }, { "content": "pub trait Request {\n\n fn get_request_id(&self) -> i64;\n\n fn must_log(&self) -> bool {\n\n true\n\n }\n\n}\n\n\n\n/**\n\n * trait for Reply\n\n */\n", "file_path": "wire_protocol/src/commands.rs", "rank": 39, "score": 136321.22212029647 }, { "content": "fn check_exist() -> bool {\n\n let path = Path::new(BASE).join(LIBRARY);\n\n if path.exists() {\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "integration_test/build.rs", "rank": 40, "score": 134478.3764000383 }, { "content": "pub fn test_event_stream_writer(config: PravegaStandaloneServiceConfig) {\n\n // spin up Pravega standalone\n\n let scope_name = Scope::from(\"testScopeWriter\".to_owned());\n\n let stream_name = Stream::from(\"testStreamWriter\".to_owned());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.runtime();\n\n handle.block_on(utils::create_scope_stream(\n\n client_factory.controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n\n\n let scoped_stream = ScopedStream {\n", "file_path": "integration_test/src/event_writer_tests.rs", 
"rank": 41, "score": 133770.3683286868 }, { "content": "pub fn test_byte_stream(config: PravegaStandaloneServiceConfig) {\n\n // spin up Pravega standalone\n\n let scope_name = Scope::from(\"testScopeByteStream\".to_owned());\n\n let stream_name = Stream::from(\"testStreamByteStream\".to_owned());\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.runtime_handle();\n\n handle.block_on(utils::create_scope_stream(\n\n client_factory.controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n\n\n let scoped_stream = ScopedStream {\n", "file_path": "integration_test/src/byte_reader_writer_tests.rs", "rank": 42, "score": 133770.3683286868 }, { "content": "pub fn test_event_stream_reader(config: PravegaStandaloneServiceConfig) {\n\n info!(\"test event stream reader\");\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let async_client_factory = client_factory.to_async();\n\n let runtime = client_factory.runtime();\n\n test_read_from_tail_of_stream(&async_client_factory);\n\n test_read_from_head_of_stream(&async_client_factory);\n\n test_read_large_events(&async_client_factory);\n\n test_multi_reader_multi_segments_tail_read(&async_client_factory);\n\n runtime.block_on(test_read_api(&async_client_factory));\n\n runtime.block_on(test_stream_scaling(&async_client_factory));\n\n runtime.block_on(test_release_segment(&async_client_factory));\n\n runtime.block_on(test_release_segment_at(&async_client_factory));\n\n test_multiple_readers(&async_client_factory);\n\n test_reader_offline(&async_client_factory);\n\n 
test_segment_rebalance(&async_client_factory);\n\n info!(\"test event stream reader finished\");\n\n}\n\n\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 43, "score": 133770.3683286868 }, { "content": "pub trait Decode {\n\n type Item;\n\n fn read_from(raw_input: &[u8]) -> Result<Self::Item, CommandError>;\n\n}\n\n\n\nimpl Encode for Requests {\n\n fn write_fields(&self) -> Result<Vec<u8>, CommandError> {\n\n let mut res = Vec::new();\n\n match self {\n\n Requests::Padding(padding_command) => {\n\n res.extend_from_slice(&PaddingCommand::TYPE_CODE.to_be_bytes());\n\n let se = padding_command.write_fields()?;\n\n res.extend_from_slice(&(se.len() as i32).to_be_bytes());\n\n res.extend(se);\n\n }\n\n Requests::PartialEvent(partial_event_cmd) => {\n\n res.extend_from_slice(&PartialEventCommand::TYPE_CODE.to_be_bytes());\n\n let se = partial_event_cmd.write_fields()?;\n\n res.extend_from_slice(&(se.len() as i32).to_be_bytes());\n\n res.extend(se);\n", "file_path": "wire_protocol/src/wire_commands.rs", "rank": 44, "score": 133602.0745599913 }, { "content": "pub trait Encode {\n\n fn write_fields(&self) -> Result<Vec<u8>, CommandError>;\n\n}\n\n\n", "file_path": "wire_protocol/src/wire_commands.rs", "rank": 45, "score": 133602.0745599913 }, { "content": "///\n\n///Helper method to iterate over the all the Pravega Scopes.\n\n///This method returns a stream of values, Pravega scopes, produced asynchronously.\n\n///\n\n/// The below snippets show case the example uses.\n\n/// Sample 1:\n\n///```\n\n/// # use tonic::transport::Channel;\n\n/// # use pravega_controller_client::controller::controller_service_client::ControllerServiceClient;\n\n/// # use pravega_controller_client::ControllerClient;\n\n/// # async fn call_list_scope(controller_client: &dyn ControllerClient) {\n\n/// use pravega_client_shared::Scope;\n\n/// use pravega_client_shared::ScopedStream;\n\n/// use futures::future;\n\n/// use futures::stream::StreamExt;\n\n/// use 
pravega_controller_client::paginator::list_scopes;\n\n/// let stream = list_scopes(\n\n/// controller_client,\n\n/// );\n\n/// // collect all the Scopes in a single vector\n\n/// let scope_list:Vec<Scope> = stream.map(|str| str.unwrap()).collect::<Vec<Scope>>().await;\n\n/// # }\n\n/// ```\n\n///\n\n/// Sample 2:\n\n/// ```\n\n/// # use tonic::transport::Channel;\n\n/// # use pravega_controller_client::controller::controller_service_client::ControllerServiceClient;\n\n/// # use pravega_controller_client::ControllerClient;\n\n/// # async fn call_list_scope(controller_client: &dyn ControllerClient) {\n\n/// use pravega_client_shared::Scope;\n\n/// use pravega_client_shared::ScopedStream;\n\n/// use futures::future;\n\n/// use futures::stream::StreamExt;\n\n/// use pravega_controller_client::paginator::list_scopes;\n\n/// let stream = list_scopes(\n\n/// controller_client,\n\n/// );\n\n/// futures::pin_mut!(stream);\n\n/// let pravega_scope_1 = stream.next().await;\n\n/// let pravega_scope_1 = stream.next().await;\n\n/// // A None is returned at the end of the stream.\n\n/// # }\n\n/// ```\n\n///\n\npub fn list_scopes(\n\n client: &dyn ControllerClient,\n\n) -> impl Stream<Item = Result<Scope, RetryError<ControllerError>>> + '_ {\n\n struct State {\n\n scopes: IntoIter<Scope>,\n\n token: CToken,\n\n }\n\n\n\n // Initial state with an empty Continuation token.\n\n let get_next_stream_async = move |mut state: State| async move {\n\n if let Some(element) = state.scopes.next() {\n\n Some((Ok(element), state))\n\n } else {\n\n // execute a request to the controller.\n\n info!(\"Fetch the next set of scopes using the provided token\",);\n\n let res: ResultRetry<Option<(Vec<Scope>, CToken)>> = client.list_scopes(&state.token).await;\n\n match res {\n\n Ok(None) => None,\n\n Ok(Some((list, ct))) => {\n\n // create a consuming iterator\n", "file_path": "controller-client/src/paginator.rs", "rank": 46, "score": 131186.75003168703 }, { "content": "pub trait PravegaService {\n\n 
/**\n\n * Create and start a PravegaService\n\n */\n\n fn start(config: PravegaStandaloneServiceConfig) -> Self;\n\n\n\n /**\n\n * Stop a given service. If the service is already stopped,nothing would happen.\n\n */\n\n fn stop(&mut self) -> Result<(), std::io::Error>;\n\n\n\n /**\n\n * Enable DEBUG level log of Pravega standalone\n\n */\n\n fn enable_debug_log(enable: bool);\n\n\n\n /**\n\n * Enable Auth for Pravega standalone\n\n */\n\n fn enable_auth(enable: bool);\n", "file_path": "integration_test/src/pravega_service.rs", "rank": 47, "score": 131051.25827449653 }, { "content": "pub fn test_transactional_event_stream_writer(config: PravegaStandaloneServiceConfig) {\n\n info!(\"test TransactionalEventStreamWriter\");\n\n // spin up Pravega standalone\n\n let scope_name = Scope::from(\"testScopeTxnWriter\".to_owned());\n\n let stream_name = Stream::from(\"testStreamTxnWriter\".to_owned());\n\n let scoped_stream = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.runtime();\n\n handle.block_on(setup_test(\n\n &scope_name,\n\n &stream_name,\n", "file_path": "integration_test/src/transactional_event_writer_tests.rs", "rank": 48, "score": 129255.63236023393 }, { "content": "fn check_standalone_status() -> bool {\n\n let output = Command::new(\"sh\")\n\n .arg(\"-c\")\n\n .arg(\"netstat -ltn 2> /dev/null | grep 9090 || ss -ltn 2> /dev/null | grep 9090\")\n\n .output()\n\n .expect(\"failed to execute process\");\n\n // if length is not zero, controller is listening on port 9090\n\n !output.stdout.is_empty()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::env;\n\n use std::net::SocketAddr;\n\n\n\n use wirecommand_tests::*;\n\n\n\n use 
crate::pravega_service::PravegaStandaloneServiceConfig;\n\n\n\n use super::*;\n", "file_path": "integration_test/src/lib.rs", "rank": 49, "score": 129225.72964762611 }, { "content": "pub fn disconnection_test_wrapper() {\n\n let rt = tokio::runtime::Runtime::new().expect(\"create runtime\");\n\n rt.block_on(test_retry_with_no_connection());\n\n rt.shutdown_timeout(Duration::from_millis(100));\n\n\n\n let config = PravegaStandaloneServiceConfig::new(false, false, false);\n\n let mut pravega = PravegaStandaloneService::start(config);\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(PravegaNodeUri::from(\"127.0.0.1:9090\"))\n\n .connection_type(ConnectionType::Tokio)\n\n .build()\n\n .expect(\"build client config\");\n\n let cf = ClientFactory::new(config);\n\n let rt = cf.runtime();\n\n test_retry_while_start_pravega(&cf);\n\n assert_eq!(check_standalone_status(), true);\n\n test_retry_with_unexpected_reply(&cf);\n\n pravega.stop().unwrap();\n\n wait_for_standalone_with_timeout(false, 10);\n\n\n", "file_path": "integration_test/src/disconnection_tests.rs", "rank": 50, "score": 126232.66445351785 }, { "content": "pub fn wirecommand_test_wrapper() {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .build()\n\n .expect(\"build client config\");\n\n let cf = ClientFactory::new(config);\n\n let h = cf.runtime();\n\n h.block_on(wirecommand_tests(&cf));\n\n}\n\n\n\npub async fn wirecommand_tests(factory: &ClientFactory) {\n\n let timeout_second = time::Duration::from_secs(30);\n\n\n\n timeout(timeout_second, test_hello(factory)).await.unwrap();\n\n\n\n timeout(timeout_second, test_keep_alive(factory)).await.unwrap();\n\n\n\n timeout(timeout_second, test_setup_append(factory)).await.unwrap();\n\n\n\n timeout(timeout_second, test_create_segment(factory))\n", "file_path": "integration_test/src/wirecommand_tests.rs", "rank": 51, "score": 126232.66445351785 }, { "content": "struct TokioConnectionFactory {\n\n 
tls_enabled: bool,\n\n certs: Vec<String>,\n\n disable_cert_verification: bool,\n\n}\n\n\n\nimpl TokioConnectionFactory {\n\n fn new(tls_enabled: bool, certs: Vec<String>, disable_cert_verification: bool) -> Self {\n\n TokioConnectionFactory {\n\n tls_enabled,\n\n certs,\n\n disable_cert_verification,\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl ConnectionFactory for TokioConnectionFactory {\n\n async fn establish_connection(\n\n &self,\n", "file_path": "wire_protocol/src/connection_factory.rs", "rank": 52, "score": 121728.34889294449 }, { "content": "struct MockConnectionFactory {\n\n segments: Arc<Mutex<HashMap<String, SegmentInfo>>>,\n\n writers: Arc<Mutex<HashMap<u128, String>>>,\n\n table_segment_index: Arc<Mutex<TableSegmentIndex>>,\n\n table_segment: Arc<Mutex<TableSegment>>,\n\n mock_type: MockType,\n\n}\n\n\n\nimpl MockConnectionFactory {\n\n pub fn new(mock_type: MockType) -> Self {\n\n MockConnectionFactory {\n\n segments: Arc::new(Mutex::new(HashMap::new())),\n\n writers: Arc::new(Mutex::new(HashMap::new())),\n\n table_segment_index: Arc::new(Mutex::new(HashMap::new())),\n\n table_segment: Arc::new(Mutex::new(HashMap::new())),\n\n mock_type,\n\n }\n\n }\n\n}\n\n\n", "file_path": "wire_protocol/src/connection_factory.rs", "rank": 53, "score": 121728.34889294449 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\nstruct Tombstone {}\n\n\n\n/// The Update contains a nested map and a version map, which are the same map in\n\n/// synchronizer but will be updated instantly when caller calls Insert or Remove method.\n\n/// It is used to update the server side of table and its updates will be applied to\n\n/// synchronizer once the updates are successfully stored on the server side.\n\npub struct Update {\n\n map: HashMap<String, HashMap<String, Value>>,\n\n map_version: HashMap<String, Value>,\n\n insert: Vec<Insert>,\n\n remove: Vec<Remove>,\n\n}\n\n\n\nimpl Update {\n\n pub fn new(\n\n map: HashMap<String, HashMap<String, 
Value>>,\n\n map_version: HashMap<String, Value>,\n\n insert: Vec<Insert>,\n\n remove: Vec<Remove>,\n\n ) -> Self {\n", "file_path": "src/sync/synchronizer.rs", "rank": 54, "score": 120772.91935303254 }, { "content": "fn test_read_large_events(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testReaderScaling\".to_owned());\n\n let stream_name = Stream::from(\"testReadLargeEvents\".to_owned());\n\n\n\n const NUM_EVENTS: usize = 1000;\n\n const EVENT_SIZE: usize = 1000;\n\n\n\n let new_stream = h.block_on(create_scope_stream(\n\n client_factory.controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n // write events only if the stream is created. This is useful if we are running the reader tests\n\n // multiple times.\n\n if new_stream {\n\n h.block_on(write_events(\n\n scope_name.clone(),\n\n stream_name.clone(),\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 55, "score": 118931.28437053898 }, { "content": "fn test_read_offline_with_offset(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testReaderOfflineWithOffset\".to_owned());\n\n let stream_name = Stream::from(\"test\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 10;\n\n const EVENT_SIZE: usize = 10;\n\n // create scope and stream.\n\n h.block_on(async {\n\n let new_stream =\n\n create_scope_stream(client_factory.controller_client(), &scope_name, &stream_name, 1).await;\n\n new_stream\n\n });\n\n\n\n let rg_config = ReaderGroupConfigBuilder::default()\n\n .read_from_head_of_stream(str)\n\n .build();\n", "file_path": "integration_test/src/reader_group_tests.rs", "rank": 56, "score": 118931.28437053898 }, { "content": "// This is used to parse the key received from the server.\n\nstruct InternalKey {\n\n pub key: String,\n\n}\n\n\n\nimpl 
InternalKey {\n\n fn split(&self) -> (String, Option<String>) {\n\n let outer_name_length: usize = self.key[..PREFIX_LENGTH].parse().expect(\"parse prefix length\");\n\n assert!(self.key.len() >= PREFIX_LENGTH + outer_name_length);\n\n let outer = self.key[PREFIX_LENGTH..PREFIX_LENGTH + outer_name_length]\n\n .parse::<String>()\n\n .expect(\"parse outer key\");\n\n\n\n if self.key.len() > PREFIX_LENGTH + outer_name_length {\n\n // there is a slash separating outer_key and_inner key\n\n let inner = self.key[PREFIX_LENGTH + outer_name_length + 1..]\n\n .parse::<String>()\n\n .expect(\"parse inner key\");\n\n (outer, Some(inner))\n\n } else {\n\n (outer, None)\n", "file_path": "src/sync/synchronizer.rs", "rank": 57, "score": 117673.3858698933 }, { "content": "// An internal connection struct that stores the uuid of the connection\n\nstruct InternalConn<T> {\n\n uuid: Uuid,\n\n conn: T,\n\n}\n\n\n", "file_path": "connection_pool/src/connection_pool.rs", "rank": 58, "score": 117497.16711916598 }, { "content": "// An InternalPool that maintains a vector that stores all the connections.\n\nstruct InternalPool<T> {\n\n conns: Vec<InternalConn<T>>,\n\n}\n\n\n\nimpl<T: Send + Sized> InternalPool<T> {\n\n fn new() -> Self {\n\n InternalPool { conns: vec![] }\n\n }\n\n}\n\n\n\nimpl<T> fmt::Debug for InternalPool<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"InternalPool\")\n\n .field(\"pool size\", &self.conns.len())\n\n .finish()\n\n }\n\n}\n\n\n\n/// A smart pointer wrapping a Connection so that the inner Connection can return to the ConnectionPool once\n\n/// this pointer is dropped.\n", "file_path": "connection_pool/src/connection_pool.rs", "rank": 59, "score": 117485.50096812283 }, { "content": "fn apply_inserts_to_localmap(\n\n to_update: &mut Update,\n\n new_version: Vec<Version>,\n\n table_synchronizer: &mut Synchronizer,\n\n) {\n\n let mut i = 0;\n\n for update in to_update.get_insert_iter() {\n\n if let Some(ref inner_key) = 
update.inner_key {\n\n let new_key = Key {\n\n key: inner_key.to_owned(),\n\n key_version: *new_version.get(i).expect(\"get new version\"),\n\n };\n\n let inner_map = to_update.map.get(&update.outer_key).expect(\"get inner map\");\n\n let new_value = inner_map.get(inner_key).expect(\"get the Value\").clone();\n\n\n\n let in_mem_inner_map = table_synchronizer\n\n .in_memory_map\n\n .entry(update.outer_key.clone())\n\n .or_insert_with(HashMap::new);\n\n in_mem_inner_map.insert(new_key, new_value);\n", "file_path": "src/sync/synchronizer.rs", "rank": 60, "score": 112027.50863374898 }, { "content": "fn test_multi_reader_multi_segments_tail_read(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testMultiReaderMultiSegmentsTailRead\".to_owned());\n\n let stream_name = Stream::from(\"testMultiReaderMultiSegmentsTailRead\".to_owned());\n\n\n\n const NUM_EVENTS: usize = 2000;\n\n const EVENT_SIZE: usize = 1024;\n\n\n\n let new_stream = h.block_on(create_scope_stream(\n\n client_factory.controller_client(),\n\n &scope_name,\n\n &stream_name,\n\n 2,\n\n ));\n\n // write events only if the stream is created. 
This is useful if we are running the reader tests\n\n // multiple times.\n\n let scope_name_clone = scope_name.clone();\n\n let stream_name_clone = stream_name.clone();\n\n let factory = client_factory.clone();\n\n if new_stream {\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 61, "score": 111966.84010160019 }, { "content": "/// The retry policy that can retry something with\n\n/// backoff policy.\n\npub trait BackoffSchedule: Iterator<Item = Duration> {}\n\n\n\n/// Any implementation which implements the Iterator trait would also implement BackoffSchedule.\n\nimpl<T> BackoffSchedule for T where T: Iterator<Item = Duration> {}\n\n\n\n/// The retry policy that can retry something with\n\n/// exp backoff policy.\n\n#[derive(Debug, Clone, Copy, Default, PartialEq)]\n\npub struct RetryWithBackoff {\n\n current: u64,\n\n base: u64,\n\n max_delay: Option<Duration>,\n\n}\n\n\n\nimpl RetryWithBackoff {\n\n /// Constructs a new exponential back-off strategy,\n\n /// using default setting.\n\n pub fn default() -> RetryWithBackoff {\n\n let delay = Some(Duration::from_millis(10000));\n\n RetryWithBackoff {\n", "file_path": "retry/src/retry_policy.rs", "rank": 62, "score": 111105.91481788283 }, { "content": "#[test]\n\nfn test_read_segment() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let read_segment_command = WireCommands::Requests(Requests::ReadSegment(ReadSegmentCommand {\n\n segment: segment_name,\n\n offset: 0,\n\n suggested_length: 10,\n\n delegation_token: token,\n\n request_id: 1,\n\n }));\n\n test_command(read_segment_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 63, "score": 109632.31314111836 }, { "content": "#[test]\n\nfn test_table_read() {\n\n let mut entries = Vec::<(TableKey, TableValue)>::new();\n\n let key_data = String::from(\"key-1\").into_bytes();\n\n let value_data = String::from(\"value-1\").into_bytes();\n\n 
entries.push((TableKey::new(key_data, 1), TableValue::new(value_data)));\n\n let table_entries = TableEntries { entries };\n\n let segment_name = String::from(\"segment-1\");\n\n let table_read = WireCommands::Replies(Replies::TableRead(TableReadCommand {\n\n request_id: 1,\n\n segment: segment_name,\n\n entries: table_entries,\n\n }));\n\n\n\n test_command(table_read);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 64, "score": 109632.31314111836 }, { "content": "#[test]\n\nfn test_segment_read() {\n\n let segment_name = String::from(\"segment-1\");\n\n let data = String::from(\"event-1\").into_bytes();\n\n let segment_read_command = WireCommands::Replies(Replies::SegmentRead(SegmentReadCommand {\n\n segment: segment_name,\n\n offset: 0,\n\n at_tail: true,\n\n end_of_segment: true,\n\n data,\n\n request_id: 1,\n\n }));\n\n test_command(segment_read_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 65, "score": 109632.31314111836 }, { "content": "#[test]\n\nfn test_read_table() {\n\n let segment = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let mut keys = Vec::<TableKey>::new();\n\n let key_data = String::from(\"key-1\").into_bytes();\n\n keys.push(TableKey::new(key_data, 1));\n\n let read_table_command = WireCommands::Requests(Requests::ReadTable(ReadTableCommand {\n\n request_id: 1,\n\n segment,\n\n delegation_token: token,\n\n keys,\n\n }));\n\n\n\n test_command(read_table_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 66, "score": 109632.31314111836 }, { "content": "#[derive(Fields, Debug, PartialOrd, PartialEq)]\n\nstruct TestFields2 {\n\n pos: u64,\n\n id: u64,\n\n timestamp: u64,\n\n}\n\n\n", "file_path": "integration_test/src/index_stream_tests.rs", "rank": 67, "score": 109543.92114889773 }, { "content": "#[derive(Fields, Debug, PartialOrd, PartialEq)]\n\nstruct TestFields1 {\n\n id: u64,\n\n timestamp: u64,\n\n pos: u64,\n\n}\n\n\n", "file_path": 
"integration_test/src/index_stream_tests.rs", "rank": 68, "score": 109543.92114889773 }, { "content": "#[derive(Fields, Debug, PartialOrd, PartialEq)]\n\nstruct TestFields0 {\n\n id: u64,\n\n timestamp: u64,\n\n}\n\n\n", "file_path": "integration_test/src/index_stream_tests.rs", "rank": 69, "score": 109543.92114889773 }, { "content": "#[test]\n\nfn test_uses_default_setting() {\n\n let mut s = RetryWithBackoff::default();\n\n\n\n assert_eq!(s.next(), Some(Duration::from_millis(1)));\n\n assert_eq!(s.next(), Some(Duration::from_millis(10)));\n\n assert_eq!(s.next(), Some(Duration::from_millis(100)));\n\n assert_eq!(s.next(), Some(Duration::from_millis(1000)));\n\n}\n\n\n", "file_path": "retry/src/retry_policy.rs", "rank": 70, "score": 107173.20523618333 }, { "content": "pub fn test_tablesynchronizer(config: PravegaStandaloneServiceConfig) {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.runtime();\n\n handle.block_on(test_insert(&client_factory));\n\n handle.block_on(test_remove(&client_factory));\n\n handle.block_on(test_insert_with_two_table_synchronizers(&client_factory));\n\n handle.block_on(test_remove_with_two_table_synchronizers(&client_factory));\n\n handle.block_on(test_insert_and_get_with_customize_struct(&client_factory));\n\n handle.block_on(test_fetching_updates_delta(&client_factory));\n\n}\n\n\n\nasync fn test_insert(client_factory: &ClientFactory) {\n\n info!(\"test insert\");\n\n let scope = Scope {\n", "file_path": "integration_test/src/synchronizer_tests.rs", "rank": 71, "score": 107101.46363501644 }, { "content": "pub fn test_table(config: PravegaStandaloneServiceConfig) {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n 
.is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n\n\n let client_factory = ClientFactory::new(config);\n\n let handle = client_factory.runtime();\n\n handle.block_on(test_single_key_operations(&client_factory));\n\n handle.block_on(test_multiple_key_operations(&client_factory));\n\n handle.block_on(test_multiple_key_remove_operations(&client_factory));\n\n handle.block_on(test_iterators(&client_factory));\n\n}\n\n\n\nasync fn test_single_key_operations(client_factory: &ClientFactory) {\n\n let scope = Scope {\n\n name: \"tableScope\".to_string(),\n\n };\n", "file_path": "integration_test/src/table_tests.rs", "rank": 72, "score": 107101.46363501644 }, { "content": "#[test]\n\nfn test_table_keys_read() {\n\n let segment = String::from(\"segment-1\");\n\n let mut keys = Vec::<TableKey>::new();\n\n let key_data = String::from(\"key-1\").into_bytes();\n\n keys.push(TableKey::new(key_data, 1));\n\n let continuation_token: Vec<u8> = vec![1, 2, 3];\n\n let table_keys_read_command = WireCommands::Replies(Replies::TableKeysRead(TableKeysReadCommand {\n\n request_id: 1,\n\n segment,\n\n keys,\n\n continuation_token,\n\n }));\n\n test_command(table_keys_read_command);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 73, "score": 107092.26558410114 }, { "content": "#[test]\n\nfn test_read_table_keys() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let continuation_token: Vec<u8> = vec![1, 2, 3];\n\n let read_table_keys = WireCommands::Requests(Requests::ReadTableKeys(ReadTableKeysCommand {\n\n request_id: 0,\n\n segment: segment_name,\n\n delegation_token: token,\n\n suggested_key_count: 3,\n\n continuation_token,\n\n }));\n\n test_command(read_table_keys);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 74, "score": 107092.26558410114 }, { "content": "#[test]\n\nfn test_read_table_entries() {\n\n let segment_name = String::from(\"segment-1\");\n\n let 
token = String::from(\"delegation_token\");\n\n let continuation_token: Vec<u8> = vec![1, 2, 3];\n\n let read_table_entries = WireCommands::Requests(Requests::ReadTableEntries(ReadTableEntriesCommand {\n\n request_id: 0,\n\n segment: segment_name,\n\n delegation_token: token,\n\n suggested_entry_count: 3,\n\n continuation_token,\n\n }));\n\n test_command(read_table_entries);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 75, "score": 107092.26558410114 }, { "content": "#[test]\n\nfn test_table_entries_read() {\n\n let segment_name = String::from(\"segment-1\");\n\n let continuation_token: Vec<u8> = vec![1, 2, 3];\n\n let mut entries = Vec::<(TableKey, TableValue)>::new();\n\n let key_data = String::from(\"key-1\").into_bytes();\n\n let value_data = String::from(\"value-1\").into_bytes();\n\n entries.push((TableKey::new(key_data, 1), TableValue::new(value_data)));\n\n let table_entries = TableEntries { entries };\n\n let table_entries_read = WireCommands::Replies(Replies::TableEntriesRead(TableEntriesReadCommand {\n\n request_id: 1,\n\n segment: segment_name,\n\n entries: table_entries,\n\n continuation_token,\n\n }));\n\n test_command(table_entries_read);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 76, "score": 107092.26558410114 }, { "content": "#[test]\n\nfn test_create_stream_error() {\n\n let mut rt = Runtime::new().unwrap();\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(\"127.0.0.1:9090\".parse::<SocketAddr>().unwrap())\n\n .retry_policy(RetryWithBackoff::default().max_delay(Duration::from_micros(1)))\n\n .build()\n\n .expect(\"build client config\");\n\n let client = ControllerClientImpl::new(config, rt.handle().clone());\n\n\n\n let request = StreamConfiguration {\n\n scoped_stream: ScopedStream {\n\n scope: Scope::from(\"testScope123\".to_owned()),\n\n stream: Stream::from(\"testStream\".to_owned()),\n\n },\n\n scaling: Scaling {\n\n scale_type: ScaleType::FixedNumSegments,\n\n target_rate: 0,\n\n 
scale_factor: 0,\n\n min_num_segments: 1,\n\n },\n", "file_path": "controller-client/src/test.rs", "rank": 77, "score": 106817.36839093902 }, { "content": "#[test]\n\nfn test_stream_segment_info() {\n\n let segment_name = String::from(\"segment-1\");\n\n let stream_segment_info = WireCommands::Replies(Replies::StreamSegmentInfo(StreamSegmentInfoCommand {\n\n request_id: 0,\n\n segment_name,\n\n exists: false,\n\n is_sealed: false,\n\n is_deleted: false,\n\n last_modified: 0,\n\n write_offset: 0,\n\n start_offset: 0,\n\n }));\n\n test_command(stream_segment_info);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 78, "score": 106817.36839093902 }, { "content": "fn get_segments_for_stream(\n\n stream: &ScopedStream,\n\n created_streams: &RwLockReadGuard<HashMap<ScopedStream, StreamConfiguration>>,\n\n) -> Result<Vec<ScopedSegment>, RetryError<ControllerError>> {\n\n let stream_config = created_streams.get(stream);\n\n if stream_config.is_none() {\n\n return Err(RetryError {\n\n error: ControllerError::OperationError {\n\n can_retry: false, // do not retry.\n\n operation: \"get segments for stream\".into(),\n\n error_msg: \"stream does not exist.\".into(),\n\n },\n\n total_delay: Duration::from_millis(1),\n\n tries: 0,\n\n });\n\n }\n\n\n\n let scaling_policy = stream_config.unwrap().scaling.clone();\n\n\n\n if scaling_policy.scale_type != ScaleType::FixedNumSegments {\n", "file_path": "controller-client/src/mock_controller.rs", "rank": 79, "score": 106817.36839093902 }, { "content": "fn remove_suffix(value: &mut String, suffix: &str) {\n\n if value.ends_with(suffix) {\n\n let n = value.len();\n\n value.truncate(n - suffix.len());\n\n }\n\n}\n\n\n", "file_path": "integration_test/build.rs", "rank": 80, "score": 106524.90317641417 }, { "content": "pub fn test_controller_apis(config: PravegaStandaloneServiceConfig) {\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n 
.is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n\n\n let controller = client_factory.controller_client();\n\n let handle = client_factory.runtime();\n\n // Create a Scope that is used by all the tests.\n\n let scope_result = handle.block_on(controller.create_scope(&Scope::from(SCOPE.to_owned())));\n\n info!(\"Response for create_scope is {:?}\", scope_result);\n\n // Invoke the tests.\n\n handle.block_on(test_scope_stream(controller));\n\n handle.block_on(test_stream_tags(controller));\n\n handle.block_on(test_scale_stream(controller));\n\n}\n\n\n", "file_path": "integration_test/src/controller_tests.rs", "rank": 81, "score": 105304.56990556917 }, { "content": "#[test]\n\nfn test_table_entries_delta_read() {\n\n let segment_name = String::from(\"segment-1\");\n\n let mut entries = Vec::<(TableKey, TableValue)>::new();\n\n let key_data = String::from(\"key-1\").into_bytes();\n\n let value_data = String::from(\"value-1\").into_bytes();\n\n entries.push((TableKey::new(key_data, 1), TableValue::new(value_data)));\n\n let table_entries = TableEntries { entries };\n\n let table_entries_delta_read =\n\n WireCommands::Replies(Replies::TableEntriesDeltaRead(TableEntriesDeltaReadCommand {\n\n request_id: 0,\n\n segment: segment_name,\n\n entries: table_entries,\n\n should_clear: false,\n\n reached_end: false,\n\n last_position: 0,\n\n }));\n\n test_command(table_entries_delta_read);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 82, "score": 104704.740921672 }, { "content": "#[test]\n\nfn test_read_table_entries_delta() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let read_table_entries_delta =\n\n WireCommands::Requests(Requests::ReadTableEntriesDelta(ReadTableEntriesDeltaCommand {\n\n request_id: 0,\n\n segment: segment_name,\n\n delegation_token: token,\n\n from_position: 0,\n\n suggested_entry_count: 3,\n\n 
}));\n\n test_command(read_table_entries_delta);\n\n}\n", "file_path": "wire_protocol/src/tests.rs", "rank": 83, "score": 104704.740921672 }, { "content": "#[test]\n\nfn test_get_stream_segment_info() {\n\n let segment_name = String::from(\"segment-1\");\n\n let token = String::from(\"delegation_token\");\n\n let get_stream_segment_info =\n\n WireCommands::Requests(Requests::GetStreamSegmentInfo(GetStreamSegmentInfoCommand {\n\n request_id: 1,\n\n segment_name,\n\n delegation_token: token,\n\n }));\n\n test_command(get_stream_segment_info);\n\n}\n\n\n", "file_path": "wire_protocol/src/tests.rs", "rank": 84, "score": 104438.09714004528 }, { "content": "fn wait_for_standalone_with_timeout(expected_status: bool, timeout_second: i32) {\n\n for _i in 0..timeout_second {\n\n if expected_status == check_standalone_status() {\n\n return;\n\n }\n\n thread::sleep(time::Duration::from_secs(1));\n\n }\n\n panic!(\n\n \"timeout {} exceeded, Pravega standalone is in status {} while expected {}\",\n\n timeout_second, !expected_status, expected_status\n\n );\n\n}\n\n\n", "file_path": "integration_test/src/lib.rs", "rank": 85, "score": 103735.05589216048 }, { "content": "pub fn test_reader_group(config: PravegaStandaloneServiceConfig) {\n\n info!(\"test reader group\");\n\n let config = ClientConfigBuilder::default()\n\n .controller_uri(MOCK_CONTROLLER_URI)\n\n .is_auth_enabled(config.auth)\n\n .is_tls_enabled(config.tls)\n\n .build()\n\n .expect(\"creating config\");\n\n let client_factory = ClientFactory::new(config);\n\n let async_client_factory = client_factory.to_async();\n\n test_read_offline_with_offset(&async_client_factory);\n\n\n\n info!(\"test reader group finished\");\n\n}\n\n\n", "file_path": "integration_test/src/reader_group_tests.rs", "rank": 86, "score": 103599.10463489318 }, { "content": "type SegmentReadResult = Result<SegmentDataBuffer, ReaderErrorWithOffset>;\n\n\n\nconst REBALANCE_INTERVAL: Duration = Duration::from_secs(10);\n\n\n\nconst READ_BUFFER_SIZE: i32 
= 8 * 1024 * 1024; // max size for a single Event\n\n\n\ncfg_if::cfg_if! {\n\n if #[cfg(test)] {\n\n use crate::event::reader_group_state::MockReaderGroupState as ReaderGroupState;\n\n } else {\n\n use crate::event::reader_group_state::ReaderGroupState;\n\n }\n\n}\n\n\n\n/// Read events from Stream.\n\n///\n\n/// An event reader fetches data from its assigned segments as a SegmentSlice,\n\n/// where a SegmentSlice represents data from a Pravega Segment. It provides the following APIs.\n\n/// 1. A method to initialize the event reader [EventReader#init](EventReader#init)\n\n/// 2. A method to obtain a SegmentSlice to read events from a Pravega segment. The user can use the\n", "file_path": "src/event/reader.rs", "rank": 87, "score": 102195.17442177457 }, { "content": "fn check_auth_token_expired(reply: &Replies) -> Result<(), RawClientError> {\n\n if let Replies::AuthTokenCheckFailed(ref cmd) = reply {\n\n if cmd.is_token_expired() {\n\n return Err(RawClientError::AuthTokenExpired { reply: reply.clone() });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use pravega_client_config::connection_type::ConnectionType;\n\n use pravega_wire_protocol::commands::{HelloCommand, ReadSegmentCommand, SegmentReadCommand};\n\n use pravega_wire_protocol::connection_factory::{ConnectionFactory, ConnectionFactoryConfig};\n\n use pravega_wire_protocol::wire_commands::Encode;\n\n use std::io::{Read, Write};\n\n use std::net::{SocketAddr, TcpListener};\n\n use std::thread;\n\n use tokio::runtime::Runtime;\n", "file_path": "src/segment/raw_client.rs", "rank": 88, "score": 101862.30328511339 }, { "content": "fn key_value_pairs(data: &Data) -> TokenStream {\n\n match *data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => fields\n\n .named\n\n .iter()\n\n .map(|f| {\n\n let name = f.ident.as_ref().unwrap();\n\n let name_str = format!(\"{}\", name);\n\n\n\n quote_spanned! 
{f.span()=>\n\n (#name_str, pravega_client::index::Value::value(&self.#name)),\n\n }\n\n })\n\n .collect(),\n\n Fields::Unnamed(ref _fields) => {\n\n quote! {\n\n compile_error!(\"expected named fields\");\n\n }\n\n }\n\n Fields::Unit => {\n\n quote! {\n\n compile_error!(\"expected named fields\");\n\n }\n\n }\n\n },\n\n Data::Enum(_) | Data::Union(_) => unimplemented!(),\n\n }\n\n}\n", "file_path": "macros/src/lib.rs", "rank": 89, "score": 93052.44251127061 }, { "content": " };\n\n controller_client\n\n .create_stream(&stream_config)\n\n .await\n\n .expect(\"create stream\");\n\n println!(\"stream created\");\n\n\n\n // create event stream writer\n\n let stream = ScopedStream::from(\"fooScope/barStream\");\n\n let mut event_writer = client_factory.create_event_writer(stream.clone());\n\n println!(\"event writer created\");\n\n\n\n // write payload\n\n let payload = \"hello world\".to_string().into_bytes();\n\n let result = event_writer.write_event(payload).await;\n\n assert!(result.await.is_ok());\n\n println!(\"event writer sent and flushed data\");\n\n\n\n // create event stream reader\n\n let rg = client_factory.create_reader_group(\"rg\".to_string(), stream).await;\n", "file_path": "examples/event_write_and_read.rs", "rank": 90, "score": 91260.20829833474 }, { "content": " let mut reader = rg.create_reader(\"r1\".to_string()).await;\n\n println!(\"event reader created\");\n\n\n\n // read from segment\n\n if let Some(mut slice) = reader\n\n .acquire_segment()\n\n .await\n\n .expect(\"Failed to acquire segment since the reader is offline\")\n\n {\n\n let read_event = slice.next();\n\n assert!(read_event.is_some(), \"event slice should have event to read\");\n\n assert_eq!(b\"hello world\", read_event.unwrap().value.as_slice());\n\n println!(\"event reader read data\");\n\n } else {\n\n println!(\"no data to read from the Pravega stream\");\n\n assert!(false, \"read should return the written event.\")\n\n }\n\n reader\n\n .reader_offline()\n\n .await\n\n 
.expect(\"failed to mark the reader offline\");\n\n println!(\"event write and read example finished\");\n\n });\n\n}\n", "file_path": "examples/event_write_and_read.rs", "rank": 91, "score": 91246.97308918633 }, { "content": "/*\n\n * Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n */\n\n\n\nuse pravega_client::client_factory::ClientFactory;\n\nuse pravega_client_config::ClientConfigBuilder;\n\nuse pravega_client_shared::{\n\n Retention, RetentionType, ScaleType, Scaling, Scope, ScopedStream, Stream, StreamConfiguration,\n\n};\n\n\n", "file_path": "examples/event_write_and_read.rs", "rank": 92, "score": 91236.35353585581 }, { "content": " println!(\"scope created\");\n\n\n\n // create a stream containing only one segment\n\n let stream = Stream::from(\"barStream\".to_owned());\n\n let stream_config = StreamConfiguration {\n\n scoped_stream: ScopedStream {\n\n scope: scope.clone(),\n\n stream: stream.clone(),\n\n },\n\n scaling: Scaling {\n\n scale_type: ScaleType::FixedNumSegments,\n\n target_rate: 0,\n\n scale_factor: 0,\n\n min_num_segments: 1,\n\n },\n\n retention: Retention {\n\n retention_type: RetentionType::None,\n\n retention_param: 0,\n\n },\n\n tags: None,\n", "file_path": "examples/event_write_and_read.rs", "rank": 93, "score": 91231.79382758832 }, { "content": "pub fn create_channel<U>(capacity: usize) -> (ChannelSender<U>, ChannelReceiver<U>) {\n\n let (tx, rx) = unbounded_channel();\n\n let semaphore = Semaphore::new(true, capacity);\n\n let semaphore_arc = Arc::new(semaphore);\n\n let sender = ChannelSender {\n\n sender: tx,\n\n semaphore: semaphore_arc.clone(),\n\n capacity,\n\n };\n\n let receiver = ChannelReceiver {\n\n receiver: rx,\n\n semaphore: semaphore_arc,\n\n 
capacity,\n\n };\n\n (sender, receiver)\n\n}\n\n\n\npub struct CapacityGuard {\n\n semaphore: Arc<Semaphore>,\n\n pub size: usize,\n", "file_path": "channel/src/lib.rs", "rank": 94, "score": 89975.32684095149 }, { "content": "/// A Python module for Pravega implemented in Rust.\n\nfn pravega_client(py: Python, m: &PyModule) -> PyResult<()> {\n\n let _ = tracing_subscriber::fmt::try_init();\n\n m.add_class::<StreamManager>()?;\n\n m.add_class::<StreamWriter>()?;\n\n m.add_class::<StreamTxnWriter>()?;\n\n m.add_class::<StreamTransaction>()?;\n\n m.add_class::<StreamReader>()?;\n\n m.add_class::<StreamReaderGroupConfig>()?;\n\n m.add_class::<StreamReaderGroup>()?;\n\n m.add_class::<StreamScalingPolicy>()?;\n\n m.add_class::<StreamRetentionPolicy>()?;\n\n m.add_class::<ByteStream>()?;\n\n let txn_exception = py.get_type::<TxnFailedException>();\n\n txn_exception.setattr(\"__doc__\", TXNFAILED_EXCEPTION_DOCSTRING)?;\n\n m.add(\"TxnFailedException\", txn_exception)?;\n\n Ok(())\n\n}\n", "file_path": "python/src/lib.rs", "rank": 95, "score": 87890.49408771667 }, { "content": "fn test_reader_offline(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testScope\".to_owned());\n\n let stream_name = Stream::from(\"testReaderOffline\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 10;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream =\n\n create_scope_stream(client_factory.controller_client(), &scope_name, &stream_name, 4).await;\n\n // write events only if the stream is created.\n\n if new_stream {\n\n // write events\n\n write_events(\n\n scope_name.clone(),\n\n stream_name.clone(),\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 96, "score": 87811.3729835169 }, { "content": "fn test_multiple_readers(client_factory: &ClientFactoryAsync) {\n\n let h = 
client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testScope\".to_owned());\n\n let stream_name = Stream::from(\"testMultiReader\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 50;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream =\n\n create_scope_stream(client_factory.controller_client(), &scope_name, &stream_name, 4).await;\n\n // write events only if the stream is created.\n\n if new_stream {\n\n // write events\n\n write_events(\n\n scope_name.clone(),\n\n stream_name.clone(),\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 97, "score": 87811.3729835169 }, { "content": "fn test_segment_rebalance(client_factory: &ClientFactoryAsync) {\n\n let h = client_factory.runtime_handle();\n\n let scope_name = Scope::from(\"testScope\".to_owned());\n\n let stream_name = Stream::from(\"testsegrebalance\".to_owned());\n\n let str = ScopedStream {\n\n scope: scope_name.clone(),\n\n stream: stream_name.clone(),\n\n };\n\n const NUM_EVENTS: usize = 50;\n\n const EVENT_SIZE: usize = 10;\n\n\n\n h.block_on(async {\n\n let new_stream =\n\n create_scope_stream(client_factory.controller_client(), &scope_name, &stream_name, 4).await;\n\n // write events only if the stream is created.\n\n if new_stream {\n\n // write events with random routing keys.\n\n write_events(\n\n scope_name.clone(),\n\n stream_name.clone(),\n", "file_path": "integration_test/src/event_reader_tests.rs", "rank": 98, "score": 87811.3729835169 }, { "content": "fn set_up_byte_stream_reader(config: ClientConfig, rt: &Runtime) -> ByteReader {\n\n let scope_name: Scope = Scope::from(\"testByteReaderPerf\".to_string());\n\n let stream_name = Stream::from(\"testByteReaderPerf\".to_string());\n\n let client_factory = ClientFactory::new(config.clone());\n\n let controller_client = client_factory.controller_client();\n\n rt.block_on(create_scope_stream(\n\n 
controller_client,\n\n &scope_name,\n\n &stream_name,\n\n 1,\n\n ));\n\n let scoped_stream = ScopedStream::from(\"testByteReaderPerf/testByteReaderPerf\");\n\n client_factory.create_byte_reader(scoped_stream)\n\n}\n\n\n\nasync fn create_scope_stream(\n\n controller_client: &dyn ControllerClient,\n\n scope_name: &Scope,\n\n stream_name: &Stream,\n\n segment_number: i32,\n", "file_path": "benches/benchmark.rs", "rank": 99, "score": 83714.7920606395 } ]
Rust
nrf-softdevice/src/flash.rs
chris-ricketts/nrf-softdevice
6ee09a134d9366029462963650dcd7e3921d6c1d
use core::future::Future; use core::marker::PhantomData; use core::sync::atomic::{AtomicBool, Ordering}; use embedded_storage::nor_flash::{ErrorType, NorFlashError, NorFlashErrorKind}; use embedded_storage_async::nor_flash::{AsyncNorFlash, AsyncReadNorFlash}; use crate::raw; use crate::util::{DropBomb, Signal}; use crate::{RawError, Softdevice}; #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] #[non_exhaustive] pub enum FlashError { Failed, AddressMisaligned, BufferMisaligned, } impl NorFlashError for FlashError { fn kind(&self) -> NorFlashErrorKind { match self { Self::Failed => NorFlashErrorKind::Other, Self::AddressMisaligned => NorFlashErrorKind::NotAligned, Self::BufferMisaligned => NorFlashErrorKind::NotAligned, } } } pub struct Flash { _private: PhantomData<*mut ()>, } static FLASH_TAKEN: AtomicBool = AtomicBool::new(false); impl Flash { const PAGE_SIZE: usize = 4096; pub fn take(_sd: &Softdevice) -> Flash { if FLASH_TAKEN .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire) .is_err() { panic!("nrf_softdevice::Softdevice::take_flash() called multiple times.") } Flash { _private: PhantomData, } } } static SIGNAL: Signal<Result<(), FlashError>> = Signal::new(); pub(crate) fn on_flash_success() { SIGNAL.signal(Ok(())) } pub(crate) fn on_flash_error() { SIGNAL.signal(Err(FlashError::Failed)) } impl ErrorType for Flash { type Error = FlashError; } impl AsyncReadNorFlash for Flash { const READ_SIZE: usize = 1; type ReadFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn read<'a>(&'a mut self, address: u32, data: &'a mut [u8]) -> Self::ReadFuture<'a> { async move { data.copy_from_slice(unsafe { core::slice::from_raw_parts(address as *const u8, data.len()) }); Ok(()) } } fn capacity(&self) -> usize { 256 * 4096 } } impl AsyncNorFlash for Flash { const WRITE_SIZE: usize = 4; const ERASE_SIZE: usize = 4096; type WriteFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn 
write<'a>(&'a mut self, offset: u32, data: &'a [u8]) -> Self::WriteFuture<'a> { async move { let data_ptr = data.as_ptr(); let data_len = data.len() as u32; let address = offset as usize; if address % 4 != 0 { return Err(FlashError::AddressMisaligned); } if (data_ptr as u32) % 4 != 0 || data_len % 4 != 0 { return Err(FlashError::BufferMisaligned); } let words_ptr = data_ptr as *const u32; let words_len = data_len / 4; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_write(address as _, words_ptr, words_len) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_write err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } type EraseFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn erase<'a>(&'a mut self, from: u32, to: u32) -> Self::EraseFuture<'a> { async move { if from as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } if to as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } let bomb = DropBomb::new(); for address in (from as usize..to as usize).step_by(Self::PAGE_SIZE) { let page_number = (address / Self::PAGE_SIZE) as u32; let ret = unsafe { raw::sd_flash_page_erase(page_number) }; match RawError::convert(ret) { Ok(()) => match SIGNAL.wait().await { Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(_e); } _ => {} }, Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(FlashError::Failed); } } } bomb.defuse(); Ok(()) } } }
use core::future::Future; use core::marker::PhantomData; use core::sync::atomic::{AtomicBool, Ordering}; use embedded_storage::nor_flash::{ErrorType, NorFlashError, NorFlashErrorKind}; use embedded_storage_async::nor_flash::{AsyncNorFlash, AsyncReadNorFlash}; use crate::raw; use crate::util::{DropBomb, Signal}; use crate::{RawError, Softdevice}; #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] #[non_exhaustive] pub enum FlashError { Failed, AddressMisaligned, BufferMisaligned, } impl NorFlashError for FlashError { fn kind(&self) -> NorFlashErrorKind { match self { Self::Failed => NorFlashErrorKind::Other, Self::AddressMisaligned => NorFlashErrorKind::NotAligned, Self::BufferMisaligned => NorFlashErrorKind::NotAligned, } } } pub struct Flash { _private: PhantomData<*mut ()>, } static FLASH_TAKEN: AtomicBool = AtomicBool::new(false); impl Flash { const PAGE_SIZE: usize = 4096; pub fn take(_sd: &Softdevice) -> Flash { if FLASH_TAKEN .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire) .is_err() { panic!("nrf_softdevice::Softdevice::take_flash() called multiple times.") } Flash { _private: PhantomData, } } } static SIGNAL: Signal<Result<(), FlashError>> = Signal::new(); pub(crate) fn on_flash_success() { SIGNAL.signal(Ok(())) } pub(crate) fn on_flash_error() { SIGNAL.signal(Err(FlashError::Failed)) } impl ErrorType for Flash { type Error = FlashError; } impl AsyncReadNorFlash for Flash { const READ_SIZE: usize = 1; type ReadFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn read<'a>(&'a mut self, address: u32, da
w_parts(address as *const u8, data.len()) }); Ok(()) } } fn capacity(&self) -> usize { 256 * 4096 } } impl AsyncNorFlash for Flash { const WRITE_SIZE: usize = 4; const ERASE_SIZE: usize = 4096; type WriteFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn write<'a>(&'a mut self, offset: u32, data: &'a [u8]) -> Self::WriteFuture<'a> { async move { let data_ptr = data.as_ptr(); let data_len = data.len() as u32; let address = offset as usize; if address % 4 != 0 { return Err(FlashError::AddressMisaligned); } if (data_ptr as u32) % 4 != 0 || data_len % 4 != 0 { return Err(FlashError::BufferMisaligned); } let words_ptr = data_ptr as *const u32; let words_len = data_len / 4; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_write(address as _, words_ptr, words_len) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_write err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } type EraseFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn erase<'a>(&'a mut self, from: u32, to: u32) -> Self::EraseFuture<'a> { async move { if from as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } if to as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } let bomb = DropBomb::new(); for address in (from as usize..to as usize).step_by(Self::PAGE_SIZE) { let page_number = (address / Self::PAGE_SIZE) as u32; let ret = unsafe { raw::sd_flash_page_erase(page_number) }; match RawError::convert(ret) { Ok(()) => match SIGNAL.wait().await { Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(_e); } _ => {} }, Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(FlashError::Failed); } } } bomb.defuse(); Ok(()) } } }
ta: &'a mut [u8]) -> Self::ReadFuture<'a> { async move { data.copy_from_slice(unsafe { core::slice::from_ra
function_block-random_span
[ { "content": "pub fn get_value(_sd: &Softdevice, handle: u16, buf: &mut [u8]) -> Result<usize, GetValueError> {\n\n let mut value = raw::ble_gatts_value_t {\n\n p_value: buf.as_mut_ptr(),\n\n len: buf.len() as _,\n\n offset: 0,\n\n };\n\n let ret = unsafe {\n\n raw::sd_ble_gatts_value_get(raw::BLE_CONN_HANDLE_INVALID as u16, handle, &mut value)\n\n };\n\n RawError::convert(ret)?;\n\n\n\n if value.len as usize > buf.len() {\n\n return Err(GetValueError::Truncated);\n\n }\n\n\n\n Ok(value.len as _)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 0, "score": 231440.60740104964 }, { "content": "/// Get cryptographically-securerandom bytes.\n\npub fn random_bytes(_sd: &Softdevice, buf: &mut [u8]) -> Result<(), RandomError> {\n\n if buf.len() > u8::MAX as usize {\n\n return Err(RandomError::BufferTooBig);\n\n }\n\n\n\n let ret = unsafe { raw::sd_rand_application_vector_get(buf[..].as_mut_ptr(), buf.len() as u8) };\n\n match RawError::convert(ret) {\n\n Ok(()) => Ok(()),\n\n Err(RawError::SocRandNotEnoughValues) => Err(RandomError::NotEnoughEntropy),\n\n Err(e) => Err(e.into()),\n\n }\n\n}\n", "file_path": "nrf-softdevice/src/random.rs", "rank": 1, "score": 230024.79323427242 }, { "content": "fn allocate_index<T>(f: impl FnOnce(u8, &mut ConnectionState) -> T) -> Result<T, OutOfConnsError> {\n\n unsafe {\n\n for (i, s) in STATES.iter().enumerate() {\n\n let state = &mut *s.get();\n\n if state.refcount == 0 && state.conn_handle.is_none() {\n\n return Ok(f(i as u8, state));\n\n }\n\n }\n\n Err(OutOfConnsError)\n\n }\n\n}\n\n\n\n// conn_handle -> index mapping. 
Used to make stuff go faster\n\nconst INDEX_NONE: Cell<Option<u8>> = Cell::new(None);\n\nstatic mut INDEX_BY_HANDLE: [Cell<Option<u8>>; CONNS_MAX] = [INDEX_NONE; CONNS_MAX];\n\n\n", "file_path": "nrf-softdevice/src/ble/connection.rs", "rank": 2, "score": 219945.7053665837 }, { "content": "/// Get temperature reading in Celsius\n\n///\n\n/// Note this blocks for ~50us\n\npub fn temperature_celsius(_sd: &Softdevice) -> Result<I30F2, TempError> {\n\n let mut temp: i32 = 0;\n\n let ret = unsafe { raw::sd_temp_get(&mut temp) };\n\n RawError::convert(ret)?;\n\n Ok(I30F2::from_bits(temp))\n\n}\n", "file_path": "nrf-softdevice/src/temperature.rs", "rank": 3, "score": 191535.04066925927 }, { "content": "pub fn get_address(_sd: &Softdevice) -> Address {\n\n unsafe {\n\n let mut addr: raw::ble_gap_addr_t = mem::zeroed();\n\n let ret = raw::sd_ble_gap_addr_get(&mut addr);\n\n unwrap!(RawError::convert(ret), \"sd_ble_gap_addr_get\");\n\n Address::from_raw(addr)\n\n }\n\n}\n\n\n", "file_path": "nrf-softdevice/src/ble/mod.rs", "rank": 4, "score": 184557.6822036316 }, { "content": "pub fn register<S: Server>(sd: &Softdevice) -> Result<S, RegisterError> {\n\n S::register(sd)\n\n}\n\n\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 5, "score": 178267.17835304423 }, { "content": "pub fn set_address(_sd: &Softdevice, addr: &Address) {\n\n unsafe {\n\n let addr = addr.into_raw();\n\n let ret = raw::sd_ble_gap_addr_set(&addr);\n\n unwrap!(RawError::convert(ret), \"sd_ble_gap_addr_set\");\n\n }\n\n}\n", "file_path": "nrf-softdevice/src/ble/mod.rs", "rank": 6, "score": 176484.57485905 }, { "content": "pub fn register_service<S: Service>(_sd: &Softdevice) -> Result<S, RegisterError> {\n\n let uuid = S::uuid();\n\n let mut service_handle: u16 = 0;\n\n let ret = unsafe {\n\n raw::sd_ble_gatts_service_add(\n\n raw::BLE_GATTS_SRVC_TYPE_PRIMARY as u8,\n\n uuid.as_raw_ptr(),\n\n &mut service_handle as _,\n\n )\n\n };\n\n RawError::convert(ret)?;\n\n\n\n 
S::register(service_handle, |char, initial_value| {\n\n let mut cccd_attr_md: raw::ble_gatts_attr_md_t = unsafe { mem::zeroed() };\n\n cccd_attr_md.read_perm = raw::ble_gap_conn_sec_mode_t {\n\n _bitfield_1: raw::ble_gap_conn_sec_mode_t::new_bitfield_1(1, 1),\n\n };\n\n cccd_attr_md.write_perm = raw::ble_gap_conn_sec_mode_t {\n\n _bitfield_1: raw::ble_gap_conn_sec_mode_t::new_bitfield_1(1, 1),\n\n };\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 7, "score": 174685.9374381496 }, { "content": "/// Multiple notifications can be queued. Will fail when the queue is full.\n\npub fn notify_value(conn: &Connection, handle: u16, val: &[u8]) -> Result<(), NotifyValueError> {\n\n let conn_handle = conn.with_state(|state| state.check_connected())?;\n\n\n\n let mut len: u16 = val.len() as _;\n\n let params = raw::ble_gatts_hvx_params_t {\n\n handle,\n\n type_: raw::BLE_GATT_HVX_NOTIFICATION as u8,\n\n offset: 0,\n\n p_data: val.as_ptr() as _,\n\n p_len: &mut len,\n\n };\n\n let ret = unsafe { raw::sd_ble_gatts_hvx(conn_handle, &params) };\n\n RawError::convert(ret)?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum IndicateValueError {\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 8, "score": 165900.38170391376 }, { "content": "pub fn set_value(_sd: &Softdevice, handle: u16, val: &[u8]) -> Result<(), SetValueError> {\n\n let mut value = raw::ble_gatts_value_t {\n\n p_value: val.as_ptr() as _,\n\n len: val.len() as _,\n\n offset: 0,\n\n };\n\n let ret = unsafe {\n\n raw::sd_ble_gatts_value_set(raw::BLE_CONN_HANDLE_INVALID as u16, handle, &mut value)\n\n };\n\n RawError::convert(ret)?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum NotifyValueError {\n\n Disconnected,\n\n Raw(RawError),\n\n}\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", 
"rank": 9, "score": 165888.72016247292 }, { "content": "pub fn gen_bindings(\n\n tmp_dir: &PathBuf,\n\n src_dir: &PathBuf,\n\n dst: &PathBuf,\n\n mut f: impl FnMut(String) -> String,\n\n) {\n\n let mut wrapper = String::new();\n\n\n\n for entry in WalkDir::new(src_dir)\n\n .follow_links(true)\n\n .into_iter()\n\n .filter_map(|e| e.ok())\n\n {\n\n let _f_name = entry.path().to_string_lossy();\n\n if entry.file_type().is_file() {\n\n if entry.file_name().to_string_lossy() == \"nrf_nvic.h\" {\n\n continue;\n\n }\n\n\n\n let data = fs::read_to_string(entry.path()).unwrap();\n", "file_path": "nrf-softdevice-gen/src/main.rs", "rank": 10, "score": 138126.34780854103 }, { "content": "fn start_adv(adv: RawAdvertisement<'_>, config: &Config) -> Result<(), AdvertiseError> {\n\n let mut adv_params: raw::ble_gap_adv_params_t = unsafe { mem::zeroed() };\n\n adv_params.properties.type_ = adv.kind;\n\n adv_params.primary_phy = config.primary_phy as u8;\n\n adv_params.secondary_phy = config.secondary_phy as u8;\n\n adv_params.duration = config.timeout.map(|t| t.max(1)).unwrap_or(0);\n\n adv_params.max_adv_evts = config.max_events.map(|t| t.max(1)).unwrap_or(0);\n\n adv_params.interval = config.interval;\n\n\n\n let map_data = |data: Option<&[u8]>| {\n\n if let Some(data) = data {\n\n assert!(data.len() < u16::MAX as usize);\n\n raw::ble_data_t {\n\n p_data: data.as_ptr() as _,\n\n len: data.len() as u16,\n\n }\n\n } else {\n\n raw::ble_data_t {\n\n p_data: ptr::null_mut(),\n\n len: 0,\n", "file_path": "nrf-softdevice/src/ble/peripheral.rs", "rank": 11, "score": 135716.88583493 }, { "content": "fn get_app_ram_base() -> u32 {\n\n extern \"C\" {\n\n static mut __sdata: u32;\n\n }\n\n\n\n unsafe { &mut __sdata as *mut u32 as u32 }\n\n}\n\n\n", "file_path": "nrf-softdevice/src/softdevice.rs", "rank": 12, "score": 135416.5765830295 }, { "content": "/// This will fail if an indication is already in progress\n\npub fn indicate_value(\n\n conn: &Connection,\n\n handle: u16,\n\n val: 
&[u8],\n\n) -> Result<(), IndicateValueError> {\n\n let conn_handle = conn.with_state(|state| state.check_connected())?;\n\n\n\n let mut len: u16 = val.len() as _;\n\n let params = raw::ble_gatts_hvx_params_t {\n\n handle,\n\n type_: raw::BLE_GATT_HVX_INDICATION as u8,\n\n offset: 0,\n\n p_data: val.as_ptr() as _,\n\n p_len: &mut len,\n\n };\n\n let ret = unsafe { raw::sd_ble_gatts_hvx(conn_handle, &params) };\n\n RawError::convert(ret)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 13, "score": 135404.7554529835 }, { "content": "pub fn try_write_without_response(\n\n conn: &Connection,\n\n handle: u16,\n\n buf: &[u8],\n\n) -> Result<(), TryWriteError> {\n\n let conn_handle = conn.with_state(|state| state.check_connected())?;\n\n\n\n assert!(buf.len() <= u16::MAX as usize);\n\n let params = raw::ble_gattc_write_params_t {\n\n write_op: raw::BLE_GATT_OP_WRITE_CMD as u8,\n\n flags: 0,\n\n handle,\n\n p_value: buf.as_ptr(),\n\n len: buf.len() as u16,\n\n offset: 0,\n\n };\n\n\n\n let ret = unsafe { raw::sd_ble_gattc_write(conn_handle, &params) };\n\n match RawError::convert(ret) {\n\n Err(RawError::Resources) => Err(TryWriteError::BufferFull),\n", "file_path": "nrf-softdevice/src/ble/gatt_client.rs", "rank": 14, "score": 130394.17010714865 }, { "content": "fn on_soc_evt(evt: u32) {\n\n let evt = match SocEvent::try_from(evt) {\n\n Ok(evt) => evt,\n\n Err(_) => panic!(\"Unknown soc evt {:?}\", evt),\n\n };\n\n\n\n info!(\"soc evt {:?}\", evt);\n\n match evt {\n\n SocEvent::FlashOperationError => crate::flash::on_flash_error(),\n\n SocEvent::FlashOperationSuccess => crate::flash::on_flash_success(),\n\n _ => {}\n\n }\n\n}\n\n\n\n// TODO actually derive this from the headers + the ATT_MTU\n\nconst BLE_EVT_MAX_SIZE: u16 = 128;\n\n\n\npub(crate) async fn run() -> ! 
{\n\n poll_fn(|cx| unsafe {\n\n SWI2_WAKER.register(cx.waker());\n", "file_path": "nrf-softdevice/src/events.rs", "rank": 15, "score": 130244.10389680642 }, { "content": "#[defmt::global_logger]\n\nstruct Logger;\n\n\n\n/// Global logger lock.\n\nstatic TAKEN: AtomicBool = AtomicBool::new(false);\n\nstatic INTERRUPTS_TOKEN: AtomicU8 = AtomicU8::new(0);\n\nstatic mut ENCODER: defmt::Encoder = defmt::Encoder::new();\n\n\n\nunsafe impl defmt::Logger for Logger {\n\n fn acquire() {\n\n let token = unsafe { critical_section::acquire() };\n\n\n\n if !TAKEN.load(Ordering::Relaxed) {\n\n // no need for CAS because interrupts are disabled\n\n TAKEN.store(true, Ordering::Relaxed);\n\n\n\n INTERRUPTS_TOKEN.store(token, Ordering::Relaxed);\n\n\n\n // safety: accessing the `static mut` is OK because we have disabled interrupts.\n\n unsafe { ENCODER.start_frame(do_write) }\n\n } else {\n", "file_path": "nrf-softdevice-defmt-rtt/src/lib.rs", "rank": 16, "score": 126081.26335345613 }, { "content": "#[repr(C)]\n\nstruct Header {\n\n id: [u8; 16],\n\n max_up_channels: usize,\n\n max_down_channels: usize,\n\n up_channel: Channel,\n\n}\n\n\n\nconst MODE_MASK: usize = 0b11;\n\n/// Block the application if the RTT buffer is full, wait for the host to read data.\n\nconst MODE_BLOCK_IF_FULL: usize = 2;\n\n/// Don't block if the RTT buffer is full. 
Truncate data to output as much as fits.\n\nconst MODE_NON_BLOCKING_TRIM: usize = 1;\n\n\n\n// TODO make configurable\n\n// NOTE use a power of 2 for best performance\n\nconst SIZE: usize = 1024;\n\n\n\n// make sure we only get shared references to the header/channel (avoid UB)\n\n/// # Safety\n\n/// `Channel` API is not re-entrant; this handle should not be held from different execution\n", "file_path": "nrf-softdevice-defmt-rtt/src/lib.rs", "rank": 17, "score": 126074.9400593052 }, { "content": "struct CriticalSection;\n\ncritical_section::custom_impl!(CriticalSection);\n\n\n\nunsafe impl critical_section::Impl for CriticalSection {\n\n unsafe fn acquire() -> u8 {\n\n let nvic = &*NVIC::ptr();\n\n let nested_cs = CS_FLAG.load(Ordering::SeqCst);\n\n\n\n if !nested_cs {\n\n raw_critical_section(|| {\n\n CS_FLAG.store(true, Ordering::Relaxed);\n\n\n\n // Store the state of irqs.\n\n CS_MASK = nvic.icer[0].read();\n\n\n\n // Disable only not-reserved irqs.\n\n nvic.icer[0].write(!RESERVED_IRQS);\n\n });\n\n }\n\n\n", "file_path": "nrf-softdevice/src/critical_section_impl.rs", "rank": 18, "score": 123160.83190619113 }, { "content": "enum State<T> {\n\n None,\n\n Waiting(Waker),\n\n Signaled(T),\n\n}\n\n\n\nunsafe impl<T: Send> Send for Signal<T> {}\n\nunsafe impl<T: Send> Sync for Signal<T> {}\n\n\n\nimpl<T: Send> Signal<T> {\n\n pub const fn new() -> Self {\n\n Self {\n\n state: UnsafeCell::new(State::None),\n\n }\n\n }\n\n\n\n pub fn signal(&self, val: T) {\n\n unsafe {\n\n cortex_m::interrupt::free(|_| {\n\n let state = &mut *self.state.get();\n", "file_path": "nrf-softdevice/src/util/signal.rs", "rank": 19, "score": 119866.59478398695 }, { "content": "struct WaitFuture<'a, T> {\n\n signal: &'a Signal<T>,\n\n}\n\n\n\nimpl<'a, T: Send> Future for WaitFuture<'a, T> {\n\n type Output = T;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {\n\n unsafe {\n\n cortex_m::interrupt::free(|_| {\n\n let state = &mut *self.signal.state.get();\n\n match 
state {\n\n State::None => {\n\n *state = State::Waiting(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n State::Waiting(w) if w.will_wake(cx.waker()) => Poll::Pending,\n\n State::Waiting(_) => panic!(\"waker overflow\"),\n\n State::Signaled(_) => match mem::replace(state, State::None) {\n\n State::Signaled(res) => Poll::Ready(res),\n\n _ => unreachable!(),\n\n },\n\n }\n\n })\n\n }\n\n }\n\n}\n", "file_path": "nrf-softdevice/src/util/signal.rs", "rank": 20, "score": 112930.28710858915 }, { "content": "fn to_asm<T: ToAsm>(t: T) -> u32 {\n\n t.to_asm()\n\n}\n\n\n\nimpl ToAsm for u32 {\n\n fn to_asm(self) -> u32 {\n\n self\n\n }\n\n}\n\n\n\nimpl ToAsm for u16 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl ToAsm for u8 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 21, "score": 112208.84062311566 }, { "content": "fn to_asm<T: ToAsm>(t: T) -> u32 {\n\n t.to_asm()\n\n}\n\n\n\nimpl ToAsm for u32 {\n\n fn to_asm(self) -> u32 {\n\n self\n\n }\n\n}\n\n\n\nimpl ToAsm for u16 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl ToAsm for u8 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n", "file_path": "nrf-softdevice-mbr/src/bindings.rs", "rank": 22, "score": 112208.84062311566 }, { "content": "fn to_asm<T: ToAsm>(t: T) -> u32 {\n\n t.to_asm()\n\n}\n\n\n\nimpl ToAsm for u32 {\n\n fn to_asm(self) -> u32 {\n\n self\n\n }\n\n}\n\n\n\nimpl ToAsm for u16 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl ToAsm for u8 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 23, "score": 112208.84062311566 }, { "content": "fn to_asm<T: ToAsm>(t: T) -> u32 {\n\n t.to_asm()\n\n}\n\n\n\nimpl ToAsm for u32 {\n\n fn to_asm(self) -> u32 {\n\n self\n\n }\n\n}\n\n\n\nimpl ToAsm for u16 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl ToAsm for u8 {\n\n fn to_asm(self) -> u32 
{\n\n self as u32\n\n }\n", "file_path": "nrf-softdevice-gen/src/main.rs", "rank": 24, "score": 112208.84062311566 }, { "content": "fn to_asm<T: ToAsm>(t: T) -> u32 {\n\n t.to_asm()\n\n}\n\n\n\nimpl ToAsm for u32 {\n\n fn to_asm(self) -> u32 {\n\n self\n\n }\n\n}\n\n\n\nimpl ToAsm for u16 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl ToAsm for u8 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 25, "score": 112208.84062311566 }, { "content": "fn to_asm<T: ToAsm>(t: T) -> u32 {\n\n t.to_asm()\n\n}\n\n\n\nimpl ToAsm for u32 {\n\n fn to_asm(self) -> u32 {\n\n self\n\n }\n\n}\n\n\n\nimpl ToAsm for u16 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl ToAsm for u8 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 26, "score": 112208.84062311566 }, { "content": "fn to_asm<T: ToAsm>(t: T) -> u32 {\n\n t.to_asm()\n\n}\n\n\n\nimpl ToAsm for u32 {\n\n fn to_asm(self) -> u32 {\n\n self\n\n }\n\n}\n\n\n\nimpl ToAsm for u16 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl ToAsm for u8 {\n\n fn to_asm(self) -> u32 {\n\n self as u32\n\n }\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 27, "score": 112208.84062311566 }, { "content": "#[alloc_error_handler]\n\nfn alloc_error(_layout: Layout) -> ! {\n\n panic!(\"Alloc error\");\n\n}\n\n\n\ndefmt::timestamp! 
{\"{=u64}\", {\n\n static COUNT: AtomicUsize = AtomicUsize::new(0);\n\n // NOTE(no-CAS) `timestamps` runs with interrupts disabled\n\n let n = COUNT.load(Ordering::Relaxed);\n\n COUNT.store(n + 1, Ordering::Relaxed);\n\n n as u64\n\n }\n\n}\n", "file_path": "examples/src/example_common.rs", "rank": 28, "score": 111625.4705096816 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_adv_report_type_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_adv_report_type_t>(),\n\n 2usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_adv_report_type_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_adv_report_type_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_adv_report_type_t))\n\n );\n\n}\n\nimpl ble_gap_adv_report_type_t {\n\n #[inline]\n\n pub fn connectable(&self) -> u16 {\n\n unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u16) }\n\n }\n\n #[inline]\n\n pub fn set_connectable(&mut self, val: u16) {\n\n unsafe {\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 29, "score": 109276.2443566108 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_adv_report_type_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_adv_report_type_t>(),\n\n 2usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_adv_report_type_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_adv_report_type_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_adv_report_type_t))\n\n );\n\n}\n\nimpl ble_gap_adv_report_type_t {\n\n #[inline]\n\n pub fn connectable(&self) -> u16 {\n\n unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u16) }\n\n }\n\n #[inline]\n\n pub fn set_connectable(&mut self, val: u16) {\n\n unsafe {\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 30, "score": 109276.2443566108 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_adv_report_type_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_adv_report_type_t>(),\n\n 
2usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_adv_report_type_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_adv_report_type_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_adv_report_type_t))\n\n );\n\n}\n\nimpl ble_gap_adv_report_type_t {\n\n #[inline]\n\n pub fn connectable(&self) -> u16 {\n\n unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u16) }\n\n }\n\n #[inline]\n\n pub fn set_connectable(&mut self, val: u16) {\n\n unsafe {\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 31, "score": 109276.2443566108 }, { "content": "fn do_write(bytes: &[u8]) {\n\n unsafe { handle().write_all(bytes) }\n\n}\n\n\n", "file_path": "nrf-softdevice-defmt-rtt/src/lib.rs", "rank": 32, "score": 108900.66037678836 }, { "content": "fn cfg_set(id: u32, cfg: &raw::ble_cfg_t) {\n\n let app_ram_base = get_app_ram_base();\n\n let ret = unsafe { raw::sd_ble_cfg_set(id, cfg, app_ram_base) };\n\n match RawError::convert(ret) {\n\n Ok(()) => {}\n\n Err(RawError::NoMem) => {}\n\n Err(err) => panic!(\"sd_ble_cfg_set {:?} err {:?}\", id, err),\n\n }\n\n}\n\n\n\nstatic ENABLED: AtomicBool = AtomicBool::new(false);\n\nstatic SOFTDEVICE: Forever<Softdevice> = Forever::new();\n\n\n\nimpl Softdevice {\n\n /// Enable the softdevice.\n\n ///\n\n /// This function takes ownership of the softdevice-reserved peripherals to ensure application code doesn't attempt to use them after enabling.\n\n ///\n\n /// # Panics\n\n /// - Panics if the requested configuration requires more memory than reserved for the softdevice. In that case, you can give more memory to the softdevice by editing the RAM start address in `memory.x`. 
The required start address is logged prior to panic.\n", "file_path": "nrf-softdevice/src/softdevice.rs", "rank": 33, "score": 107476.04143333861 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 34, "score": 107258.86469669561 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 35, "score": 107258.86469669561 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n 
concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 36, "score": 107258.86469669561 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 37, "score": 107258.86469669561 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 38, "score": 107258.86469669561 }, { "content": "fn on_user_mem_release(_ble_evt: *const raw::ble_evt_t) {\n\n trace!(\"on_user_mem_release\");\n\n}\n", "file_path": "nrf-softdevice/src/ble/common.rs", "rank": 39, "score": 106368.23656083581 }, { "content": "fn 
on_user_mem_request(_ble_evt: *const raw::ble_evt_t) {\n\n trace!(\"on_user_mem_request\");\n\n}\n", "file_path": "nrf-softdevice/src/ble/common.rs", "rank": 40, "score": 106368.23656083581 }, { "content": "#[test]\n\nfn bindgen_test_layout_sd_mbr_command_irq_forward_address_set_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<sd_mbr_command_irq_forward_address_set_t>())).address\n", "file_path": "nrf-softdevice-mbr/src/bindings.rs", "rank": 41, "score": 105294.64853158922 }, { "content": "#[test]\n\nfn bindgen_test_layout_sd_mbr_command_irq_forward_address_set_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<sd_mbr_command_irq_forward_address_set_t>())).address\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 42, "score": 105294.64853158922 }, { "content": "#[test]\n\nfn bindgen_test_layout_sd_mbr_command_irq_forward_address_set_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n 
::core::mem::align_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<sd_mbr_command_irq_forward_address_set_t>())).address\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 43, "score": 105294.64853158922 }, { "content": "#[test]\n\nfn bindgen_test_layout_sd_mbr_command_irq_forward_address_set_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<sd_mbr_command_irq_forward_address_set_t>())).address\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 44, "score": 105294.64853158922 }, { "content": "#[test]\n\nfn bindgen_test_layout_sd_mbr_command_irq_forward_address_set_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<sd_mbr_command_irq_forward_address_set_t>())).address\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 45, "score": 105294.64853158922 }, { "content": "#[test]\n\nfn bindgen_test_layout_sd_mbr_command_irq_forward_address_set_t() {\n\n assert_eq!(\n\n 
::core::mem::size_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<sd_mbr_command_irq_forward_address_set_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(sd_mbr_command_irq_forward_address_set_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<sd_mbr_command_irq_forward_address_set_t>())).address\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 46, "score": 105294.64853158922 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 47, "score": 103420.12364413093 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n 
&(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 48, "score": 103420.12364413093 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 49, "score": 103420.12364413093 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 50, "score": 103420.12364413093 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n 
\"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 51, "score": 103420.12364413093 }, { "content": "fn index_by_handle(conn_handle: u16) -> &'static Cell<Option<u8>> {\n\n unsafe { &INDEX_BY_HANDLE[conn_handle as usize] }\n\n}\n", "file_path": "nrf-softdevice/src/ble/connection.rs", "rank": 52, "score": 100750.52281873088 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 53, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n 
::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 54, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 55, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 56, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 57, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 58, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n 
::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 59, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 60, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 61, "score": 99936.0534906122 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 62, "score": 99936.0534906122 }, { "content": " uint8_t type; /**< UUID type, see @ref BLE_UUID_TYPES. If type is @ref BLE_UUID_TYPE_UNKNOWN, the value of uuid is undefined. */\n", "file_path": "softdevice/s140/headers/ble_types.h", "rank": 63, "score": 98619.48566145693 }, { "content": " uint8_t type; /**< UUID type, see @ref BLE_UUID_TYPES. If type is @ref BLE_UUID_TYPE_UNKNOWN, the value of uuid is undefined. */\n", "file_path": "softdevice/s122/headers/ble_types.h", "rank": 64, "score": 98619.48566145693 }, { "content": " uint8_t type; /**< UUID type, see @ref BLE_UUID_TYPES. If type is @ref BLE_UUID_TYPE_UNKNOWN, the value of uuid is undefined. */\n", "file_path": "softdevice/s132/headers/ble_types.h", "rank": 65, "score": 98619.48566145693 }, { "content": " uint8_t type; /**< UUID type, see @ref BLE_UUID_TYPES. If type is @ref BLE_UUID_TYPE_UNKNOWN, the value of uuid is undefined. */\n", "file_path": "softdevice/s113/headers/ble_types.h", "rank": 66, "score": 98619.48566145693 }, { "content": " uint8_t type; /**< UUID type, see @ref BLE_UUID_TYPES. If type is @ref BLE_UUID_TYPE_UNKNOWN, the value of uuid is undefined. 
*/\n", "file_path": "softdevice/s112/headers/ble_types.h", "rank": 67, "score": 98619.48566145693 }, { "content": "#[proc_macro_attribute]\n\npub fn gatt_server(_args: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut struc = syn::parse_macro_input!(item as syn::ItemStruct);\n\n\n\n let struct_fields = match &mut struc.fields {\n\n syn::Fields::Named(n) => n,\n\n _ => {\n\n struc\n\n .ident\n\n .span()\n\n .unwrap()\n\n .error(\"gatt_server structs must have named fields, not tuples.\")\n\n .emit();\n\n return TokenStream::new();\n\n }\n\n };\n\n let fields = struct_fields\n\n .named\n\n .iter()\n\n .cloned()\n\n .collect::<Vec<syn::Field>>();\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 68, "score": 96889.60909868113 }, { "content": "#[proc_macro_attribute]\n\npub fn gatt_client(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let args = syn::parse_macro_input!(args as syn::AttributeArgs);\n\n let mut struc = syn::parse_macro_input!(item as syn::ItemStruct);\n\n\n\n let args = match ServiceArgs::from_list(&args) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return e.write_errors().into();\n\n }\n\n };\n\n\n\n let mut chars = Vec::new();\n\n\n\n let struct_fields = match &mut struc.fields {\n\n syn::Fields::Named(n) => n,\n\n _ => {\n\n struc\n\n .ident\n\n .span()\n\n .unwrap()\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 69, "score": 96889.60909868113 }, { "content": "#[proc_macro_attribute]\n\npub fn gatt_service(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let args = syn::parse_macro_input!(args as syn::AttributeArgs);\n\n let mut struc = syn::parse_macro_input!(item as syn::ItemStruct);\n\n\n\n let args = match ServiceArgs::from_list(&args) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return e.write_errors().into();\n\n }\n\n };\n\n\n\n let mut chars = Vec::new();\n\n\n\n let struct_fields = match &mut struc.fields {\n\n syn::Fields::Named(n) => n,\n\n _ => {\n\n struc\n\n .ident\n\n .span()\n\n .unwrap()\n", 
"file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 70, "score": 96889.60909868113 }, { "content": " uint8_t type; /**< User memory type, see @ref BLE_USER_MEM_TYPES. */\n", "file_path": "softdevice/s122/headers/ble.h", "rank": 71, "score": 85409.23767892859 }, { "content": " uint8_t type; /**< User memory type, see @ref BLE_USER_MEM_TYPES. */\n", "file_path": "softdevice/s112/headers/ble.h", "rank": 72, "score": 85409.23767892859 }, { "content": " uint8_t type; /**< User memory type, see @ref BLE_USER_MEM_TYPES. */\n", "file_path": "softdevice/s132/headers/ble.h", "rank": 73, "score": 85409.23767892859 }, { "content": " uint8_t type; /**< User memory type, see @ref BLE_USER_MEM_TYPES. */\n", "file_path": "softdevice/s140/headers/ble.h", "rank": 74, "score": 85409.23767892859 }, { "content": " uint8_t type; /**< User memory type, see @ref BLE_USER_MEM_TYPES. */\n", "file_path": "softdevice/s113/headers/ble.h", "rank": 75, "score": 85409.23767892859 }, { "content": " uint32_t address; /**< The base address of the interrupt vector table for forwarded interrupts.*/\n", "file_path": "softdevice/mbr/headers/nrf_mbr.h", "rank": 76, "score": 84207.11705344167 }, { "content": " uint8_t type; /**< Type of authorize operation, see @ref BLE_GATTS_AUTHORIZE_TYPES. */\n", "file_path": "softdevice/s112/headers/ble_gatts.h", "rank": 77, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Indication or Notification, see @ref BLE_GATT_HVX_TYPES. */\n", "file_path": "softdevice/s112/headers/ble_gattc.h", "rank": 78, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Indication or Notification, see @ref BLE_GATT_HVX_TYPES. */\n", "file_path": "softdevice/s113/headers/ble_gattc.h", "rank": 79, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Indication or Notification, see @ref BLE_GATT_HVX_TYPES. 
*/\n", "file_path": "softdevice/s122/headers/ble_gattc.h", "rank": 80, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Advertising type. See @ref BLE_GAP_ADV_TYPES. */\n", "file_path": "softdevice/s113/headers/ble_gap.h", "rank": 81, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Indication or Notification, see @ref BLE_GATT_HVX_TYPES. */\n", "file_path": "softdevice/s132/headers/ble_gatts.h", "rank": 82, "score": 84113.8640447148 }, { "content": " ble_gap_adv_report_type_t type; /**< Advertising report type. See @ref ble_gap_adv_report_type_t. */\n", "file_path": "softdevice/s122/headers/ble_gap.h", "rank": 83, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Indication or Notification, see @ref BLE_GATT_HVX_TYPES. */\n", "file_path": "softdevice/s132/headers/ble_gattc.h", "rank": 84, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Type of authorize operation, see @ref BLE_GATTS_AUTHORIZE_TYPES. */\n", "file_path": "softdevice/s113/headers/ble_gatts.h", "rank": 85, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Type of authorize operation, see @ref BLE_GATTS_AUTHORIZE_TYPES. */\n", "file_path": "softdevice/s122/headers/ble_gatts.h", "rank": 86, "score": 84113.8640447148 }, { "content": " uint8_t type; /**< Advertising type. See @ref BLE_GAP_ADV_TYPES. 
*/\n", "file_path": "softdevice/s112/headers/ble_gap.h", "rank": 87, "score": 84113.8640447148 }, { "content": "}\n\n\n\n// Note: this type MUST be layout-compatible with raw::ble_gap_addr_t\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct Address {\n\n // bit 0: is resolved private address\n\n // bits 7-1: type\n\n pub flags: u8,\n\n pub bytes: [u8; 6],\n\n}\n\n\n\nimpl Address {\n\n pub const fn new(address_type: AddressType, bytes: [u8; 6]) -> Self {\n\n Self {\n\n flags: (address_type as u8) << 1,\n\n bytes,\n\n }\n\n }\n\n\n", "file_path": "nrf-softdevice/src/ble/types.rs", "rank": 94, "score": 40.22922665007299 }, { "content": "use core::cell::Cell;\n\nuse core::cell::UnsafeCell;\n\n\n\nuse raw::ble_gap_conn_params_t;\n\n\n\nuse crate::ble::types::{Address, AddressType, Role};\n\nuse crate::raw;\n\nuse crate::RawError;\n\n\n\n#[cfg(any(feature = \"s113\", feature = \"s132\", feature = \"s140\"))]\n\nconst BLE_GAP_DATA_LENGTH_DEFAULT: u8 = 27; // The stack's default data length. 
<27-251>\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub(crate) struct OutOfConnsError;\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct DisconnectedError;\n\n\n", "file_path": "nrf-softdevice/src/ble/connection.rs", "rank": 95, "score": 35.80079317451973 }, { "content": "}\n\n\n\nimpl<P: Packet> From<DisconnectedError> for TxError<P> {\n\n fn from(_err: DisconnectedError) -> Self {\n\n TxError::Disconnected\n\n }\n\n}\n\n\n\nimpl<P: Packet> From<RawError> for TxError<P> {\n\n fn from(err: RawError) -> Self {\n\n TxError::Raw(err)\n\n }\n\n}\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum RxError {\n\n Disconnected,\n\n AllocateFailed,\n\n Raw(RawError),\n\n}\n", "file_path": "nrf-softdevice/src/ble/l2cap.rs", "rank": 96, "score": 35.33833861743252 }, { "content": "use crate::{raw, RawError, Softdevice};\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum RandomError {\n\n BufferTooBig,\n\n NotEnoughEntropy,\n\n Raw(RawError),\n\n}\n\n\n\nimpl From<RawError> for RandomError {\n\n fn from(err: RawError) -> Self {\n\n Self::Raw(err)\n\n }\n\n}\n\n\n\n/// Get cryptographically-securerandom bytes.\n", "file_path": "nrf-softdevice/src/random.rs", "rank": 97, "score": 35.206969746980754 }, { "content": "#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum WriteError {\n\n Disconnected,\n\n Timeout,\n\n Gatt(GattError),\n\n Raw(RawError),\n\n}\n\n\n\nimpl From<DisconnectedError> for WriteError {\n\n fn from(_: DisconnectedError) -> Self {\n\n Self::Disconnected\n\n }\n\n}\n\n\n\nimpl From<GattError> for WriteError {\n\n fn from(err: GattError) -> Self {\n\n Self::Gatt(err)\n\n }\n\n}\n", "file_path": 
"nrf-softdevice/src/ble/gatt_client.rs", "rank": 98, "score": 34.58669371632404 }, { "content": " match *self.state.get() {\n\n State::None => false,\n\n State::Done => false,\n\n State::Running => panic!(\"Portall::call() called reentrantly\"),\n\n State::Waiting(func) => {\n\n (*func)(val);\n\n true\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn wait_once<'a, R, F>(&'a self, mut func: F) -> impl Future<Output = R> + 'a\n\n where\n\n F: FnMut(T) -> R + 'a,\n\n {\n\n assert_thread_mode();\n\n\n\n async move {\n\n let signal = Signal::new();\n", "file_path": "nrf-softdevice/src/util/portal.rs", "rank": 99, "score": 34.46657247961447 } ]
Rust
tests/elements/test_from_telemetry_stage.rs
dmrolfs/proctor
9b2fac5e80e4a8874906a85302af7b34b4433f46
use std::path::PathBuf; use anyhow::Result; use cast_trait_object::DynCastExt; use chrono::{DateTime, Utc}; use claim::*; use once_cell::sync::Lazy; use pretty_assertions::assert_eq; use proctor::elements; use proctor::graph::{stage, Connect, Graph, SinkShape}; use proctor::phases::collection::{make_telemetry_cvs_source, SourceSetting}; use serde::{Deserialize, Serialize}; use serde_test::{assert_tokens, Token}; use super::DEFAULT_LAST_DEPLOYMENT; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] struct Data { #[serde( default, rename = "task.last_failure", serialize_with = "proctor::serde::date::serialize_optional_datetime_format", deserialize_with = "proctor::serde::date::deserialize_optional_datetime" )] pub last_failure: Option<DateTime<Utc>>, #[serde(rename = "cluster.is_deploying")] pub is_deploying: bool, #[serde( rename = "cluster.last_deployment", serialize_with = "proctor::serde::date::serialize_format", deserialize_with = "proctor::serde::date::deserialize" )] pub last_deployment: DateTime<Utc>, } impl Default for Data { fn default() -> Self { Self { last_failure: None, is_deploying: true, last_deployment: *DEFAULT_LAST_DEPLOYMENT, } } } static NOW: Lazy<DateTime<Utc>> = Lazy::new(|| Utc::now()); static NOW_REP: Lazy<String> = Lazy::new(|| format!("{}", NOW.format("%+"))); #[test] fn test_data_serde() { let data = Data { last_failure: Some(NOW.clone()), is_deploying: true, last_deployment: NOW.clone(), }; assert_tokens( &data, &vec![ Token::Struct { name: "Data", len: 3 }, Token::Str("task.last_failure"), Token::Some, Token::Str(&NOW_REP), Token::Str("cluster.is_deploying"), Token::Bool(true), Token::Str("cluster.last_deployment"), Token::Str(&NOW_REP), Token::StructEnd, ], ); } #[tokio::test(flavor = "multi_thread", worker_threads = 4)] async fn test_make_from_telemetry_stage() -> Result<()> { once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING); let main_span = tracing::info_span!("test_make_from_telemetry_stage"); let 
_main_span_guard = main_span.enter(); let base_path = assert_ok!(std::env::current_dir()); let path = base_path.join(PathBuf::from("./tests/data/eligibility.csv")); let setting = SourceSetting::Csv { path }; let mut source = assert_ok!(make_telemetry_cvs_source::<Data, _>("local", &setting)); let convert = elements::make_from_telemetry("convert", true).await; let mut sink = stage::Fold::<_, Data, Vec<Data>>::new("sink", Vec::default(), |mut acc, item| { acc.push(item); acc }); let rx_acc = assert_some!(sink.take_final_rx()); let source_stage = assert_some!(source.stage.take()); (source_stage.outlet(), convert.inlet()).connect().await; (convert.outlet(), sink.inlet()).connect().await; let mut g = Graph::default(); g.push_back(source_stage.dyn_upcast()).await; g.push_back(convert.dyn_upcast()).await; g.push_back(Box::new(sink)).await; assert_ok!(g.run().await); let actual = assert_ok!(rx_acc.await); let expected = vec![ Data { last_failure: None, is_deploying: true, last_deployment: DateTime::parse_from_str("2014-11-28T10:11:37.246310806Z", "%+")?.with_timezone(&Utc), }, Data { last_failure: Some(DateTime::parse_from_str("2014-11-28T12:45:59.324310806Z", "%+")?.with_timezone(&Utc)), is_deploying: false, last_deployment: DateTime::parse_from_str("2021-03-08T23:57:12.918473937Z", "%+")?.with_timezone(&Utc), }, ]; assert_eq!(actual, expected); Ok(()) }
use std::path::PathBuf; use anyhow::Result; use cast_trait_object::DynCastExt; use chrono::{DateTime, Utc}; use claim::*; use once_cell::sync::Lazy; use pretty_assertions::assert_eq; use proctor::elements; use proctor::graph::{stage, Connect, Graph, SinkShape}; use proctor::phases::collection::{make_telemetry_cvs_source, SourceSetting}; use serde::{Deserialize, Serialize}; use serde_test::{assert_tokens, Token}; use super::DEFAULT_LAST_DEPLOYMENT; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] struct Data { #[serde( default, rename = "task.last_failure", serialize_with = "proctor::serde::date::serialize_optional_datetime_format", deserialize_with = "proctor::serde::date::deserialize_optional_datetime" )] pub last_failure: Option<DateTime<Utc>>, #[serde(rename = "cluster.is_deploying")] pub is_deploying: bool, #[serde( rename = "cluster.last_deployment", serialize_with = "proctor::serde::date::serialize_format", deserialize_with = "proctor::serde::date::deserialize" )] pub last_deployment: DateTime<Utc>, } impl Default for Data { fn default() -> Self { Self { last_failure: None, is_deploying: true, last_deployment: *DEFAULT_LAST_DEPLOYMENT, } } } static NOW: Lazy<DateTime<Utc>> = Lazy::new(|| Utc::now()); static NOW_REP: Lazy<String> = Lazy::new(|| format!("{}", NOW.format("%+"))); #[test] fn test_data_serde() {
assert_tokens( &data, &vec![ Token::Struct { name: "Data", len: 3 }, Token::Str("task.last_failure"), Token::Some, Token::Str(&NOW_REP), Token::Str("cluster.is_deploying"), Token::Bool(true), Token::Str("cluster.last_deployment"), Token::Str(&NOW_REP), Token::StructEnd, ], ); } #[tokio::test(flavor = "multi_thread", worker_threads = 4)] async fn test_make_from_telemetry_stage() -> Result<()> { once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING); let main_span = tracing::info_span!("test_make_from_telemetry_stage"); let _main_span_guard = main_span.enter(); let base_path = assert_ok!(std::env::current_dir()); let path = base_path.join(PathBuf::from("./tests/data/eligibility.csv")); let setting = SourceSetting::Csv { path }; let mut source = assert_ok!(make_telemetry_cvs_source::<Data, _>("local", &setting)); let convert = elements::make_from_telemetry("convert", true).await; let mut sink = stage::Fold::<_, Data, Vec<Data>>::new("sink", Vec::default(), |mut acc, item| { acc.push(item); acc }); let rx_acc = assert_some!(sink.take_final_rx()); let source_stage = assert_some!(source.stage.take()); (source_stage.outlet(), convert.inlet()).connect().await; (convert.outlet(), sink.inlet()).connect().await; let mut g = Graph::default(); g.push_back(source_stage.dyn_upcast()).await; g.push_back(convert.dyn_upcast()).await; g.push_back(Box::new(sink)).await; assert_ok!(g.run().await); let actual = assert_ok!(rx_acc.await); let expected = vec![ Data { last_failure: None, is_deploying: true, last_deployment: DateTime::parse_from_str("2014-11-28T10:11:37.246310806Z", "%+")?.with_timezone(&Utc), }, Data { last_failure: Some(DateTime::parse_from_str("2014-11-28T12:45:59.324310806Z", "%+")?.with_timezone(&Utc)), is_deploying: false, last_deployment: DateTime::parse_from_str("2021-03-08T23:57:12.918473937Z", "%+")?.with_timezone(&Utc), }, ]; assert_eq!(actual, expected); Ok(()) }
let data = Data { last_failure: Some(NOW.clone()), is_deploying: true, last_deployment: NOW.clone(), };
assignment_statement
[ { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize_format<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let datetime_rep = format!(\"{}\", date.format(FORMAT));\n\n serializer.serialize_str(datetime_rep.as_str())\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 0, "score": 224787.33818049572 }, { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize_optional_datetime_format<S>(date: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let s = date.map(|d| format!(\"{}\", d.format(FORMAT)));\n\n\n\n match s {\n\n None => serializer.serialize_none(),\n\n Some(s) => serializer.serialize_some(&s),\n\n }\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 1, "score": 211989.7685877534 }, { "content": "#[tracing::instrument(level = \"debug\", skip(deserializer))]\n\npub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n{\n\n // deserializer.deserialize_str(DateTimeFromIso8601Rfc3339FormatVisitor)\n\n deserializer.deserialize_any(DateTimeVisitor)\n\n}\n\n\n\n// pub fn deserialize_format<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>\n\n// where\n\n// D: de::Deserializer<'de>,\n\n// {\n\n// deserializer.deserialize_str(DateTimeFromIso8601Rfc3339FormatVisitor)\n\n// }\n\n\n", "file_path": "src/serde/date.rs", "rank": 2, "score": 209807.841359213 }, { "content": "pub trait AppData: Debug + Clone + Send + Sync + 'static {}\n\n\n\n/// AppData is automatically derived for types compatible with graph stage processing. 
If needed,\n\n/// the AppData trait may also be included in the #[derive] specification.\n\nimpl<T: Debug + Clone + Send + Sync + 'static> AppData for T {}\n", "file_path": "src/app_data.rs", "rank": 3, "score": 203624.20511695865 }, { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n // todo changing DateTime<Utc> serde to match From<TelemetryValue>\n\n // let datetime_table = format!(\"{}\", date.format(FORMAT));\n\n let datetime_table = table_from_datetime(date);\n\n let mut map = serializer.serialize_map(Some(datetime_table.len()))?;\n\n for (k, v) in datetime_table {\n\n map.serialize_entry(&k, &v)?;\n\n }\n\n map.end()\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 4, "score": 198771.51241806065 }, { "content": "#[tracing::instrument(level = \"debug\", skip(deserializer))]\n\npub fn deserialize_optional_datetime<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n{\n\n deserializer.deserialize_option(OptionalDateTimeMapVisitor)\n\n}\n\n\n\n// pub fn deserialize_optional_datetime_format<'de, D>(deserializer: D) ->\n\n// Result<Option<DateTime<Utc>>, D::Error> where\n\n// D: de::Deserializer<'de>,\n\n// {\n\n// deserializer.deserialize_option(OptionalDateTimeFormatVisitor)\n\n// }\n\n\n", "file_path": "src/serde/date.rs", "rank": 5, "score": 197255.13900364563 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Data {\n\n #[serde(default)]\n\n pub pos: Option<i64>,\n\n #[serde(default)]\n\n pub value: Option<f64>,\n\n #[serde(default)]\n\n pub cat: String,\n\n}\n\n\n\nimpl Into<Telemetry> for Data {\n\n fn into(self) -> Telemetry {\n\n let mut telemetry = Telemetry::default();\n\n telemetry.insert(\"pos\".to_string(), self.pos.into());\n\n telemetry.insert(\"value\".to_string(), self.value.into());\n\n 
telemetry.insert(\"cat\".to_string(), self.cat.into());\n\n telemetry\n\n }\n\n}\n\n\n\nimpl TryFrom<Telemetry> for Data {\n", "file_path": "tests/elements/test_basic_clearinghouse_subscription.rs", "rank": 8, "score": 185383.22469623273 }, { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize_optional_datetime_map<S>(date: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let s = date.as_ref().map(table_from_datetime);\n\n\n\n match s {\n\n None => serializer.serialize_none(),\n\n Some(s) => serializer.serialize_some(&s),\n\n }\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 9, "score": 184473.8729676744 }, { "content": "#[proc_macro_derive(AppData)]\n\npub fn app_data_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n impl_app_data_macro(&ast)\n\n}\n\n\n", "file_path": "proctor_derive/src/lib.rs", "rank": 10, "score": 182477.78288873413 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Data {\n\n #[serde(\n\n rename = \"task.last_failure\",\n\n default,\n\n serialize_with = \"proctor::serde::date::serialize_optional_datetime_map\",\n\n deserialize_with = \"proctor::serde::date::deserialize_optional_datetime\"\n\n )]\n\n pub last_failure: Option<DateTime<Utc>>,\n\n #[serde(rename = \"cluster.is_deploying\")]\n\n pub is_deploying: bool,\n\n #[serde(rename = \"cluster.last_deployment\", with = \"proctor::serde\")]\n\n pub latest_deployment: DateTime<Utc>,\n\n}\n\n\n\nimpl Default for Data {\n\n fn default() -> Self {\n\n Self {\n\n last_failure: None,\n\n is_deploying: true,\n\n latest_deployment: Utc.datetime_from_str(\"1970-08-30 11:32:09\", \"%Y-%m-%d %H:%M:%S\").unwrap(),\n", "file_path": "tests/elements/test_make_telemetry_cvs_source.rs", "rank": 11, "score": 182203.85145054723 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nstruct Data {\n\n pub last_failure: 
Option<DateTime<Utc>>,\n\n pub is_deploying: bool,\n\n pub latest_deployment: DateTime<Utc>,\n\n}\n\n\n\nimpl Default for Data {\n\n fn default() -> Self {\n\n Self {\n\n last_failure: None,\n\n is_deploying: true,\n\n latest_deployment: Utc.datetime_from_str(\"1970-08-30 11:32:09\", \"%Y-%m-%d %H:%M:%S\").unwrap(),\n\n }\n\n }\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 4)]\n\nasync fn test_make_telemetry_rest_api_source() -> Result<()> {\n\n once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING);\n\n // fixtures::init_tracing(\"test_make_telemetry_rest_api_source\");\n", "file_path": "tests/elements/test_make_telemetry_rest_api_source.rs", "rank": 12, "score": 179176.31905536726 }, { "content": "#[async_trait]\n\npub trait Connect<T> {\n\n async fn connect(self);\n\n}\n\n\n\n#[async_trait]\n\nimpl<T: AppData> Connect<T> for (Outlet<T>, Inlet<T>) {\n\n async fn connect(mut self) {\n\n let outlet = self.0;\n\n let inlet = self.1;\n\n connect_out_to_in(outlet, inlet).await\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<T: AppData> Connect<T> for (&Outlet<T>, &Inlet<T>) {\n\n async fn connect(mut self) {\n\n let outlet = self.0.clone();\n\n let inlet = self.1.clone();\n\n connect_out_to_in(outlet, inlet).await\n\n }\n", "file_path": "src/graph/port.rs", "rank": 13, "score": 166170.39393907753 }, { "content": "pub fn deserialize_from_str<'de, S, D>(deserializer: D) -> Result<S, D::Error>\n\nwhere\n\n S: FromStr,\n\n S::Err: fmt::Display,\n\n D: Deserializer<'de>,\n\n{\n\n let s: String = Deserialize::deserialize(deserializer)?;\n\n S::from_str(&s).map_err(de::Error::custom)\n\n}\n\n\n", "file_path": "src/serde/mod.rs", "rank": 14, "score": 164075.0074599661 }, { "content": "pub fn deserialize_duration_secs<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Duration, D::Error> {\n\n let secs: u64 = Deserialize::deserialize(deserializer)?;\n\n Ok(Duration::from_secs(secs))\n\n}\n", "file_path": "src/serde/mod.rs", "rank": 15, "score": 
163411.28400503716 }, { "content": "pub fn serialize_duration_secs<S: Serializer>(that: &Duration, serializer: S) -> Result<S::Ok, S::Error> {\n\n serializer.serialize_u64(that.as_secs())\n\n}\n\n\n", "file_path": "src/serde/mod.rs", "rank": 16, "score": 162203.89886485907 }, { "content": "#[test]\n\nfn test_item_serde() {\n\n Lazy::force(&proctor::tracing::TEST_TRACING);\n\n let main_span = tracing::info_span!(\"test_item_serde\");\n\n let _ = main_span.enter();\n\n\n\n let item = TestItem::new(std::f64::consts::PI, 3, DT_1.clone());\n\n let json = serde_json::to_string(&item).unwrap();\n\n assert_eq!(\n\n json,\n\n format!(\n\n r#\"{{\"input_messages_per_sec\":{},\"inbox_lag\":3,\"my_timestamp\":{}}}\"#,\n\n std::f64::consts::PI,\n\n *DT_1_TS\n\n )\n\n );\n\n\n\n let telemetry = Telemetry::try_from(&item).unwrap();\n\n assert_eq!(\n\n telemetry,\n\n maplit::hashmap! {\n", "file_path": "tests/test_policy_phase.rs", "rank": 17, "score": 159508.54001327994 }, { "content": "#[test]\n\n#[ignore = \"intermittent (false?) 
error wrt timestamp map order and key resolution\"]\n\nfn test_context_serde() {\n\n Lazy::force(&proctor::tracing::TEST_TRACING);\n\n let main_span = tracing::info_span!(\"test_context_serde\");\n\n let _ = main_span.enter();\n\n\n\n let now = Timestamp::now();\n\n let corr: Id<TestPolicyPhaseContext> = LabeledRealtimeIdGenerator::default().next_id();\n\n // let corr =\n\n // LabeledRealtimeIdGenerator<TestPolicyPhaseContext>::single_node(IdPrettifier::default());\n\n\n\n let context = TestPolicyPhaseContext {\n\n timestamp: now.clone(),\n\n correlation_id: corr.clone(),\n\n task_status: TestTaskStatus { last_failure: None },\n\n cluster_status: TestClusterStatus {\n\n location_code: 3,\n\n is_deploying: false,\n\n last_deployment: DT_1.clone(),\n\n },\n\n custom: TableValue::new(),\n", "file_path": "tests/test_policy_phase.rs", "rank": 18, "score": 159508.1553978323 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nstruct PolicyData {\n\n pub location_code: u32,\n\n}\n\n\n", "file_path": "tests/test_policy_phase.rs", "rank": 19, "score": 159042.46269373794 }, { "content": "/// Register a subscriber as global default to process span data.\n\n///\n\n/// It should be only called once!\n\npub fn init_subscriber(subscriber: impl Subscriber + Sync + Send) {\n\n LogTracer::init().expect(\"Failed to set logger\");\n\n set_global_default(subscriber).expect(\"Failed to set subscriber\");\n\n}\n", "file_path": "src/tracing.rs", "rank": 20, "score": 158890.26275963138 }, { "content": "pub fn serialize_to_str<T, S>(that: T, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n T: AsRef<str>,\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(that.as_ref())\n\n}\n\n\n", "file_path": "src/serde/mod.rs", "rank": 21, "score": 158627.21305053966 }, { "content": "#[test]\n\nfn test_policy_serde_and_render() {\n\n #[derive(Debug, PartialEq, Serialize, Deserialize)]\n\n struct TemplateData {\n\n pub basis: String,\n\n pub health_lag: i64,\n\n 
#[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub max_healthy_cpu_load: Option<f64>,\n\n #[serde(flatten, skip_serializing_if = \"BTreeMap::is_empty\")]\n\n pub custom: BTreeMap<String, String>,\n\n }\n\n\n\n impl Default for TemplateData {\n\n fn default() -> Self {\n\n Self {\n\n basis: \"policy_basis\".to_string(),\n\n health_lag: 30_000,\n\n max_healthy_cpu_load: None,\n\n custom: BTreeMap::default(),\n\n }\n\n }\n", "file_path": "tests/elements/test_policy_filter.rs", "rank": 22, "score": 153646.37967565889 }, { "content": "#[derive(Debug, Default, Clone, Serialize, Deserialize)]\n\nstruct PolicyTemplateData {\n\n pub location_code: u32,\n\n #[serde(default)]\n\n pub lag: Option<u32>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub cat: Option<String>,\n\n #[serde(flatten, skip_serializing_if = \"BTreeMap::is_empty\")]\n\n pub custom: BTreeMap<String, String>,\n\n}\n\n\n", "file_path": "tests/elements/test_policy_filter.rs", "rank": 23, "score": 152513.22919895753 }, { "content": "#[proc_macro_derive(ProctorContext)]\n\npub fn proctor_context_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n impl_proctor_context_macro(&ast)\n\n}\n\n\n", "file_path": "proctor_derive/src/lib.rs", "rank": 24, "score": 149428.93561486359 }, { "content": "fn impl_app_data_macro(ast: &syn::DeriveInput) -> TokenStream {\n\n let name = &ast.ident;\n\n let gen = quote! 
{\n\n impl AppData for #name {}\n\n };\n\n gen.into()\n\n}\n\n\n", "file_path": "proctor_derive/src/lib.rs", "rank": 25, "score": 148466.72905411283 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Data {\n\n pub pos: Option<usize>,\n\n pub value: Option<f64>,\n\n pub cat: String,\n\n}\n\n\n\nimpl Default for Data {\n\n fn default() -> Self {\n\n Self { pos: None, value: None, cat: \"\".to_string() }\n\n }\n\n}\n\n\n\nimpl Into<Telemetry> for Data {\n\n fn into(self) -> Telemetry {\n\n let mut telemetry = Telemetry::default();\n\n telemetry.insert(\"pos\".to_string(), self.pos.into());\n\n telemetry.insert(\"value\".to_string(), self.value.into());\n\n telemetry.insert(\"cat\".to_string(), self.cat.into());\n\n telemetry\n\n }\n", "file_path": "examples/sandbox.rs", "rank": 26, "score": 147268.08182126767 }, { "content": "#[async_trait]\n\npub trait ProctorContext: AppData + SubscriptionRequirements + PolarClass + Serialize + DeserializeOwned {\n\n type Error: std::error::Error + From<anyhow::Error> + Send + Sync;\n\n\n\n fn custom(&self) -> telemetry::TableType;\n\n\n\n #[tracing::instrument(level = \"info\", skip(tx_clearinghouse_api))]\n\n async fn connect_context(\n\n subscription: TelemetrySubscription, tx_clearinghouse_api: &ClearinghouseApi,\n\n ) -> Result<SubscriptionChannel<Self>, Self::Error> {\n\n let channel = SubscriptionChannel::new(subscription.name())\n\n .await\n\n .map_err(|err| err.into())?;\n\n\n\n let (cmd, rx_ack) = ClearinghouseCmd::subscribe(subscription, channel.subscription_receiver.clone());\n\n\n\n tx_clearinghouse_api.send(cmd).map_err(|err| err.into())?;\n\n rx_ack.await.map_err(|err| err.into())?;\n\n\n\n Ok(channel)\n\n }\n\n}\n", "file_path": "src/elements/mod.rs", "rank": 27, "score": 142405.602077676 }, { "content": "#[inline]\n\npub fn track_errors(stage: &str, error: &ProctorError) {\n\n GRAPH_ERRORS.with_label_values(&[stage, error.label().as_ref()]).inc()\n\n}\n\n\n\n/// A Graph represents 
a runnable stream processing graph.\n\n///\n\n/// A Graph has one or `Source` nodes, zero or more `Through` nodes and one or more `Sink` nodes.\n\n/// Each node is connected to each other via their respective `Inlet` and `Outlet` `Ports`.\n\n///\n\n/// In order to use a `Graph`, its nodes must be registered and connected. Then the `Graph` may be\n\n/// ran, which will spawn asynchronous tasks for each node (via `tokio::spawn`). Once run, the\n\n/// underlying graph nodes will executed until they source(s) complete or the graph is aborted.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use proctor::graph::stage::{self, Stage};\n\n/// use proctor::graph::{self, Connect, Graph};\n\n/// use proctor::graph::{SinkShape, SourceShape, ThroughShape};\n\n///\n", "file_path": "src/graph.rs", "rank": 28, "score": 141763.01745938044 }, { "content": "pub trait ThroughStage<In, Out>: Stage + ThroughShape<In = In, Out = Out> + 'static {}\n\nimpl<In, Out, T: 'static + Stage + ThroughShape<In = In, Out = Out>> ThroughStage<In, Out> for T {}\n\n\n\npub static STAGE_EVAL_TIME: Lazy<HistogramVec> = Lazy::new(|| {\n\n HistogramVec::new(\n\n HistogramOpts::new(\n\n \"stage_eval_time\",\n\n \"Time spent in a stage's event evaluation cycle in seconds\",\n\n ),\n\n &[\"stage\"],\n\n )\n\n .expect(\"failed creating stage_eval_time metric\")\n\n});\n\n\n", "file_path": "src/graph/stage.rs", "rank": 29, "score": 139527.6445023386 }, { "content": "#[inline]\n\npub fn start_stage_eval_time(stage: &str) -> HistogramTimer {\n\n STAGE_EVAL_TIME.with_label_values(&[stage]).start_timer()\n\n}\n\n\n\n/// Behavior driving graph stage lifecycle.\n\n///\n\n/// macro dyn_upcast enables the upcast conversion of concrete stages into the base Stage type when\n\n/// placed in a graph. 
See https://github.com/Lej77/cast_trait_object README for background.\n", "file_path": "src/graph/stage.rs", "rank": 30, "score": 138977.73788744674 }, { "content": "pub trait SinkStage<In>: Stage + SinkShape<In = In> + 'static {}\n\nimpl<In, T: 'static + Stage + SinkShape<In = In>> SinkStage<In> for T {}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 31, "score": 138448.5851743107 }, { "content": "pub trait SourceStage<Out>: Stage + SourceShape<Out = Out> + 'static {}\n\nimpl<Out, T: 'static + Stage + SourceShape<Out = Out>> SourceStage<Out> for T {}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 32, "score": 138448.5851743107 }, { "content": "#[tracing::instrument(level = \"debug\")]\n\nfn datetime_from_table(datetime: HashMap<String, i64>) -> DateTime<Utc> {\n\n Utc.timestamp(\n\n datetime.get(SECS_KEY).copied().unwrap_or(0),\n\n datetime.get(NANOS_KEY).copied().unwrap_or(0) as u32,\n\n )\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 33, "score": 133990.59733131411 }, { "content": "#[tracing::instrument(level = \"debug\")]\n\nfn table_from_datetime(datetime: &DateTime<Utc>) -> HashMap<String, i64> {\n\n maplit::hashmap! {\n\n SECS_KEY.to_string() => datetime.timestamp(),\n\n NANOS_KEY.to_string() => datetime.timestamp_subsec_nanos() as i64,\n\n }\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 34, "score": 133990.59733131411 }, { "content": "#[serde_as]\n\n#[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct TestItem {\n\n #[polar(attribute)]\n\n #[serde(flatten)]\n\n pub flow: TestFlowMetrics,\n\n\n\n #[polar(attribute)]\n\n pub inbox_lag: i32, // todo: TelemetryValue cannot deser U32!!!! 
why!?\n\n\n\n #[serde_as(as = \"TimestampSeconds\")]\n\n // #[serde(with = \"proctor::serde\")]\n\n pub my_timestamp: DateTime<Utc>,\n\n}\n\n\n\nimpl TestItem {\n\n pub fn new(input_messages_per_sec: f64, inbox_lag: i32, ts: DateTime<Utc>) -> Self {\n\n Self {\n\n flow: TestFlowMetrics { input_messages_per_sec },\n\n inbox_lag,\n\n my_timestamp: ts,\n\n }\n", "file_path": "tests/test_policy_phase.rs", "rank": 35, "score": 123705.40579621446 }, { "content": "#[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct TestItem {\n\n #[polar(attribute)]\n\n pub flow: TestFlowMetrics,\n\n\n\n #[serde(with = \"proctor::serde\")]\n\n pub timestamp: DateTime<Utc>,\n\n\n\n #[polar(attribute)]\n\n pub inbox_lag: u32,\n\n}\n\n\n\nimpl TestItem {\n\n pub fn new(input_messages_per_sec: f64, ts: DateTime<Utc>, inbox_lag: u32) -> Self {\n\n Self {\n\n flow: TestFlowMetrics { input_messages_per_sec },\n\n timestamp: ts.into(),\n\n inbox_lag,\n\n }\n\n }\n\n\n", "file_path": "tests/elements/test_policy_filter.rs", "rank": 36, "score": 121731.72348248264 }, { "content": "#[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct TestContext {\n\n #[polar(attribute)]\n\n pub location_code: u32,\n\n custom: telemetry::TableValue,\n\n}\n\n\n\nimpl TestContext {\n\n pub fn new(location_code: u32) -> Self {\n\n Self {\n\n location_code,\n\n custom: telemetry::TableValue::default(),\n\n }\n\n }\n\n\n\n pub fn with_custom(self, custom: telemetry::TableValue) -> Self {\n\n Self { custom, ..self }\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "tests/elements/test_policy_filter.rs", "rank": 37, "score": 121731.72348248264 }, { "content": "#[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct TestFlowMetrics {\n\n #[polar(attribute)]\n\n pub input_messages_per_sec: f64,\n\n}\n\n\n", "file_path": "tests/test_policy_phase.rs", "rank": 38, "score": 121731.72348248264 }, { "content": "struct TestFlow {\n\n pub graph_handle: 
JoinHandle<()>,\n\n pub tx_item_source_api: stage::ActorSourceApi<TestItem>,\n\n pub tx_env_source_api: stage::ActorSourceApi<TestContext>,\n\n pub tx_policy_api: elements::PolicyFilterApi<TestContext, PolicyTemplateData>,\n\n pub rx_policy_monitor: elements::PolicyFilterMonitor<TestItem, TestContext>,\n\n pub tx_sink_api: stage::FoldApi<Vec<PolicyOutcome<TestItem, TestContext>>>,\n\n pub rx_sink: Option<oneshot::Receiver<Vec<PolicyOutcome<TestItem, TestContext>>>>,\n\n}\n\n\n\nimpl TestFlow {\n\n pub async fn new(policy: impl AsRef<str>, data: PolicyTemplateData) -> Result<Self, PolicyError> {\n\n Self::with_query(policy, data, \"eligible\").await\n\n }\n\n\n\n pub async fn with_query(\n\n policy: impl AsRef<str>, data: PolicyTemplateData, query: impl Into<String>,\n\n ) -> Result<Self, PolicyError> {\n\n let item_source = stage::ActorSource::<TestItem>::new(\"item_source\");\n\n let tx_item_source_api = item_source.tx_api();\n", "file_path": "tests/elements/test_policy_filter.rs", "rank": 39, "score": 121717.29960990846 }, { "content": "#[derive(Debug)]\n\nstruct TestPolicy {\n\n sources: Vec<PolicySource>,\n\n template_data: PolicyTemplateData,\n\n query: String,\n\n}\n\n\n\nimpl TestPolicy {\n\n #[tracing::instrument(\n\n level=\"info\",\n\n name=\"TestPolicy::with_query\",\n\n skip(policy, query),\n\n fields(policy=%policy.as_ref())\n\n )]\n\n pub fn with_query(policy: impl AsRef<str>, template_data: PolicyTemplateData, query: impl Into<String>) -> Self {\n\n let polar = polar_core::polar::Polar::new();\n\n let mut registry = PolicyRegistry::new();\n\n let source = assert_ok!(PolicySource::from_template_string(\n\n TestPolicy::base_template_name(),\n\n policy\n\n ));\n", "file_path": "tests/elements/test_policy_filter.rs", "rank": 40, "score": 121717.29960990846 }, { "content": "#[derive(Debug)]\n\nstruct TestPolicyB {\n\n sources: Vec<PolicySource>,\n\n data: HashMap<String, String>,\n\n subscription_extension: HashSet<String>,\n\n}\n\n\n\nimpl TestPolicyB 
{\n\n pub fn new<S: AsRef<str>>(policy: S, data: HashMap<String, String>) -> Self {\n\n Self::new_with_extension(policy, data, HashSet::<String>::new())\n\n }\n\n\n\n #[tracing::instrument(\n\n level = \"info\",\n\n name = \"TestPolicyB::new_with_extension\",\n\n skip(policy, subscription_extension)\n\n )]\n\n pub fn new_with_extension<S0, S1>(\n\n policy: S0, data: HashMap<String, String>, subscription_extension: HashSet<S1>,\n\n ) -> Self\n\n where\n", "file_path": "tests/test_policy_phase.rs", "rank": 41, "score": 121717.29960990846 }, { "content": "/// Compose multiple layers into a `tracing`'s subscriber.\n\n///\n\n/// # Implementation Notes\n\n///\n\n/// We are using `impl Subscriber` as return type to avoid having to spell out the actual type of\n\n/// the returned subscriber, which is indeed quite complex.\n\n/// We need to explicitly call out that returned subscriber is `Send` and `Sync` to make it\n\n/// possible to pass it to `init_subscriber` later on.\n\npub fn get_subscriber<S0, S1, W>(name: S0, env_filter: S1, sink: W) -> impl Subscriber + Sync + Send\n\nwhere\n\n S0: Into<String>,\n\n S1: AsRef<str>,\n\n W: for<'a> MakeWriter<'a> + Send + Sync + 'static,\n\n{\n\n let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));\n\n\n\n let (flame_subscriber, _guard) = FlameLayer::with_file(\"./tracing.folded\").unwrap();\n\n\n\n let formatting_layer = BunyanFormattingLayer::new(name.into(), sink);\n\n\n\n Registry::default()\n\n .with(env_filter)\n\n .with(flame_subscriber)\n\n .with(JsonStorageLayer)\n\n .with(formatting_layer)\n\n}\n\n\n", "file_path": "src/tracing.rs", "rank": 42, "score": 119866.41681544094 }, { "content": "#[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct TestFlowMetrics {\n\n #[polar(attribute)]\n\n pub input_messages_per_sec: f64,\n\n}\n\n\n", "file_path": "tests/elements/test_policy_filter.rs", "rank": 43, "score": 119853.8959104669 }, { "content": 
"#[derive(Debug)]\n\nstruct TestPolicyA<T> {\n\n custom_fields: Option<HashSet<String>>,\n\n policies: Vec<PolicySource>,\n\n policy_template_data: Option<PolicyData>,\n\n data_marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T> TestPolicyA<T> {\n\n pub fn new(settings: &PolicySettings<PolicyData>, policy_template_data: Option<PolicyData>) -> Self {\n\n Self {\n\n custom_fields: None,\n\n policies: settings.policies.clone(),\n\n policy_template_data,\n\n data_marker: PhantomData,\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn with_custom(self, custom_fields: HashSet<String>) -> Self {\n\n Self { custom_fields: Some(custom_fields), ..self }\n", "file_path": "tests/test_policy_phase.rs", "rank": 44, "score": 118892.99570911983 }, { "content": "fn impl_proctor_context_macro(ast: &syn::DeriveInput) -> TokenStream {\n\n let name = &ast.ident;\n\n let gen = quote! {\n\n impl ProctorContext for #name {}\n\n };\n\n gen.into()\n\n}", "file_path": "proctor_derive/src/lib.rs", "rank": 45, "score": 116240.24658348624 }, { "content": "fn make_test_policy<T>(\n\n settings: &PolicySettings<PolicyData>,\n\n) -> impl Policy<T, TestPolicyPhaseContext, (T, TestPolicyPhaseContext), TemplateData = PolicyData>\n\nwhere\n\n T: AppData + ToPolar + Serialize + DeserializeOwned,\n\n{\n\n let data = settings.template_data.clone();\n\n TestPolicyA::new(settings, data)\n\n}\n\n\n\nconst POLICY_A_TEMPLATE_NAME: &'static str = \"policy_a\";\n\n\n", "file_path": "tests/test_policy_phase.rs", "rank": 46, "score": 115998.00497669225 }, { "content": "#[tracing::instrument(level = \"info\", skip(templates))]\n\npub fn render_template_policy<'a, T, D>(templates: T, name: &str, data: Option<&D>) -> Result<String, PolicyError>\n\nwhere\n\n T: IntoIterator<Item = &'a PolicySource>,\n\n D: Serialize + Debug,\n\n{\n\n tracing::info!(\"rendering policy string as template with data.\");\n\n\n\n // I tried to facilitate registry caching in policy, but handlebars' lifetime parameter\n\n // (underlying the 
PolicyRegistry) hampers the ergonomics of policy definition.\n\n // Not a performance impact since policy loading only happens on bootstrap or during\n\n // a corresponding, intermittent command.\n\n let mut registry = PolicyRegistry::new();\n\n for s in templates {\n\n let policy_template: String = s.try_into()?;\n\n registry.register_template_string(s.name().as_ref(), policy_template)?;\n\n }\n\n tracing::debug!(?registry, \"policy templates registered with handlebars registry\");\n\n let policy = registry.render(name, &data)?;\n\n tracing::info!(rendered_policy=%policy, \"rendered {} policy from template and data.\", name);\n\n Ok(policy)\n\n}\n\n\n", "file_path": "src/elements/policy_filter/policy.rs", "rank": 47, "score": 114453.16840346612 }, { "content": "fn zero_to_three() -> impl Stream<Item = u32> {\n\n stream! {\n\n for i in 0..3 {\n\n yield i;\n\n }\n\n }\n\n}\n", "file_path": "examples/async_stream.rs", "rank": 48, "score": 113903.25813755613 }, { "content": "#[allow(dead_code)]\n\nstruct TestFlow<T, C, D> {\n\n pub id_generator: CorrelationGenerator,\n\n pub graph_handle: JoinHandle<()>,\n\n pub tx_data_source_api: stage::ActorSourceApi<Telemetry>,\n\n pub tx_context_source_api: stage::ActorSourceApi<Telemetry>,\n\n pub tx_clearinghouse_api: collection::ClearinghouseApi,\n\n pub tx_eligibility_api: elements::PolicyFilterApi<C, D>,\n\n pub rx_eligibility_monitor: elements::PolicyFilterMonitor<T, C>,\n\n pub tx_sink_api: stage::FoldApi<Vec<PolicyOutcome<T, C>>>,\n\n pub rx_sink: Option<oneshot::Receiver<Vec<PolicyOutcome<T, C>>>>,\n\n}\n\n\n\nimpl<T, C, D> TestFlow<T, C, D>\n\nwhere\n\n T: AppData + DeserializeOwned + ToPolar,\n\n C: ProctorContext,\n\n D: Debug + Clone + Serialize + DeserializeOwned + Send + Sync + 'static,\n\n{\n\n pub async fn new<P>(telemetry_subscription: TelemetrySubscription, policy: P) -> anyhow::Result<Self>\n\n where\n", "file_path": "tests/test_policy_phase.rs", "rank": 49, "score": 110952.66064629638 }, { "content": "struct 
DateTimeVisitor;\n\n\n\nimpl<'de> de::Visitor<'de> for DateTimeVisitor {\n\n type Value = DateTime<Utc>;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"a datetime table\")\n\n }\n\n\n\n #[tracing::instrument(level = \"debug\", skip(self))]\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Utc.datetime_from_str(value, FORMAT).map_err(serde::de::Error::custom)\n\n }\n\n\n\n #[tracing::instrument(level = \"debug\", skip(self, access))]\n\n fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>\n\n where\n", "file_path": "src/serde/date.rs", "rank": 50, "score": 109292.86159674305 }, { "content": "#[async_trait]\n\npub trait Port {\n\n fn stage(&self) -> &str;\n\n fn name(&self) -> &str;\n\n fn full_name(&self) -> String {\n\n format!(\"{}::{}\", self.stage(), self.name())\n\n }\n\n\n\n /// Closes this half of a channel without dropping it.\n\n /// This prevents any further messages from being sent on the port while still enabling the\n\n /// receiver to drain messages that are buffered. Any outstanding Permit values will still be\n\n /// able to send messages.\n\n /// To guarantee that no messages are dropped, after calling close(), recv() must be called\n\n /// until None is returned. 
If there are outstanding Permit values, the recv method will not\n\n /// return None until those are released\n\n async fn close(&mut self);\n\n}\n\n\n", "file_path": "src/graph/port.rs", "rank": 51, "score": 106001.33423703809 }, { "content": "pub trait WithMonitor {\n\n type Receiver;\n\n fn rx_monitor(&self) -> Self::Receiver;\n\n}\n", "file_path": "src/graph/stage.rs", "rank": 52, "score": 106001.33423703809 }, { "content": "pub trait WithApi {\n\n type Sender;\n\n fn tx_api(&self) -> Self::Sender;\n\n}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 53, "score": 106001.33423703809 }, { "content": "#[tracing::instrument(level = \"info\")]\n\npub fn register_proctor_metrics(registry: &Registry) -> Result<(), ProctorError> {\n\n registry.register(Box::new(graph::GRAPH_ERRORS.clone()))?;\n\n registry.register(Box::new(graph::STAGE_INGRESS_COUNTS.clone()))?;\n\n registry.register(Box::new(graph::STAGE_EGRESS_COUNTS.clone()))?;\n\n registry.register(Box::new(graph::stage::STAGE_EVAL_TIME.clone()))?;\n\n registry.register(Box::new(policy_filter::POLICY_FILTER_EVAL_TIME.clone()))?;\n\n registry.register(Box::new(clearinghouse::SUBSCRIPTIONS_GAUGE.clone()))?;\n\n registry.register(Box::new(clearinghouse::PUBLICATIONS.clone()))?;\n\n Ok(())\n\n}\n", "file_path": "src/metrics.rs", "rank": 54, "score": 105407.8138085076 }, { "content": "fn type_name_of_val<T>(_val: &T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\nimpl<T, C, A, P, D> stage::WithApi for PolicyFilter<T, C, A, P, D>\n\nwhere\n\n P: QueryPolicy<Item = T, Context = C, Args = A, TemplateData = D>,\n\n{\n\n type Sender = PolicyFilterApi<C, D>;\n\n\n\n #[inline]\n\n fn tx_api(&self) -> Self::Sender {\n\n self.tx_api.clone()\n\n }\n\n}\n\n\n\nimpl<T, C, A, P, D> stage::WithMonitor for PolicyFilter<T, C, A, P, D>\n\nwhere\n\n P: QueryPolicy<Item = T, Context = C, Args = A, TemplateData = D>,\n\n{\n", "file_path": "src/elements/policy_filter.rs", "rank": 55, "score": 105243.97461192522 }, { 
"content": "#[tracing::instrument(level = \"info\", name = \"test deser\")]\n\nfn test_exchange_rate_deser() {\n\n use chrono::Utc;\n\n\n\n let json = r#\"{\n\n\"Realtime Currency Exchange Rate\": {\n\n\"1. From_Currency Code\": \"ETH\",\n\n\"2. From_Currency Name\": \"Ethereum\",\n\n\"3. To_Currency Code\": \"USD\",\n\n\"4. To_Currency Name\": \"United States Dollar\",\n\n\"5. Exchange Rate\": \"1858.11000000\",\n\n\"6. Last Refreshed\": \"2021-02-13 01:57:08\",\n\n\"7. Time Zone\": \"UTC\",\n\n\"8. Bid Price\": \"1858.10000000\",\n\n\"9. Ask Price\": \"1858.11000000\"\n\n}\n\n}\"#;\n\n\n\n let doc = serde_json::from_str::<HashMap<String, ExchangeRate>>(json).unwrap();\n\n let actual = doc.values().next().unwrap();\n\n tracing::trace!(?actual, \"json deser result\");\n", "file_path": "examples/eth_scan.rs", "rank": 56, "score": 104916.67545171453 }, { "content": "struct OptionalDateTimeMapVisitor;\n\n\n\nimpl<'de> de::Visitor<'de> for OptionalDateTimeMapVisitor {\n\n type Value = Option<DateTime<Utc>>;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"null or a datetime serialized value\")\n\n }\n\n\n\n #[tracing::instrument(level = \"debug\", skip(self))]\n\n fn visit_none<E>(self) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(None)\n\n }\n\n\n\n #[tracing::instrument(level = \"debug\", skip(self, deserializer))]\n\n fn visit_some<D>(self, deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>\n\n where\n", "file_path": "src/serde/date.rs", "rank": 57, "score": 104629.80991536716 }, { "content": "pub trait SinkShape {\n\n type In;\n\n fn inlet(&self) -> Inlet<Self::In>;\n\n}\n\n\n", "file_path": "src/graph/shape.rs", "rank": 58, "score": 103496.96079579687 }, { "content": "/// A bidirectional flow of elements that consequently has two inputs and two outputs,\n\n/// arranged like this:\n\n///\n\n/// {{{\n\n/// +------+\n\n/// In1 ~>| |~> Out1\n\n/// | bidi |\n\n/// Out2 <~| |<~ In2\n\n/// +------+\n\n/// 
}}}\n\npub trait BidiShape {\n\n type In1;\n\n type Out1;\n\n type In2;\n\n type Out2;\n\n\n\n fn inlet_1(&self) -> Inlet<Self::In1>;\n\n fn outlet_1(&self) -> Outlet<Self::Out1>;\n\n fn inlet_2(&self) -> Inlet<Self::In2>;\n\n fn outlet_2(&self) -> Outlet<Self::Out2>;\n\n}\n\n\n", "file_path": "src/graph/shape.rs", "rank": 59, "score": 103496.96079579687 }, { "content": "pub trait SourceShape {\n\n type Out;\n\n fn outlet(&self) -> Outlet<Self::Out>;\n\n}\n\n\n", "file_path": "src/graph/shape.rs", "rank": 60, "score": 103496.96079579687 }, { "content": "pub trait FromTelemetryStage<Out>: Stage + ThroughShape<In = Telemetry, Out = Out> + 'static {}\n\n\n\nimpl<Out, T> FromTelemetryStage<Out> for T where T: Stage + ThroughShape<In = Telemetry, Out = Out> + 'static {}\n\n\n\n#[tracing::instrument(level = \"info\", skip(name))]\n\npub async fn make_from_telemetry<Out>(name: impl Into<String>, log_conversion_failure: bool) -> FromTelemetryShape<Out>\n\nwhere\n\n Out: AppData + DeserializeOwned,\n\n{\n\n let name: SharedString = SharedString::Owned(name.into());\n\n let stage_name = name.clone();\n\n let from_telemetry =\n\n stage::FilterMap::<_, Telemetry, Out>::new(format!(\"{}_from_telemetry\", name), move |telemetry| {\n\n let span = tracing::info_span!(\"converting telemetry into data item\", ?telemetry);\n\n let _ = span.enter();\n\n\n\n match telemetry.try_into() {\n\n Ok(converted) => {\n\n tracing::trace!(?converted, \"data item derived from telemetry.\");\n\n Some(converted)\n", "file_path": "src/elements/from_telemetry.rs", "rank": 61, "score": 100431.0148302451 }, { "content": "pub trait FanInShape2: SourceShape {\n\n type In0;\n\n type In1;\n\n\n\n fn inlet_0(&self) -> Inlet<Self::In0>;\n\n fn inlet_1(&self) -> Inlet<Self::In1>;\n\n}\n\n\n\npub struct InletsShape<T>(pub Arc<Mutex<Vec<Inlet<T>>>>);\n\n\n\nimpl<T: Send> InletsShape<T> {\n\n pub fn new(inlets: Vec<Inlet<T>>) -> Self {\n\n Self(Arc::new(Mutex::new(inlets)))\n\n }\n\n\n\n pub async fn len(&self) 
-> usize {\n\n self.0.lock().await.len()\n\n }\n\n\n\n pub async fn get(&self, index: usize) -> Option<Inlet<T>> {\n", "file_path": "src/graph/shape.rs", "rank": 62, "score": 96725.85972057689 }, { "content": "pub trait ContinueTicking: Send {\n\n fn next(&mut self) -> bool;\n\n}\n\n\n\n#[derive(fmt::Debug, Clone, Copy, PartialEq)]\n\npub enum Constraint {\n\n None,\n\n ByCount {\n\n count: usize,\n\n limit: usize,\n\n },\n\n ByTime {\n\n stop: Option<tokio::time::Instant>,\n\n limit: Duration,\n\n },\n\n}\n\n\n\nimpl ContinueTicking for Constraint {\n\n fn next(&mut self) -> bool {\n\n match self {\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 63, "score": 94633.39782453777 }, { "content": "pub trait UniformFanOutShape: SinkShape {\n\n type Out;\n\n fn outlets(&self) -> OutletsShape<Self::Out>;\n\n}\n", "file_path": "src/graph/shape.rs", "rank": 64, "score": 94633.39782453777 }, { "content": "pub trait UniformFanInShape: SourceShape {\n\n type In;\n\n // todo use once associated type defaults are stable\n\n // type InletShape = Arc<Mutex<Inlet<Self::In>>>;\n\n // type InletsShape = Arc<Mutex<Vec<Self::InletShape>>>;\n\n\n\n fn inlets(&self) -> InletsShape<Self::In>;\n\n}\n\n\n\npub type OutletsShape<T> = Vec<Outlet<T>>;\n\n\n", "file_path": "src/graph/shape.rs", "rank": 65, "score": 94633.39782453777 }, { "content": "pub trait ThroughShape: SourceShape + SinkShape {}\n\nimpl<T: SourceShape + SinkShape> ThroughShape for T {}\n\n\n", "file_path": "src/graph/shape.rs", "rank": 66, "score": 92792.51593425263 }, { "content": "#[dyn_upcast]\n\n#[async_trait]\n\npub trait Stage: fmt::Debug + Send + Sync {\n\n fn name(&self) -> SharedString;\n\n async fn check(&self) -> ProctorResult<()>;\n\n async fn run(&mut self) -> ProctorResult<()>;\n\n async fn close(self: Box<Self>) -> ProctorResult<()>;\n\n}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 67, "score": 89884.05566714279 }, { "content": "#[inline]\n\nfn track_ingress(stage: &str, port_name: &str) 
{\n\n STAGE_INGRESS_COUNTS.with_label_values(&[stage, port_name]).inc()\n\n}\n\n\n", "file_path": "src/graph/port.rs", "rank": 68, "score": 89679.62070638104 }, { "content": "#[inline]\n\nfn track_egress(stage: &str, port_name: &str) {\n\n STAGE_EGRESS_COUNTS.with_label_values(&[stage, port_name]).inc()\n\n}\n\n\n\npub const PORT_DATA: &str = \"data\";\n\npub const PORT_CONTEXT: &str = \"context\";\n\n\n", "file_path": "src/graph/port.rs", "rank": 69, "score": 89679.62070638104 }, { "content": "mod basic_3_fan_in_merge;\n\nmod complex_fan_in;\n", "file_path": "tests/graph/main.rs", "rank": 70, "score": 85865.52604065988 }, { "content": "use std::time::Duration;\n\n\n\nuse anyhow::Result;\n\nuse proctor::graph::stage::{self, tick};\n\nuse proctor::graph::{Connect, Graph, SinkShape, SourceShape, UniformFanInShape};\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 4)]\n\nasync fn test_complex_multi_stage_merge_5() -> Result<()> {\n\n once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING);\n\n // fixtures::init_tracing(\"test_complex_multi_stage_merge_5\");\n\n let main_span = tracing::info_span!(\"test_complex_multi_stage_merge_5\");\n\n let _main_span_guard = main_span.enter();\n\n\n\n let src_0 = stage::Tick::with_constraint(\n\n \"src_ONES\",\n\n Duration::from_nanos(0),\n\n Duration::from_nanos(1),\n\n 1,\n\n tick::Constraint::by_count(10),\n\n );\n", "file_path": "tests/graph/complex_fan_in.rs", "rank": 71, "score": 83061.31713836535 }, { "content": " (tick_2.outlet(), map_2.inlet()).connect().await;\n\n (tick_3.outlet(), map_3.inlet()).connect().await;\n\n (tick_4.outlet(), map_4.inlet()).connect().await;\n\n\n\n (&src_0.outlet(), &merge.inlets().get(0).await.unwrap()).connect().await;\n\n (&map_1.outlet(), &merge.inlets().get(1).await.unwrap()).connect().await;\n\n (&map_2.outlet(), &merge.inlets().get(2).await.unwrap()).connect().await;\n\n (&map_3.outlet(), &merge.inlets().get(3).await.unwrap()).connect().await;\n\n (&map_4.outlet(), 
&merge.inlets().get(4).await.unwrap()).connect().await;\n\n\n\n let mut sum = stage::Fold::new(\"sum\", 0, |acc, x| acc + x);\n\n let rx_sum = sum.take_final_rx().unwrap();\n\n (merge.outlet(), sum.inlet()).connect().await;\n\n\n\n let mut g = Graph::default();\n\n g.push_back(Box::new(src_0)).await;\n\n g.push_back(Box::new(tick_1)).await;\n\n g.push_back(Box::new(tick_2)).await;\n\n g.push_back(Box::new(tick_3)).await;\n\n g.push_back(Box::new(tick_4)).await;\n", "file_path": "tests/graph/complex_fan_in.rs", "rank": 72, "score": 83051.25993474163 }, { "content": " \"src_SEVENS\",\n\n Duration::from_nanos(0),\n\n Duration::from_nanos(1),\n\n 3,\n\n tick::Constraint::by_count(777),\n\n );\n\n let map_3 = stage::Map::new(\"map_3\", |o| o + 4);\n\n\n\n let tick_4 = stage::Tick::with_constraint(\n\n \"src_13\",\n\n Duration::from_nanos(0),\n\n Duration::from_nanos(1),\n\n (),\n\n tick::Constraint::by_count(13),\n\n );\n\n let map_4 = stage::Map::new(\"map_4\", |()| 13);\n\n\n\n let merge = stage::MergeN::new(\"merge\", 5);\n\n\n\n (tick_1.outlet(), map_1.inlet()).connect().await;\n", "file_path": "tests/graph/complex_fan_in.rs", "rank": 73, "score": 83046.67594174655 }, { "content": "\n\n let tick_1 = stage::Tick::with_constraint(\n\n \"src_TENS\",\n\n Duration::from_nanos(0),\n\n Duration::from_nanos(1),\n\n \"10\",\n\n tick::Constraint::by_count(100),\n\n );\n\n let map_1 = stage::Map::new(\"map_1\", |s: &str| s.parse::<i32>().unwrap());\n\n\n\n let tick_2 = stage::Tick::with_constraint(\n\n \"src_HUNDREDS\",\n\n Duration::from_nanos(0),\n\n Duration::from_nanos(1),\n\n Duration::from_millis(100),\n\n tick::Constraint::by_count(1000),\n\n );\n\n let map_2 = stage::Map::new(\"map_2\", |d: Duration| d.as_millis() as i32);\n\n\n\n let tick_3 = stage::Tick::with_constraint(\n", "file_path": "tests/graph/complex_fan_in.rs", "rank": 74, "score": 83042.8076245666 }, { "content": " g.push_back(Box::new(map_1)).await;\n\n g.push_back(Box::new(map_2)).await;\n\n 
g.push_back(Box::new(map_3)).await;\n\n g.push_back(Box::new(map_4)).await;\n\n g.push_back(Box::new(merge)).await;\n\n g.push_back(Box::new(sum)).await;\n\n g.run().await?;\n\n\n\n match rx_sum.await {\n\n Ok(actual) => {\n\n let e1 = 1 * 10;\n\n let e10 = 10 * 100;\n\n let e100 = 100 * 1000;\n\n let e7 = 7 * 777;\n\n let e13 = 13 * 13;\n\n assert_eq!(actual, e1 + e10 + e100 + e7 + e13);\n\n Ok(())\n\n }\n\n Err(_err) => panic!(\"failed to receive final sum\"),\n\n }\n\n}\n", "file_path": "tests/graph/complex_fan_in.rs", "rank": 75, "score": 83042.8076245666 }, { "content": "#[tracing::instrument(level = \"info\", skip(name))]\n\npub fn make_telemetry_cvs_source<T, S>(name: S, setting: &SourceSetting) -> Result<TelemetrySource, CollectionError>\n\nwhere\n\n T: Serialize + DeserializeOwned + Debug,\n\n S: Into<String>,\n\n{\n\n if let SourceSetting::Csv { path } = setting {\n\n let name = name.into();\n\n let mut telemetry_name = format!(\"telemetry_{}\", name.as_str());\n\n\n\n if let Some(file_name) = path.file_name() {\n\n match file_name.to_str() {\n\n None => (),\n\n Some(file_name) => telemetry_name.push_str(format!(\"_{}\", file_name).as_str()),\n\n }\n\n }\n\n\n\n let csv_span = tracing::info_span!(\"sourcing CSV\", %telemetry_name, ?path);\n\n let _csv_span_guard = csv_span.enter();\n\n\n\n let mut records: Vec<Telemetry> = vec![];\n", "file_path": "src/phases/collection/source.rs", "rank": 76, "score": 81432.40164179965 }, { "content": "use anyhow::Result;\n\nuse proctor::graph::{stage, Connect, Graph, SinkShape, SourceShape, UniformFanInShape};\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 4)]\n\nasync fn test_basic_sequence_3_fan_in_merge() -> Result<()> {\n\n once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING);\n\n // fixtures::init_tracing(\"test_basic_sequence_3_fan_in_merge\");\n\n let main_span = tracing::info_span!(\"test_basic_sequence_3_fan_in_merge\");\n\n let _main_span_guard = main_span.enter();\n\n\n\n let src_0 
= stage::Sequence::new(\"src_ONES\", 1..=9);\n\n let src_1 = stage::Sequence::new(\"src_TENS\", 11..=99);\n\n let src_2 = stage::Sequence::new(\"src_HUNDREDS\", 101..=999);\n\n\n\n let merge = stage::MergeN::new(\"merge\", 3);\n\n\n\n (&src_0.outlet(), &merge.inlets().get(0).await.unwrap()).connect().await;\n\n (&src_1.outlet(), &merge.inlets().get(1).await.unwrap()).connect().await;\n\n (&src_2.outlet(), &merge.inlets().get(2).await.unwrap()).connect().await;\n\n\n", "file_path": "tests/graph/basic_3_fan_in_merge.rs", "rank": 77, "score": 80417.50100325649 }, { "content": " let mut sum = stage::Fold::new(\"sum\", 0, |acc, x| acc + x);\n\n let rx_sum = sum.take_final_rx().unwrap();\n\n (merge.outlet(), sum.inlet()).connect().await;\n\n\n\n let mut g = Graph::default();\n\n g.push_back(Box::new(src_0)).await;\n\n g.push_back(Box::new(src_1)).await;\n\n g.push_back(Box::new(src_2)).await;\n\n g.push_back(Box::new(merge)).await;\n\n g.push_back(Box::new(sum)).await;\n\n g.run().await?;\n\n\n\n match rx_sum.await {\n\n Ok(actual) => {\n\n let e1: i32 = (1..=9).into_iter().sum();\n\n let e10: i32 = (11..=99).into_iter().sum();\n\n let e100: i32 = (101..=999).into_iter().sum();\n\n assert_eq!(actual, e1 + e10 + e100);\n\n Ok(())\n\n }\n\n Err(_err) => panic!(\"failed to receive final sum\"),\n\n }\n\n}\n", "file_path": "tests/graph/basic_3_fan_in_merge.rs", "rank": 78, "score": 80405.81273408256 }, { "content": "struct TimestampVisitor;\n\n\n\nimpl<'de> de::Visitor<'de> for TimestampVisitor {\n\n type Value = Timestamp;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a timestamp value in integer, float, sequence, map or string form\")\n\n }\n\n\n\n #[tracing::instrument(level = \"debug\", skip(self))]\n\n fn visit_i8<E>(self, v: i8) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n self.visit_i64(v as i64)\n\n }\n\n\n\n #[tracing::instrument(level = \"debug\", skip(self))]\n\n fn visit_i16<E>(self, v: 
i16) -> Result<Self::Value, E>\n\n where\n", "file_path": "src/elements/timestamp.rs", "rank": 79, "score": 70465.60497449382 }, { "content": "pub trait MetricLabel {\n\n fn label(&self) -> SharedString {\n\n match self.next() {\n\n Either::Right(n) => format!(\"{}::{}\", self.slug(), n.label()).into(),\n\n Either::Left(ls) => format!(\"{}::{}\", self.slug(), ls).into(),\n\n }\n\n }\n\n\n\n fn slug(&self) -> SharedString;\n\n fn next(&self) -> Either<SharedString, Box<&dyn MetricLabel>>;\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum ProctorError {\n\n #[error(\"{0}\")]\n\n CollectionError(#[from] CollectionError),\n\n\n\n #[error(\"{0}\")]\n\n EligibilityError(#[from] EligibilityError),\n\n\n", "file_path": "src/error.rs", "rank": 80, "score": 66067.17964583723 }, { "content": "pub trait ToTelemetry {\n\n #[allow(clippy::wrong_self_convention)]\n\n fn to_telemetry(self) -> TelemetryValue;\n\n}\n\n\n\nimpl<T: Into<TelemetryValue>> ToTelemetry for T {\n\n fn to_telemetry(self) -> TelemetryValue {\n\n self.into()\n\n }\n\n}\n", "file_path": "src/elements/telemetry/to_telemetry.rs", "rank": 81, "score": 64794.602593322954 }, { "content": "pub trait TelemetryCombinator {\n\n fn combine(&self, items: Vec<TelemetryValue>) -> Result<Option<TelemetryValue>, TelemetryError>;\n\n}\n\n\n", "file_path": "src/elements/telemetry/combine.rs", "rank": 82, "score": 63598.18342276449 }, { "content": "// todo: refactor to based on something like Json Schema\n\npub trait SubscriptionRequirements {\n\n fn required_fields() -> HashSet<SharedString>;\n\n\n\n fn optional_fields() -> HashSet<SharedString> {\n\n HashSet::default()\n\n }\n\n}\n\n\n\n#[derive(Clone, Serialize)]\n\npub enum TelemetrySubscription {\n\n All {\n\n name: SharedString,\n\n #[serde(skip)]\n\n outlet_to_subscription: Outlet<Telemetry>,\n\n #[serde(skip)]\n\n update_metrics: Option<Arc<UpdateMetricsFn>>,\n\n },\n\n Explicit {\n\n name: SharedString,\n\n required_fields: HashSet<SharedString>,\n", "file_path": 
"src/phases/collection/clearinghouse/subscription.rs", "rank": 83, "score": 62471.28421046623 }, { "content": "pub trait FromTelemetry: Sized {\n\n fn from_telemetry(val: TelemetryValue) -> Result<Self, TelemetryError>;\n\n}\n\n\n\nimpl<T> FromTelemetry for T\n\nwhere\n\n T: TryFrom<TelemetryValue>,\n\n <T as TryFrom<TelemetryValue>>::Error: Into<TelemetryError>,\n\n{\n\n fn from_telemetry(telemetry: TelemetryValue) -> Result<Self, TelemetryError> {\n\n T::try_from(telemetry).map_err(|err| err.into())\n\n }\n\n}\n", "file_path": "src/elements/telemetry/from_telemetry.rs", "rank": 84, "score": 61399.26667389096 }, { "content": "#[inline]\n\nfn track_subscriptions(count: usize) {\n\n SUBSCRIPTIONS_GAUGE.set(count as i64);\n\n}\n\n\n", "file_path": "src/phases/collection/clearinghouse.rs", "rank": 85, "score": 60771.11715436868 }, { "content": "#[inline]\n\nfn track_publications(subscription: &str) {\n\n PUBLICATIONS.with_label_values(&[subscription]).inc();\n\n}\n\n\n\npub const SUBSCRIPTION_TIMESTAMP: &str = \"recv_timestamp\";\n\npub const SUBSCRIPTION_CORRELATION: &str = \"correlation_id\";\n\n\n\npub type CorrelationGenerator = ProctorIdGenerator<Telemetry>;\n\n\n\n/// Clearinghouse is a sink for collected telemetry data and a subscription-based source for\n\n/// groups of telemetry fields.\n\npub struct Clearinghouse {\n\n name: SharedString,\n\n subscriptions: Vec<TelemetrySubscription>,\n\n database: Telemetry,\n\n correlation_generator: CorrelationGenerator,\n\n inlet: Inlet<Telemetry>,\n\n tx_api: ClearinghouseApi,\n\n rx_api: mpsc::UnboundedReceiver<ClearinghouseCmd>,\n\n}\n", "file_path": "src/phases/collection/clearinghouse.rs", "rank": 86, "score": 60771.11715436868 }, { "content": "fn benchmark_merge_5(c: &mut Criterion) {\n\n c.bench_function(\"merge_5_fan_in\", move |b| {\n\n let rt = tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap();\n\n b.to_async(rt).iter(|| async {\n\n let (g, rx_sum) = make_graph().await;\n\n 
black_box(run_scenario(g, rx_sum).await.expect(\"scenario failed\"));\n\n })\n\n });\n\n}\n\n\n\n// criterion_group!(merge, benchmark_merge_5);\n\n\n\ncriterion_group! {\n\n name = merge;\n\n config = Criterion::default().with_profiler(super::super::profiler::FlamegraphProfiler::new(100));\n\n targets = benchmark_merge_5\n\n}\n", "file_path": "benches/benchmarks/merge.rs", "rank": 87, "score": 60022.01395322346 }, { "content": "pub trait SignalFilter<I: Iterator> {\n\n fn peaks<F>(self, detector: SignalDetector, signal: F) -> SignalIterator<I, F>\n\n where\n\n F: FnMut(&I::Item) -> f64;\n\n}\n\n\n\nimpl<I: Iterator> SignalFilter<I> for I {\n\n fn peaks<F>(self, detector: SignalDetector, signal: F) -> SignalIterator<I, F>\n\n where\n\n F: FnMut(&I::Item) -> f64,\n\n {\n\n SignalIterator { source: self, signal, detector }\n\n }\n\n}\n\n\n\nimpl<I, F> Iterator for SignalIterator<I, F>\n\nwhere\n\n I: Iterator,\n\n F: FnMut(&I::Item) -> f64,\n\n{\n", "file_path": "src/elements/signal.rs", "rank": 88, "score": 59558.38478360583 }, { "content": "pub trait PolicySubscription: QueryPolicy {\n\n type Requirements: SubscriptionRequirements;\n\n // todo: once stable: type TemplateData = <Self as QueryPolicy>::TemplateData;\n\n\n\n fn subscription(\n\n &self, name: &str, settings: &PolicySettings<<Self as QueryPolicy>::TemplateData>,\n\n ) -> TelemetrySubscription {\n\n tracing::trace!(\n\n \"policy required_fields:{:?}, optional_fields:{:?}\",\n\n Self::Requirements::required_fields(),\n\n Self::Requirements::optional_fields(),\n\n );\n\n\n\n let subscription = TelemetrySubscription::new(name)\n\n .for_requirements::<Self::Requirements>()\n\n .with_required_fields(settings.required_subscription_fields.clone())\n\n .with_optional_fields(settings.optional_subscription_fields.clone());\n\n\n\n let subscription = self.do_extend_subscription(subscription);\n\n\n", "file_path": "src/elements/policy_filter/policy.rs", "rank": 89, "score": 58204.22248055991 }, { "content": 
"#[async_trait]\n\npub trait Planning: Debug + Send + Sync {\n\n type Observation: AppData + Clone;\n\n type Decision: AppData + Clone;\n\n type Out: AppData + Clone;\n\n\n\n fn set_outlet(&mut self, outlet: Outlet<Self::Out>);\n\n fn add_observation(&mut self, observation: Self::Observation);\n\n async fn handle_decision(&mut self, decision: Self::Decision) -> Result<Option<Self::Out>, PlanError>;\n\n async fn close(mut self) -> Result<(), PlanError>;\n\n}\n\n\n\npub struct Plan<P: Planning> {\n\n name: SharedString,\n\n planning: P,\n\n inlet: Inlet<P::Observation>,\n\n decision_inlet: Inlet<P::Decision>,\n\n outlet: Outlet<P::Out>,\n\n pub tx_monitor: broadcast::Sender<Arc<Event<P>>>,\n\n}\n\n\n", "file_path": "src/phases/plan.rs", "rank": 90, "score": 56867.77699108186 }, { "content": "#[inline]\n\nfn start_policy_timer(stage: &str) -> HistogramTimer {\n\n POLICY_FILTER_EVAL_TIME.with_label_values(&[stage]).start_timer()\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\npub enum PolicyResult {\n\n Passed,\n\n Blocked,\n\n Failed,\n\n}\n\n\n\nimpl fmt::Display for PolicyResult {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let label = match *self {\n\n Self::Passed => \"passed\",\n\n Self::Blocked => \"blocked\",\n\n Self::Failed => \"failed\",\n\n };\n\n write!(f, \"{}\", label)\n\n }\n", "file_path": "src/elements/policy_filter.rs", "rank": 91, "score": 55870.37869362747 }, { "content": "pub trait QueryPolicy: Debug + Send + Sync {\n\n type Item: ToPolar + Clone;\n\n type Context: ToPolar + Clone;\n\n type Args: ToPolarList;\n\n type TemplateData: Debug + Serialize + DeserializeOwned;\n\n\n\n fn zero_context(&self) -> Option<Self::Context> {\n\n None\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", skip(engine))]\n\n fn load_policy_engine(&mut self, engine: &mut oso::Oso) -> Result<(), PolicyError> {\n\n engine.clear_rules()?;\n\n let source_paths = self.render_policy_sources()?;\n\n engine.load_files(source_paths)?;\n\n 
Ok(())\n\n }\n\n\n\n #[tracing::instrument(level = \"info\")]\n\n fn render_policy_sources(&self) -> Result<Vec<PolicySourcePath>, PolicyError> {\n", "file_path": "src/elements/policy_filter/policy.rs", "rank": 92, "score": 53848.463408609794 }, { "content": "};\n\nuse proctor::error::{PolicyError, ProctorError};\n\nuse proctor::graph::stage::{self, WithApi, WithMonitor};\n\nuse proctor::graph::{Connect, Graph, SinkShape, SourceShape, UniformFanInShape};\n\nuse proctor::phases::collection::{\n\n self, CorrelationGenerator, SubscriptionRequirements, TelemetrySubscription, SUBSCRIPTION_CORRELATION,\n\n SUBSCRIPTION_TIMESTAMP,\n\n};\n\nuse proctor::phases::policy_phase::PolicyPhase;\n\nuse proctor::ProctorContext;\n\nuse proctor::{AppData, SharedString};\n\nuse serde_test::{assert_tokens, Token};\n\nuse tokio::sync::oneshot;\n\nuse tokio::task::JoinHandle;\n\n\n\n#[derive(PolarClass, Label, Debug, Clone, Serialize, Deserialize)]\n\npub struct Data {\n\n pub input_messages_per_sec: f64,\n\n #[serde(with = \"proctor::serde\")]\n\n pub timestamp: DateTime<Utc>,\n", "file_path": "tests/test_policy_phase.rs", "rank": 93, "score": 51724.91433190508 }, { "content": " })\n\n }\n\n}\n\n\n\n#[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct TestClusterStatus {\n\n #[polar(attribute)]\n\n #[serde(rename = \"cluster.location_code\")]\n\n pub location_code: u32,\n\n\n\n #[polar(attribute)]\n\n #[serde(rename = \"cluster.is_deploying\")]\n\n pub is_deploying: bool,\n\n\n\n #[serde(with = \"proctor::serde\", rename = \"cluster.last_deployment\")]\n\n pub last_deployment: DateTime<Utc>,\n\n}\n\n\n\nimpl TestClusterStatus {\n\n pub fn last_deployment_within_seconds(&self, seconds: i64) -> bool {\n", "file_path": "tests/test_policy_phase.rs", "rank": 94, "score": 51722.49595566433 }, { "content": " maplit::hashset! 
{ \"task.last_failure\".into(), }\n\n }\n\n}\n\n\n\n#[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct TestTaskStatus {\n\n #[serde(default)]\n\n #[serde(\n\n rename = \"task.last_failure\",\n\n serialize_with = \"proctor::serde::date::serialize_optional_datetime_map\",\n\n deserialize_with = \"proctor::serde::date::deserialize_optional_datetime\"\n\n )]\n\n pub last_failure: Option<DateTime<Utc>>,\n\n}\n\n\n\nimpl TestTaskStatus {\n\n pub fn last_failure_within_seconds(&self, seconds: i64) -> bool {\n\n self.last_failure.map_or(false, |last_failure| {\n\n let boundary = Utc::now() - chrono::Duration::seconds(seconds);\n\n boundary < last_failure\n", "file_path": "tests/test_policy_phase.rs", "rank": 95, "score": 51720.95346045493 }, { "content": " pub correlation_id: Id<Data>,\n\n pub inbox_lag: i64,\n\n}\n\n\n\nimpl PartialEq for Data {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.input_messages_per_sec.eq(&other.input_messages_per_sec) && self.inbox_lag.eq(&other.inbox_lag)\n\n }\n\n}\n\n\n\n#[derive(PolarClass, Label, Debug, Clone, Serialize, Deserialize)]\n\npub struct TestPolicyPhaseContext {\n\n pub timestamp: Timestamp,\n\n pub correlation_id: Id<TestPolicyPhaseContext>,\n\n\n\n #[polar(attribute)]\n\n #[serde(flatten)]\n\n pub task_status: TestTaskStatus,\n\n #[polar(attribute)]\n\n #[serde(flatten)]\n", "file_path": "tests/test_policy_phase.rs", "rank": 96, "score": 51718.530363845 }, { "content": " pub cluster_status: TestClusterStatus,\n\n\n\n #[polar(attribute)]\n\n #[serde(flatten)]\n\n pub custom: telemetry::TableValue,\n\n}\n\n\n\n// impl Label for TestPolicyPhaseContext {\n\n// type Labeler = MakeLabeling<Self>;\n\n//\n\n// fn labeler() -> Self::Labeler {\n\n// MakeLabeling::default()\n\n// }\n\n// }\n\n\n\nimpl PartialEq for TestPolicyPhaseContext {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.task_status == other.task_status && self.cluster_status == other.cluster_status\n\n }\n\n}\n", "file_path": 
"tests/test_policy_phase.rs", "rank": 97, "score": 51706.47565614167 }, { "content": " let main_span = tracing::info_span!(\"test_eligibility_happy_context\");\n\n let _ = main_span.enter();\n\n\n\n #[derive(PolarClass, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\n pub struct MeasurementData {\n\n measurement: f64,\n\n }\n\n\n\n let policy = make_test_policy(&PolicySettings {\n\n required_subscription_fields: HashSet::default(),\n\n optional_subscription_fields: HashSet::default(),\n\n policies: vec![assert_ok!(PolicySource::from_complete_string(\n\n TestPolicyA::<MeasurementData>::base_template_name(),\n\n r##\"eligible(_, context) if context.cluster_status.is_deploying == false;\"##,\n\n ))],\n\n template_data: None,\n\n });\n\n\n\n let mut flow: TestFlow<MeasurementData, TestPolicyPhaseContext, PolicyData> = assert_ok!(\n\n TestFlow::new(\n", "file_path": "tests/test_policy_phase.rs", "rank": 98, "score": 51705.38876870743 }, { "content": " result\n\n }\n\n}\n\n\n\nimpl<T: AppData + ToPolar + Clone> QueryPolicy for TestPolicyA<T> {\n\n type Args = (Self::Item, Self::Context);\n\n type Context = TestPolicyPhaseContext;\n\n type Item = T;\n\n type TemplateData = PolicyData;\n\n\n\n fn base_template_name() -> &'static str {\n\n POLICY_A_TEMPLATE_NAME\n\n }\n\n\n\n fn policy_template_data(&self) -> Option<&Self::TemplateData> {\n\n self.policy_template_data.as_ref()\n\n }\n\n\n\n fn policy_template_data_mut(&mut self) -> Option<&mut Self::TemplateData> {\n\n self.policy_template_data.as_mut()\n", "file_path": "tests/test_policy_phase.rs", "rank": 99, "score": 51705.24294576958 } ]
Rust
src/union_find/client.rs
ccozad/cozad-union-find
3ec2ac39cecf08e6d68977a67f2c7bb02409fc6f
#[cfg(test)] #[path = "client_tests.rs"] mod client_tests; use std::collections::HashMap; #[derive(Hash, Eq, PartialEq, Debug)] struct Node { pub uuid: String, pub parent_index: usize, pub index: usize, pub size: usize } #[derive(Hash, Eq, PartialEq, Debug)] pub struct BulkConnection { pub a: usize, pub b: usize } #[derive(Debug)] pub struct Client { nodes: Vec<Node>, node_map: HashMap<String, usize>, set_count: usize } impl BulkConnection { pub fn new(a: usize, b: usize) -> Self { BulkConnection { a, b } } } impl Client { pub fn new() -> Self { let node_map = HashMap::new(); let mut nodes = Vec::new(); let root_node = Node { uuid: String::from("root"), parent_index: 0, index: 0, size: 0 }; nodes.push(root_node); Client { nodes, node_map, set_count: 0 } } #[allow(dead_code)] pub fn add_node(&mut self, uuid: &str) { if !self.node_exists(uuid) { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } pub fn add_nodes_bulk(&mut self, uuid_list: Vec<String>) { for uuid in uuid_list.iter() { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } #[allow(dead_code)] pub fn connect_nodes(&mut self, uuid_a: &str, uuid_b: &str) { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); if uuid_a_root == uuid_b_root { return } else { let node_slice = &mut self.nodes[..]; if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; } self.set_count -= 1; } } pub fn connect_nodes_bulk(&mut 
self, connections: Vec<BulkConnection>) { for connection in connections.iter() { let uuid_a_root = self.find_root_index_bulk(connection.a + 1); let uuid_b_root = self.find_root_index_bulk(connection.b + 1); if uuid_a_root == uuid_b_root { } else { let node_slice = &mut self.nodes[..]; if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; } self.set_count -= 1; } } } pub fn disjoint_set_count(&self) -> usize { self.set_count } pub fn find_root_index(&self, uuid: &str) -> usize { let node_index = self.node_index(uuid); if node_index > 0 { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } else { 0 } } pub fn find_root_index_bulk(&self, node_index: usize) -> usize { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } #[allow(dead_code)] pub fn nodes_connected(&self, uuid_a: &str, uuid_b: &str) -> bool { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); uuid_a_root > 0 && uuid_a_root == uuid_b_root } #[allow(dead_code)] pub fn node_count(&self) -> usize { self.nodes.len() - 1 } pub fn node_exists(&self, uuid: &str) -> bool { let node_uuid = String::from(uuid); self.node_map.contains_key(&node_uuid) } pub fn node_index(&self, uuid: &str) -> usize { let node_uuid = String::from(uuid); if self.node_map.contains_key(&node_uuid) { *self.node_map.get(&node_uuid).unwrap() } else { 0 } } }
#[cfg(test)] #[path = "client_tests.rs"] mod client_tests; use std::collections::HashMap; #[derive(Hash, Eq, PartialEq, Debug)] struct Node { pub uuid: String, pub parent_index: usize, pub index: usize, pub size: usize } #[derive(Hash, Eq, PartialEq, Debug)] pub struct BulkConnection { pub a: usize, pub b: usize } #[derive(Debug)] pub struct Client { nodes: Vec<Node>, node_map: HashMap<String, usize>, set_count: usize } impl BulkConnection { pub fn new(a: usize, b: usize) -> Self { BulkConnection { a, b } } } impl Client { pub fn new() -> Self { let node_map = HashMap::new(); let mut nodes = Vec::new(); let root_node = Node { uuid: String::from("root"), parent_index: 0, index: 0, size: 0 }; nodes.push(root_node); Client { nodes, node_map, set_count: 0 } } #[allow(dead_code)] pub fn add_node(&mut self, uuid: &str) { if !self.node_exists(uuid) { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } pub fn add_nodes_bulk(&mut self, uuid_list: Vec<String>) { for uuid in uuid_list.iter() { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } #[allow(dead_code)] pub fn connect_nodes(&mut self, uuid_a: &str, uuid_b: &str) { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); if uuid_a_root == uuid_b_root { return } else { let node_slice = &mut self.nodes[..];
self.set_count -= 1; } } pub fn connect_nodes_bulk(&mut self, connections: Vec<BulkConnection>) { for connection in connections.iter() { let uuid_a_root = self.find_root_index_bulk(connection.a + 1); let uuid_b_root = self.find_root_index_bulk(connection.b + 1); if uuid_a_root == uuid_b_root { } else { let node_slice = &mut self.nodes[..]; if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; } self.set_count -= 1; } } } pub fn disjoint_set_count(&self) -> usize { self.set_count } pub fn find_root_index(&self, uuid: &str) -> usize { let node_index = self.node_index(uuid); if node_index > 0 { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } else { 0 } } pub fn find_root_index_bulk(&self, node_index: usize) -> usize { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } #[allow(dead_code)] pub fn nodes_connected(&self, uuid_a: &str, uuid_b: &str) -> bool { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); uuid_a_root > 0 && uuid_a_root == uuid_b_root } #[allow(dead_code)] pub fn node_count(&self) -> usize { self.nodes.len() - 1 } pub fn node_exists(&self, uuid: &str) -> bool { let node_uuid = String::from(uuid); self.node_map.contains_key(&node_uuid) } pub fn node_index(&self, uuid: &str) -> usize { let node_uuid = String::from(uuid); if self.node_map.contains_key(&node_uuid) { *self.node_map.get(&node_uuid).unwrap() } else { 0 } } }
if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; }
if_condition
[ { "content": "#[test]\n\nfn node_index_positive() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n\n\n assert_eq!(1, client.node_index(\"A\"));\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 1, "score": 76115.25809501106 }, { "content": "#[test]\n\nfn node_index_negative() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n\n\n assert_eq!(0, client.node_index(\"foo\"));\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 2, "score": 76115.25809501106 }, { "content": "#[test]\n\nfn add_node() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n\n\n assert_eq!(2, client.nodes.len());\n\n assert_eq!(1, client.node_count());\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 3, "score": 58480.80996098595 }, { "content": "#[test]\n\nfn node_exists_positive() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n\n\n assert_eq!(true, client.node_exists(\"A\"));\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 4, "score": 56760.02115625231 }, { "content": "#[test]\n\nfn connect_nodes_negative() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n client.add_node(\"B\");\n\n client.add_node(\"C\");\n\n client.connect_nodes(\"A\", \"B\");\n\n\n\n assert_eq!(false, client.nodes_connected(\"A\", \"C\"));\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 5, "score": 56760.02115625231 }, { "content": "#[test]\n\nfn node_exists_negative() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n\n\n assert_eq!(false, client.node_exists(\"foo\"));\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 6, "score": 56760.02115625231 }, { "content": "#[test]\n\nfn connect_nodes_positive() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n client.add_node(\"B\");\n\n client.connect_nodes(\"A\", 
\"B\");\n\n\n\n assert_eq!(true, client.nodes_connected(\"A\", \"B\"));\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 7, "score": 56760.02115625231 }, { "content": "#[test]\n\nfn connect_nodes_bulk() {\n\n let mut client = ufclient::Client::new();\n\n let nodes = vec![\n\n String::from(\"A\"), \n\n String::from(\"B\"), \n\n String::from(\"C\"),\n\n String::from(\"D\"),\n\n String::from(\"E\"),\n\n String::from(\"F\"), \n\n String::from(\"G\"), \n\n String::from(\"H\"), \n\n String::from(\"I\"), \n\n String::from(\"J\")\n\n ];\n\n client.add_nodes_bulk(nodes);\n\n\n\n let connections = vec![\n\n ufconnection { a: 4, b: 3 },\n\n ufconnection { a: 3, b: 8 },\n\n ufconnection { a: 6, b: 5 },\n", "file_path": "src/union_find/client_tests.rs", "rank": 8, "score": 56760.02115625231 }, { "content": "#[test]\n\nfn add_nodes_bulk() {\n\n let mut client = ufclient::Client::new();\n\n let nodes = vec![\n\n String::from(\"A\"), \n\n String::from(\"B\"), \n\n String::from(\"C\"),\n\n String::from(\"D\"),\n\n String::from(\"E\"),\n\n String::from(\"F\"), \n\n String::from(\"G\"), \n\n String::from(\"H\"), \n\n String::from(\"I\"), \n\n String::from(\"J\")\n\n ];\n\n client.add_nodes_bulk(nodes);\n\n\n\n assert_eq!(10, client.node_count());\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 9, "score": 56760.02115625231 }, { "content": "fn convert_connection(line: String) -> BulkConnection {\n\n let connection_raw = line.split_once(\",\");\n\n let connection = connection_raw.unwrap();\n\n let a = connection.0.parse::<usize>().unwrap();\n\n let b = connection.1.parse::<usize>().unwrap();\n\n\n\n BulkConnection::new(a, b)\n\n}", "file_path": "src/main.rs", "rank": 10, "score": 52040.35719168531 }, { "content": "#[test]\n\nfn constructor() {\n\n let client = ufclient::Client::new();\n\n\n\n assert_eq!(1, client.nodes.len());\n\n assert_eq!(0, client.node_map.len());\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 11, "score": 
40693.03232580585 }, { "content": "#[test]\n\nfn disjoint_set_count() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n client.add_node(\"B\");\n\n client.add_node(\"C\");\n\n assert_eq!(3, client.disjoint_set_count());\n\n client.connect_nodes(\"A\", \"B\");\n\n assert_eq!(2, client.disjoint_set_count());\n\n client.connect_nodes(\"B\", \"C\");\n\n assert_eq!(1, client.disjoint_set_count());\n\n client.connect_nodes(\"B\", \"C\");\n\n assert_eq!(1, client.disjoint_set_count());\n\n client.connect_nodes(\"A\", \"A\");\n\n assert_eq!(1, client.disjoint_set_count());\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 12, "score": 38446.35435844306 }, { "content": "#[test]\n\nfn duplicate_adds_ignored() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n client.add_node(\"A\");\n\n\n\n assert_eq!(2, client.nodes.len());\n\n assert_eq!(1, client.node_count());\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 13, "score": 38446.35435844306 }, { "content": "#[derive(Parser, Debug)]\n\n#[clap(author, version, about, long_about = None)]\n\nstruct Args {\n\n /// Name of the file with node names by index\n\n #[clap(short, long, value_name = \"FILE\")]\n\n nodes: String,\n\n\n\n /// Name of the file with node connections by index\n\n #[clap(short, long, value_name = \"FILE\")]\n\n connections: String,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 37735.586286748796 }, { "content": "fn main() {\n\n let args = Args::parse();\n\n let mut client = Client::new();\n\n\n\n println!(\"\\nNode File: {}\", args.nodes);\n\n let mut nodes: Vec<String> = vec![];\n\n\n\n let node_file = File::open(args.nodes).unwrap();\n\n let node_reader = BufReader::new(node_file);\n\n\n\n println!(\"Processing nodes file...\");\n\n for line in node_reader.lines() {\n\n nodes.push(line.unwrap());\n\n }\n\n println!(\"Nodes file processed\");\n\n\n\n println!(\"Bulk adding nodes...\");\n\n 
client.add_nodes_bulk(nodes);\n\n println!(\"Nodes bulk added\");\n\n\n", "file_path": "src/main.rs", "rank": 15, "score": 28533.366996397766 }, { "content": " ufconnection { a: 9, b: 4 },\n\n ufconnection { a: 2, b: 1 },\n\n ufconnection { a: 8, b: 9 },\n\n ufconnection { a: 5, b: 0 },\n\n ufconnection { a: 7, b: 2 },\n\n ufconnection { a: 6, b: 1 },\n\n ufconnection { a: 1, b: 0 },\n\n ufconnection{ a: 6, b: 7 }\n\n ];\n\n client.connect_nodes_bulk(connections);\n\n\n\n assert_eq!(10, client.node_count());\n\n assert_eq!(2, client.disjoint_set_count());\n\n}", "file_path": "src/union_find/client_tests.rs", "rank": 33, "score": 15005.950733698732 }, { "content": "use crate::union_find::client as ufclient;\n\nuse crate::union_find::client::BulkConnection as ufconnection;\n\n\n\n#[test]\n", "file_path": "src/union_find/client_tests.rs", "rank": 34, "score": 15003.900884331593 }, { "content": "## Using the bulk interfaces\n\n\n\nWhen you have a large volume of connections to process you can skip the lookups that occur with named nodes and use the bulk interfaces. 
The process involves giving a vector of node names and then specifying connections between nodes by index.\n\n\n\n``` rust\n\nextern crate cozad_union_find;\n\nuse cozad_union_find::union_find::client as ufclient;\n\nuse cozad_union_find::union_find::client::BulkConnection as ufconnection;\n\n\n\nfn main() {\n\n\n\n let mut bulk_client = ufclient::Client::new();\n\n let nodes = vec![\n\n String::from(\"A\"), \n\n String::from(\"B\"), \n\n String::from(\"C\"),\n\n String::from(\"D\"),\n\n String::from(\"E\"),\n\n String::from(\"F\"), \n\n String::from(\"G\"), \n\n String::from(\"H\"), \n\n String::from(\"I\"), \n\n String::from(\"J\")\n\n ];\n\n bulk_client.add_nodes_bulk(nodes);\n\n\n\n let connections = vec![\n\n ufconnection { a: 4, b: 3 },\n\n ufconnection { a: 3, b: 8 },\n\n ufconnection { a: 6, b: 5 },\n\n ufconnection { a: 9, b: 4 },\n\n ufconnection { a: 2, b: 1 },\n\n ufconnection { a: 8, b: 9 },\n\n ufconnection { a: 5, b: 0 },\n\n ufconnection { a: 7, b: 2 },\n\n ufconnection { a: 6, b: 1 },\n\n ufconnection { a: 1, b: 0 },\n\n ufconnection{ a: 6, b: 7 }\n\n ];\n\n bulk_client.connect_nodes_bulk(connections);\n\n\n\n println!(\"\\nDisjoint sets found: {}\", bulk_client.disjoint_set_count());\n\n}\n\n```\n\n\n\nOutput\n\n```\n\nDisjoint sets found: 2\n\n```\n\n\n\n## Run as a CLI\n\n\n\n```\n\ncargo build\n\ncd target/debug\n\n./cozad-union-find -n ../../data/nodes_small.txt -c ../../data/connections_small.txt\n\n\n\n```\n\n\n\nExample Output\n\n```\n\nNode File: ../../data/nodes_small.txt\n\nProcessing nodes file...\n\nNodes file processed\n\nBulk adding nodes...\n\nNodes bulk added\n\n\n\nConnections File: ../../data/connections_small.txt\n\nProcessing connections file...\n\nConnections file processed\n\nBulk connecting nodes...\n\nNodes bulk connected\n\n\n\nDisjoint sets found: 2\n\n```\n\n\n\n## Run the tests\n\n\n\n```\n\ncargo test\n\n```\n\n\n", "file_path": "README.md", "rank": 35, "score": 12.370487083503694 }, { "content": "# 
cozad-union-find\n\nAn implementation of the union-find disjoint set graph algorithm\n\n\n\n![MIT License](https://img.shields.io/github/license/ccozad/cozad-union-find)\n\n\n\n# Quick Start\n\n\n\n## Using the named node interfaces\n\nFor relatively small networks you can simply interact with nodes by name.\n\n\n\n``` rust\n\nextern crate cozad_union_find;\n\nuse cozad_union_find::union_find::client as ufclient;\n\n\n\nfn main() {\n\n let mut client = ufclient::Client::new();\n\n\n\n client.add_node(\"A\");\n\n client.add_node(\"B\");\n\n client.add_node(\"C\");\n\n client.add_node(\"D\");\n\n client.add_node(\"E\");\n\n client.add_node(\"F\");\n\n client.add_node(\"G\");\n\n client.add_node(\"H\");\n\n client.add_node(\"I\");\n\n client.add_node(\"J\");\n\n\n\n\n\n client.connect_nodes(\"E\", \"D\");\n\n client.connect_nodes(\"D\", \"I\");\n\n client.connect_nodes(\"G\", \"F\");\n\n client.connect_nodes(\"J\", \"E\");\n\n client.connect_nodes(\"C\", \"B\");\n\n client.connect_nodes(\"I\", \"J\");\n\n client.connect_nodes(\"F\", \"A\");\n\n client.connect_nodes(\"H\", \"B\");\n\n client.connect_nodes(\"G\", \"B\");\n\n client.connect_nodes(\"B\", \"A\");\n\n client.connect_nodes(\"G\", \"H\");\n\n\n\n println!(\"\\nDisjoint sets found: {}\", client.disjoint_set_count());\n\n}\n\n```\n\n\n\nOutput\n\n```\n\nDisjoint sets found: 2\n\n```\n\n\n", "file_path": "src/README.md", "rank": 36, "score": 10.552311578241357 }, { "content": "## Using the bulk interfaces\n\n\n\nWhen you have a large volume of connections to process you can skip the lookups that occur with named nodes and use the bulk interfaces. 
The process involves giving a vector of node names and then specifying connections between nodes by index.\n\n\n\n``` rust\n\nextern crate cozad_union_find;\n\nuse cozad_union_find::union_find::client as ufclient;\n\nuse cozad_union_find::union_find::client::BulkConnection as ufconnection;\n\n\n\nfn main() {\n\n\n\n let mut bulk_client = ufclient::Client::new();\n\n let nodes = vec![\n\n String::from(\"A\"), \n\n String::from(\"B\"), \n\n String::from(\"C\"),\n\n String::from(\"D\"),\n\n String::from(\"E\"),\n\n String::from(\"F\"), \n\n String::from(\"G\"), \n\n String::from(\"H\"), \n\n String::from(\"I\"), \n\n String::from(\"J\")\n\n ];\n\n bulk_client.add_nodes_bulk(nodes);\n\n\n\n let connections = vec![\n\n ufconnection { a: 4, b: 3 },\n\n ufconnection { a: 3, b: 8 },\n\n ufconnection { a: 6, b: 5 },\n\n ufconnection { a: 9, b: 4 },\n\n ufconnection { a: 2, b: 1 },\n\n ufconnection { a: 8, b: 9 },\n\n ufconnection { a: 5, b: 0 },\n\n ufconnection { a: 7, b: 2 },\n\n ufconnection { a: 6, b: 1 },\n\n ufconnection { a: 1, b: 0 },\n\n ufconnection{ a: 6, b: 7 }\n\n ];\n\n bulk_client.connect_nodes_bulk(connections);\n\n\n\n println!(\"\\nDisjoint sets found: {}\", bulk_client.disjoint_set_count());\n\n}\n\n```\n\n\n\nOutput\n\n```\n\nDisjoint sets found: 2\n\n```\n\n\n\n# Concepts\n\n - What is a disjoint set?\n\n - Disjoint sets have no items in common between each set\n\n - https://en.wikipedia.org/wiki/Disjoint_sets\n\n - Why would I use this?\n\n - You have a large un-directed graph and you want to find non overlapping sets, such as for\n\n - 2D and 3D Percolation\n\n - Disease exposure\n\n - Contact tracing\n\n - Labeling clusters\n\n - How can I learn more?\n\n - https://algs4.cs.princeton.edu/15uf/\n\n - Purchase access to the full support videos\n\n - Includes detailed coverage of theory, code, and tests\n\n - Coming soon!\n\n\n\n# Support\n\n - How do I request a change?\n\n - Please submit an issue or a pull request\n\n - How fast will my request be 
added?\n\n - Probably not very fast for requests outside of a support package because this repo is maintained by a working professional\n\n - If you require fast, predictable responses, please purchase a support package\n\n - Can support package be purchased?\n\n - Yes, various support packages can be purchased and customized for your needs. Support areas available include:\n\n - On demand support videos\n\n - 1:1 and team coaching\n\n - New features and other modifications\n", "file_path": "src/README.md", "rank": 37, "score": 10.462704030773686 }, { "content": "# cozad-union-find\n\nA Rust implementation of the union-find disjoint set graph algorithm\n\n\n\n![MIT License](https://img.shields.io/github/license/ccozad/cozad-union-find) ![Build Status](https://img.shields.io/github/workflow/status/ccozad/cozad-union-find/Build) ![Code Size](https://img.shields.io/github/languages/code-size/ccozad/cozad-union-find) ![Top Language](https://img.shields.io/github/languages/top/ccozad/cozad-union-find)\n\n![Crates.io](https://img.shields.io/crates/v/cozad_union_find)\n\n\n\n# Quick Start\n\n\n\n## Instalation\n\nAdd the following to your Cargo.toml file\n\n\n\n```\n\ncozad-union-find = \"1.1.0\"\n\n```\n\n\n\n## Using the named node interfaces\n\nFor relatively small networks you can simply interact with nodes by name.\n\n\n\n``` rust\n\nextern crate cozad_union_find;\n\nuse cozad_union_find::union_find::client as ufclient;\n\n\n\nfn main() {\n\n let mut client = ufclient::Client::new();\n\n\n\n client.add_node(\"A\");\n\n client.add_node(\"B\");\n\n client.add_node(\"C\");\n\n client.add_node(\"D\");\n\n client.add_node(\"E\");\n\n client.add_node(\"F\");\n\n client.add_node(\"G\");\n\n client.add_node(\"H\");\n\n client.add_node(\"I\");\n\n client.add_node(\"J\");\n\n\n\n\n\n client.connect_nodes(\"E\", \"D\");\n\n client.connect_nodes(\"D\", \"I\");\n\n client.connect_nodes(\"G\", \"F\");\n\n client.connect_nodes(\"J\", \"E\");\n\n client.connect_nodes(\"C\", 
\"B\");\n\n client.connect_nodes(\"I\", \"J\");\n\n client.connect_nodes(\"F\", \"A\");\n\n client.connect_nodes(\"H\", \"B\");\n\n client.connect_nodes(\"G\", \"B\");\n\n client.connect_nodes(\"B\", \"A\");\n\n client.connect_nodes(\"G\", \"H\");\n\n\n\n println!(\"\\nDisjoint sets found: {}\", client.disjoint_set_count());\n\n}\n\n```\n\n\n\nOutput\n\n```\n\nDisjoint sets found: 2\n\n```\n\n\n", "file_path": "README.md", "rank": 38, "score": 10.052783184156327 }, { "content": "use std::fs::File;\n\nuse std::io::{BufReader, BufRead};\n\nuse clap::Parser;\n\nmod union_find;\n\nuse union_find::client::BulkConnection;\n\nuse union_find::client::Client;\n\n\n\n#[derive(Parser, Debug)]\n\n#[clap(author, version, about, long_about = None)]\n", "file_path": "src/main.rs", "rank": 39, "score": 8.159303985753311 }, { "content": "pub mod client;", "file_path": "src/union_find.rs", "rank": 40, "score": 7.519688907846335 }, { "content": " println!(\"\\nConnections File: {}\", args.connections);\n\n let mut connections: Vec<BulkConnection> = vec![];\n\n\n\n let connection_file = File::open(args.connections).unwrap();\n\n let connection_reader = BufReader::new(connection_file);\n\n\n\n println!(\"Processing connections file...\");\n\n for line in connection_reader.lines() {\n\n connections.push(convert_connection(line.unwrap()))\n\n }\n\n println!(\"Connections file processed\");\n\n\n\n println!(\"Bulk connecting nodes...\");\n\n client.connect_nodes_bulk(connections);\n\n println!(\"Nodes bulk connected\");\n\n\n\n println!(\"\\nDisjoint sets found: {}\", client.disjoint_set_count())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 41, "score": 5.579080913010493 }, { "content": "pub mod union_find;\n", "file_path": "src/lib.rs", "rank": 42, "score": 5.036909715643024 }, { "content": "# Concepts\n\n - What is a disjoint set?\n\n - Disjoint sets have no items in common between each set\n\n - https://en.wikipedia.org/wiki/Disjoint_sets\n\n - Why would I use this?\n\n - You have 
a large un-directed graph and you want to find non overlapping sets, such as for\n\n - 2D and 3D Percolation\n\n - Disease exposure\n\n - Contact tracing\n\n - Labeling clusters\n\n - How can I learn more?\n\n - https://algs4.cs.princeton.edu/15uf/\n\n - Purchase access to the full support videos\n\n - Includes detailed coverage of theory, code, and tests\n\n - Coming soon!\n\n\n\n# Support\n\n - How do I request a change?\n\n - Please submit an issue or a pull request\n\n - How fast will my request be added?\n\n - Probably not very fast for requests outside of a support package because this repo is maintained by a working professional\n\n - If you require fast, predictable responses, please purchase a support package\n\n - Can support package be purchased?\n\n - Yes, various support packages can be purchased and customized for your needs. Support areas available include:\n\n - On demand support videos\n\n - 1:1 and team coaching\n\n - New features and other modifications\n\n\n\n## License\n\n\n\nLicensed under\n\n\n\n - MIT license\n\n ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n", "file_path": "README.md", "rank": 43, "score": 1.4556851017379673 } ]
Rust
parser/src/typing/phase1.rs
thejohncrafter/projets-2020
2c4525f5d241a67663a1f74f2326abd2890c44fa
use std::collections::HashSet; use crate::ast::{Structure, Function, Exp, StaticType}; use super::data::*; use super::visit::IntoVisitor; use super::assign::collect_all_assign; use super::func_signatures::{build_signature, is_callable_with_exactly, format_signature}; fn is_reserved_name(n: &String) -> bool { match n.as_str() { "div" | "print" | "println" => true, _ => false } } impl<'a> IntoVisitor<'a, InternalTypingResult<'a>> for GlobalEnvironmentState<'a> { fn visit_structure(&mut self, s: Structure<'a>) -> InternalTypingResult<'a> { if self.structures.contains_key(&s.name.name) { return Err( (s.span, format!("The ident '{}' is already taken by another structure", s.name.name).to_string()).into()); } self.known_types.insert(StaticType::Struct(s.name.name.clone())); for field in &s.fields { let fname = &field.name.name; if self.all_structure_fields.contains_key(fname) { return Err( (field.span, format!("The field name '{}' is already taken by this structure or another one", fname).to_string()).into() ); } if !self.known_types.contains(&field.ty) { return Err( (field.span, format!("This type is malformed, either it is not a primitive, or it's not this structure itself or another structure declared before").to_string()).into() ); } self.all_structure_fields.insert( fname.to_string().clone(), field.ty.clone() ); self.structure_name_by_fields.insert( fname.to_string().clone(), s.name.name.clone() ); if s.mutable { self.all_mutable_fields.insert(fname.to_string().clone()); } } self.structures.insert(s.name.name.clone(), s); Ok(()) } fn visit_function(&mut self, f: Function<'a>) -> InternalTypingResult<'a> { if is_reserved_name(&f.name) { return Err( (f.span, format!("The ident '{}' is a reserved name, it cannot be used as a function name", f.name).to_string()).into() ); } if !self.known_types.contains(&f.ret_ty) { return Err((f.span, format!("The return type '{}' of '{}' is malformed, either it's not a primitive or a declared structure", f.ret_ty, 
f.name).to_string()).into()); } let mut names: HashSet<String> = HashSet::new(); for param in &f.params { if names.contains(&param.name.name) { return Err((param.span, format!("The ident '{}' is already taken by another argument", param.name.name).to_string()).into()); } names.insert(param.name.name.clone()); if !self.known_types.contains(&param.ty) { return Err( (param.span, format!("This type is malformed, either it is not a primitive or it's not a declared before structure").to_string()).into() ); } } for sig in self.function_sigs.entry(f.name.clone()).or_default() { if is_callable_with_exactly(f.params.iter().map(|arg| arg.ty.clone()).collect(), &sig) { return Err( (f.span, format!( "The function '{}' has already been defined with the exact same signature ({}), add type annotations to disambiguate or remove duplicates", f.name, format_signature(f.params.into_iter().map(|arg| arg.ty).collect()) ).to_string()).into() ); } } self.function_sigs.entry(f.name.clone()).or_default().push(build_signature(&f)); self.functions.entry(f.name.clone()).or_default().push(f); Ok(()) } fn visit_expression(&mut self, ge: Exp<'a>) -> InternalTypingResult<'a> { self.global_variables.extend(collect_all_assign(&ge).into_iter().map(|l_ident| l_ident.name)); self.global_expressions.push(ge); Ok(()) } }
use std::collections::HashSet; use crate::ast::{Structure, Function, Exp, StaticType}; use super::data::*; use super::visit::IntoVisitor; use super::assign::collect_all_assign; use super::func_signatures::{build_signature, is_callable_with_exactly, format_signature}; fn is_reserved_name(n: &String) -> bool { match n.as_str() { "div" | "print" | "println" => true, _ => false } } impl<'a> IntoVisitor<'a, InternalTypingResult<'a>> for GlobalEnvironmentState<'a> { fn visit_structure(&mut self, s: Structure<'a>) -> InternalTypingResult<'a> { if self.structures.contains_key(&s.name.name) { return Err( (s.span, format!("The ident '{}' is already taken by another structure", s.name.name).to_string()).into()); } self.known_types.insert(StaticType::Struct(s.name.name.clone())); for field in &s.fields { let fname = &field.name.name; if self.all_structure_fields.contains_key(fname) { return Err( (field.span, format!("The field name '{}' is already taken by this structure or another one", fname).to_string()).into() ); } if !self.known_types.contains(&field.ty) { return Err( (field.span, format!("This type is malformed, either it is not a primitive, or it's not this structure itself or another structure declared before").to_string()).into() ); } self.all_structure_fields.insert( fname.to_string().clone(), field.ty.clone() ); self.structure_name_by_fields.insert( fname.to_string().clone(), s.name.name.clone() ); if s.mutable { self.all_mutable_fields.insert(fname.to_string().clone()); } } self.structures.insert(s.name.name.clone(), s); Ok(()) } fn visit_function(&mut self, f: Function<'a>) -> InternalTypingResult<'a> { if is_reserved_name(&f.name) { return Err( (f.span, format!("The ident '{}' is a reserved name, it cannot be used as a function name", f.name).to_string()).into() ); } if !self.known_types.contains(&f.ret_ty) { return Err((f.span, format!("The return type '{}' of '{}' is malformed, either it's not a primitive or a declared structure", f.ret_ty, 
f.name).to_string()).into()); } let mut names: HashSet<String> = HashSet::new(); for param in &f.params { if names.contains(&param.name.name) { return Err((param.span, format!("The ident '{}' is already taken by another argument", param.name.name).to_string()).into()); } names.insert(param.name.name.clone()); if !self.known_types.contains(&param.ty) { return
; } } for sig in self.function_sigs.entry(f.name.clone()).or_default() { if is_callable_with_exactly(f.params.iter().map(|arg| arg.ty.clone()).collect(), &sig) { return Err( (f.span, format!( "The function '{}' has already been defined with the exact same signature ({}), add type annotations to disambiguate or remove duplicates", f.name, format_signature(f.params.into_iter().map(|arg| arg.ty).collect()) ).to_string()).into() ); } } self.function_sigs.entry(f.name.clone()).or_default().push(build_signature(&f)); self.functions.entry(f.name.clone()).or_default().push(f); Ok(()) } fn visit_expression(&mut self, ge: Exp<'a>) -> InternalTypingResult<'a> { self.global_variables.extend(collect_all_assign(&ge).into_iter().map(|l_ident| l_ident.name)); self.global_expressions.push(ge); Ok(()) } }
Err( (param.span, format!("This type is malformed, either it is not a primitive or it's not a declared before structure").to_string()).into() )
call_expression
[ { "content": "pub fn is_builtin_function(name: &String) -> bool {\n\n match name.as_str() {\n\n \"println\" | \"div\" | \"print\" => true,\n\n _ => false\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TypedDecls<'a> {\n\n pub functions: HashMap<String, Vec<Function<'a>>>,\n\n pub structures: HashMap<String, Structure<'a>>,\n\n pub global_expressions: Vec<Exp<'a>>\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct GlobalEnvironmentState<'a> {\n\n pub structures: HashMap<String, Structure<'a>>,\n\n pub functions: HashMap<String, Vec<Function<'a>>>,\n\n pub function_sigs: HashMap<String, Vec<FuncSignature>>,\n\n pub structure_name_by_fields: HashMap<String, String>,\n", "file_path": "parser/src/typing/data.rs", "rank": 0, "score": 286881.96434900403 }, { "content": "fn type_user_function<'a>(tcx: &mut TypingContext<'a>, span: &Span<'a>, name: &String, args: &mut Vec<Exp<'a>>) -> PartialTypingResult<'a> {\n\n let entity_types: Vec<StaticType>;\n\n\n\n if tcx.structures.contains_key(name) {\n\n entity_types = tcx.structures[name].fields.iter().map(|field| field.ty.clone()).collect();\n\n } else if tcx.functions.contains_key(name) && tcx.functions[name].len() == 1 {\n\n entity_types = tcx.functions[name].first().unwrap().1.clone();\n\n } else {\n\n entity_types = vec![StaticType::Any; args.len()];\n\n }\n\n\n\n for (arg, expected_ty) in args.iter_mut().zip(entity_types.iter()) {\n\n type_expression(tcx, arg)?;\n\n\n\n if !is_compatible(arg.static_ty.clone(), expected_ty.clone()) {\n\n return Err(\n\n (arg.span, format!(\"Incompatible types. 
Expected '{}', found '{}'\", expected_ty, arg.static_ty).to_string()).into()\n\n );\n\n }\n\n }\n", "file_path": "parser/src/typing/fill.rs", "rank": 2, "score": 276291.0195928105 }, { "content": "fn type_complex_assign<'a>(tcx: &mut TypingContext<'a>, name: &String, span: &Span<'a>, prefix_e: &mut Exp<'a>, e: &mut Exp<'a>) -> InternalTypingResult<'a> {\n\n type_expression(tcx, prefix_e)?;\n\n\n\n // If prefix_e is known, we can check if the field exist.\n\n if !tcx.field_exist_in(&prefix_e.static_ty, name) {\n\n return Err(\n\n (span.clone(), format!(\"Field '{}' does not exist for the type '{}'\", name, prefix_e.static_ty).to_string()).into()\n\n );\n\n }\n\n\n\n // If we do not know the type, we can just assume the static type to be the one which is\n\n // related to the unique structure containing this field if it exist at all.\n\n if prefix_e.static_ty == StaticType::Any {\n\n match tcx.structure_name_by_fields.get(name) {\n\n None => {\n\n return Err(\n\n (span.clone(), format!(\"Field '{}' is not declared anywhere in any structure\", name).to_string()).into());\n\n },\n\n Some(s) => {\n\n prefix_e.static_ty = StaticType::Struct(s.clone());\n", "file_path": "parser/src/typing/fill.rs", "rank": 3, "score": 271910.2186350329 }, { "content": "fn run(file_name: &str, parse_only: bool, _type_only: bool) -> Result<(), String> {\n\n let path = Path::new(file_name);\n\n let display = path.display();\n\n\n\n let mut file = match File::open(&path) {\n\n Err(why) => panic!(\"Couldn't open {} : {}\", display, why),\n\n Ok(file) => file,\n\n };\n\n \n\n let mut s = String::new();\n\n file.read_to_string(&mut s).map_err(|e| e.to_string())?;\n\n\n\n let ast = parse(file_name, &s).map_err(|e| e.to_string())?;\n\n if !parse_only {\n\n let typed_decls = static_type(ast).map_err(|e| e.to_string())?;\n\n\n\n println!(\"{:?}\", typed_decls);\n\n } else {\n\n println!(\"{:?}\", ast);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "parser/src/main.rs", "rank": 4, "score": 
223328.1204186511 }, { "content": "fn is_one_of_or_any<'a>(alpha: &'a Exp<'a>, ts: &[StaticType]) -> bool {\n\n if alpha.static_ty == StaticType::Any {\n\n return true;\n\n }\n\n\n\n return ts.into_iter().any(|t| *t == alpha.static_ty)\n\n}\n\n\n", "file_path": "parser/src/typing/fill.rs", "rank": 5, "score": 222908.15112371757 }, { "content": "fn is_any_or<'a>(alpha: &'a Exp<'a>, t: StaticType) -> bool {\n\n return alpha.static_ty == StaticType::Any || alpha.static_ty == t;\n\n}\n\n\n", "file_path": "parser/src/typing/fill.rs", "rank": 6, "score": 197835.62889107323 }, { "content": "pub fn type_expression<'a>(tcx: &mut TypingContext<'a>, expr: &mut Exp<'a>) -> InternalTypingResult<'a> {\n\n match expr.val.as_mut() {\n\n ExpVal::Return(m_e) => {\n\n if let Some(e) = m_e {\n\n type_expression(tcx, e)?;\n\n }\n\n expr.static_ty = StaticType::Any;\n\n },\n\n ExpVal::Assign(lv, e) => {\n\n match lv.in_exp.as_mut() {\n\n None => {\n\n type_simple_assign(tcx, lv, e)?;\n\n },\n\n Some(prefix_e) => {\n\n type_complex_assign(tcx, &lv.name, &lv.span, prefix_e, e)?;\n\n }\n\n }\n\n },\n\n ExpVal::BinOp(op, a, b) => {\n\n type_expression(tcx, a)?;\n", "file_path": "parser/src/typing/fill.rs", "rank": 7, "score": 193759.95216940087 }, { "content": "pub fn type_simple_assign<'a>(tcx: &mut TypingContext<'a>, lv: &mut LValue<'a>, e: &mut Exp<'a>) -> InternalTypingResult<'a> {\n\n type_expression(tcx, e)?;\n\n\n\n match tcx.type_from_env_name(&lv.name) {\n\n None => {\n\n return Err(\n\n (lv.span, format!(\"Compiler error, '{}' was not found in the global typing context, unreachable variable. 
Environment was {:?}\", &lv.name, tcx.environment).to_string()).into()\n\n );\n\n },\n\n Some(st) => {\n\n if !is_compatible(st.clone(), e.static_ty.clone()) {\n\n return Err(\n\n (e.span, format!(\"Expected on the lhs '{}' type, found: '{}' on the rhs\", st, e.static_ty).to_string()).into()\n\n );\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "parser/src/typing/fill.rs", "rank": 8, "score": 189423.1334649552 }, { "content": "fn extract_labels(f: &Function) -> HashMap<String, usize> {\n\n struct Receiver {\n\n map: HashMap<String, usize>,\n\n next_id: usize,\n\n }\n\n\n\n impl Receiver {\n\n fn new() -> Self {\n\n Receiver {\n\n map: HashMap::new(),\n\n next_id: 0,\n\n }\n\n }\n\n\n\n fn recv_label(&mut self, label: &String) {\n\n if !self.map.contains_key(label) {\n\n self.map.insert(label.clone(), self.next_id);\n\n self.next_id += 1;\n\n }\n\n }\n", "file_path": "ir/src/lir_to_asm.rs", "rank": 9, "score": 182642.2323507421 }, { "content": "fn visit_returns<'a>(e: &Exp<'a>, expected: &StaticType) -> InternalTypingResult<'a> {\n\n \n\n fn visit_else_returns<'a>(else_: &Else<'a>, expected: &StaticType) -> InternalTypingResult<'a> {\n\n match else_.val.as_ref() {\n\n ElseVal::End => {},\n\n ElseVal::Else(b) => {\n\n for e in &b.val {\n\n visit_returns(e, expected)?;\n\n }\n\n },\n\n ElseVal::ElseIf(e, b, rest_) => {\n\n visit_returns(e, expected)?;\n\n for x in &b.val {\n\n visit_returns(x, expected)?;\n\n }\n\n visit_else_returns(&rest_, expected)?;\n\n }\n\n }\n\n\n\n Ok(())\n", "file_path": "parser/src/typing/returns.rs", "rank": 10, "score": 180962.5955697051 }, { "content": "fn fun_name_variants(name: &String, variants: usize) -> Vec<String> {\n\n (0..variants).into_iter().map(|i| format!(\"{}_{}\", name, i)).collect()\n\n}\n\n\n", "file_path": "ir/src/ast_to_hir.rs", "rank": 11, "score": 180250.87683548656 }, { "content": "fn read_file(name: &str) -> Result<String, String> {\n\n let path = Path::new(name);\n\n let display = path.display();\n\n\n\n 
let mut file = match File::open(&path) {\n\n Err(why) => panic!(\"Couldn't open {} : {}\", display, why),\n\n Ok(file) => file,\n\n };\n\n \n\n let mut s = String::new();\n\n file.read_to_string(&mut s).map_err(|e| e.to_string())?;\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": "sim/src/main.rs", "rank": 12, "score": 179980.68381266878 }, { "content": "fn read_file(name: &str) -> Result<String, String> {\n\n let path = Path::new(name);\n\n let display = path.display();\n\n\n\n let mut file = match File::open(&path) {\n\n Err(why) => panic!(\"Couldn't open {} : {}\", display, why),\n\n Ok(file) => file,\n\n };\n\n \n\n let mut s = String::new();\n\n file.read_to_string(&mut s).map_err(|e| e.to_string())?;\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": "ir/src/main.rs", "rank": 13, "score": 179980.68381266878 }, { "content": "fn read_file(name: &str) -> Result<String, String> {\n\n let path = Path::new(name);\n\n let display = path.display();\n\n\n\n let mut file = match File::open(&path) {\n\n Err(why) => panic!(\"Couldn't open for read {} : {}\", display, why),\n\n Ok(file) => file,\n\n };\n\n \n\n let mut s = String::new();\n\n file.read_to_string(&mut s).map_err(|e| e.to_string())?;\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": "compiler/src/main.rs", "rank": 14, "score": 179980.68381266878 }, { "content": "pub fn verify_implicit_return<'a>(func: &Function<'a>) -> InternalTypingResult<'a> {\n\n if !func.body.trailing_semicolon {\n\n match func.body.val.last() {\n\n None => {\n\n if !is_compatible(func.ret_ty.clone(), StaticType::Nothing) { // Empty body\n\n return Err(\n\n (func.span, format!(\"Empty function '{}' returning `nothing` while '{}' was expected\", func.name, func.ret_ty).to_string()).into());\n\n }\n\n },\n\n Some(last_expr) => {\n\n if !is_compatible(func.ret_ty.clone(), last_expr.static_ty.clone()) {\n\n return Err(\n\n (last_expr.span, format!(\"Invalid type for implicit return in function '{}', expected '{}', found: '{}'\", func.name, func.ret_ty, 
last_expr.static_ty).to_string()).into());\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(()) // Everything is okay.\n\n}\n", "file_path": "parser/src/typing/returns.rs", "rank": 15, "score": 177834.3961472971 }, { "content": "pub fn parse_and_type_file<'a>(file_name: &'a str, contents: &'a str) -> Result<TypedASTDeclarations<'a>, String> {\n\n static_type(parse(file_name, &contents).map_err(|e| e.to_string())?).map_err(|e| e.to_string())\n\n}\n", "file_path": "parser/src/lib.rs", "rank": 16, "score": 175483.60003067396 }, { "content": "fn parse_names<KW: Parse>(input: ParseStream) -> Result<Vec<(Ident, Type)>> {\n\n input.parse::<KW>()?;\n\n input.parse::<Token![:]>()?;\n\n\n\n let terms;\n\n bracketed!(terms in input);\n\n let terms = terms.parse_terminated::<_, Token![,]>(parse_typed)?;\n\n\n\n Ok(terms.into_iter().collect())\n\n}\n\n\n\npub struct TypesInfo {\n\n pub src_lifetime: Lifetime,\n\n pub span_ty: Type,\n\n}\n\n\n", "file_path": "parsergen/src/parser/input.rs", "rank": 17, "score": 169028.4916606113 }, { "content": "pub fn null(exp: &IRegexp) -> bool {\n\n match exp {\n\n IRegexp::Epsilon => true,\n\n IRegexp::Character(_) => false,\n\n IRegexp::Union(l, r) => null(l) || null(r),\n\n IRegexp::Concat(l, r) => null(l) && null(r),\n\n IRegexp::Star(_) => true,\n\n }\n\n}\n\n\n\n/*\n\n * Computes the FIRST set.\n\n */\n", "file_path": "automata/src/lexer/sets.rs", "rank": 18, "score": 168103.89359286957 }, { "content": "fn parse_typed(input: ParseStream) -> Result<(Ident, Type)> {\n\n let ident = input.parse()?;\n\n input.parse::<Token![:]>()?;\n\n let ty = input.parse()?;\n\n Ok((ident, ty))\n\n}\n\n\n", "file_path": "parsergen/src/parser/input.rs", "rank": 19, "score": 166582.34650933868 }, { "content": "fn verify_return_type<'a>(span: Span<'a>, found: Option<&Exp<'a>>, expected: &StaticType) -> InternalTypingResult<'a> {\n\n match found {\n\n None => {\n\n if expected != &StaticType::Any && expected != &StaticType::Nothing {\n\n Err(\n\n (span, 
format!(\"Mismatching return types, found nothing, expected: '{}'\", expected).to_string()).into()\n\n )\n\n } else { Ok(()) }\n\n },\n\n Some(expr) => {\n\n if !is_compatible(expr.static_ty.clone(), expected.clone()) {\n\n Err(\n\n (expr.span, format!(\"Mismatching return types, found: '{}', expected: '{}'\", expr.static_ty, expected).to_string()).into()\n\n )\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "parser/src/typing/returns.rs", "rank": 20, "score": 166168.7677925798 }, { "content": "pub fn build_signature(f: &Function) -> FuncSignature {\n\n let (ret, mut params): FuncSignature = (f.ret_ty.clone(), vec![]);\n\n\n\n for param in &f.params {\n\n params.push(param.ty.clone());\n\n }\n\n\n\n (ret, params)\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 21, "score": 162709.9598949015 }, { "content": "// Test if a certain set of StaticType match another signature.\n\n// Useful for ambiguity and duplication detection.\n\npub fn is_callable_with(params: &Vec<StaticType>, target_sig: &FuncSignature) -> bool {\n\n params\n\n .into_iter()\n\n .zip(target_sig.1.iter())\n\n .all(|(param_ty, target_type)| is_compatible(param_ty.clone(), target_type.clone()))\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 22, "score": 158373.3918019133 }, { "content": "pub fn is_callable_with_exactly(params: Vec<StaticType>, target_sig: &FuncSignature) -> bool {\n\n params\n\n .iter()\n\n .zip(target_sig.1.iter())\n\n .all(|(param_ty, target_type)| param_ty == target_type)\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 23, "score": 155813.92354358907 }, { "content": "fn request_input(name: &str, len: u32) -> Vec<bool> {\n\n if len == 1 {\n\n print!(\" {} (1 bit): \", name);\n\n } else {\n\n print!(\" {} ({} bits): \", name, len);\n\n }\n\n\n\n loop {\n\n io::stdout().flush().expect(\"IO error.\");\n\n \n\n let mut input = String::new();\n\n io::stdin()\n\n .read_line(&mut input)\n\n 
.expect(\" Failed to read line.\");\n\n let input = input.trim();\n\n\n\n if input.chars().all(|c| c == '0' || c == '1') {\n\n if input.len() != len as usize {\n\n if len == 1 {\n\n print!(\" Please enter 1 bit : \");\n", "file_path": "sim/src/main.rs", "rank": 24, "score": 155724.46992823842 }, { "content": "fn write_file(name: &str, contents: &str) -> Result<(), String> {\n\n let path = Path::new(name);\n\n let display = path.display();\n\n let cwd = env::current_dir().map_err(|err| err.to_string())?;\n\n let cwd_display = cwd.display();\n\n\n\n let mut file = match File::create(&path) {\n\n Err(why) => Err(format!(\"Couldn't open for write {} : {} (cwd: {})\", display, why, cwd_display)),\n\n Ok(file) => Ok(file),\n\n }?;\n\n\n\n match file.write_fmt(format_args!(\"{}\", contents)) {\n\n Ok(()) => Ok(()),\n\n Err(e) => Err(format!(\"{}\", e)),\n\n }?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler/src/main.rs", "rank": 25, "score": 155303.84106573678 }, { "content": "fn write_file(name: &str, contents: &str) -> Result<(), String> {\n\n let path = Path::new(name);\n\n let display = path.display();\n\n\n\n let mut file = match File::create(&path) {\n\n Err(why) => Err(format!(\"Couldn't open {} : {}\", display, why)),\n\n Ok(file) => Ok(file),\n\n }?;\n\n\n\n match file.write_fmt(format_args!(\"{}\", contents)) {\n\n Ok(()) => Ok(()),\n\n Err(e) => Err(format!(\"{}\", e)),\n\n }?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ir/src/main.rs", "rank": 26, "score": 155303.84106573678 }, { "content": "fn compile_hir(file_name: &str) -> Result<String, String> {\n\n let s = read_file(file_name)?;\n\n let res = parse_hir(file_name, &s);\n\n\n\n let res = match res {\n\n Ok(res) => res,\n\n Err(e) => return Err(e.to_string())\n\n };\n\n\n\n println!(\"** HIR **\");\n\n println!(\"{}\", res);\n\n\n\n println!();\n\n println!(\"** LIR **\");\n\n let compiled = hir_to_lir(&res).map_err(|e| format!(\"{}\", e))?;\n\n println!(\"{}\", compiled);\n\n \n\n println!();\n\n 
println!(\"** asm **\"); \n\n let asm = lir_to_asm(&compiled).map_err(|e| format!(\"{}\", e))?;\n\n println!(\"{}\", asm);\n\n\n\n Ok(asm)\n\n}\n\n\n", "file_path": "ir/src/main.rs", "rank": 27, "score": 154118.08532978044 }, { "content": "fn compile_lir(file_name: &str) -> Result<String, String> {\n\n let s = read_file(file_name)?;\n\n let res = parse_lir(file_name, &s);\n\n\n\n let res = match res {\n\n Ok(res) => res,\n\n Err(e) => return Err(e.to_string())\n\n };\n\n\n\n println!(\"** LIR **\");\n\n println!(\"{}\", res);\n\n\n\n println!();\n\n println!(\"** asm **\");\n\n\n\n let compiled = lir_to_asm(&res).map_err(|e| format!(\"{}\", e))?;\n\n println!(\"{}\", compiled);\n\n\n\n Ok(compiled)\n\n}\n\n\n", "file_path": "ir/src/main.rs", "rank": 28, "score": 154118.08532978044 }, { "content": "pub fn is_this_call_ambiguous(args: Vec<StaticType>, functions: &Vec<FuncSignature>) -> bool {\n\n // Functions cannot be empty.\n\n let weights: Vec<i32> = functions.iter()\n\n .filter(|sig| is_callable_with(&args, sig))\n\n .map(|sig| compute_selectivity_weight(&args, sig))\n\n .collect();\n\n let optimal_call_weight = weights.iter().max().unwrap();\n\n\n\n //FIXME: clean me.\n\n //println!(\"[DEBUG] Ambiguous detection, here's the weights: {:?}, optimal call weight: {}\", weights, optimal_call_weight);\n\n //println!(\"[DEBUG] Ambiguous detection, here's the args: {:?}, here's the functions: {:?}\", args, functions);\n\n\n\n optimal_call_weight > &0 && weights.iter().filter(|w| w == &optimal_call_weight).count() > 1\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 29, "score": 154016.03630290698 }, { "content": "fn type_else<'a>(tcx: &mut TypingContext<'a>, else_: &mut Else<'a>) -> PartialTypingResult<'a> {\n\n match else_.val.as_mut() {\n\n ElseVal::End => Ok(StaticType::Nothing),\n\n ElseVal::Else(block) => {\n\n tcx.enter_in_local_scope();\n\n type_block(tcx, block)?;\n\n tcx.restore_previous_scope();\n\n Ok(block.static_ty.clone())\n\n 
},\n\n ElseVal::ElseIf(e, block, else_) => {\n\n type_expression(tcx, e)?;\n\n tcx.enter_in_local_scope();\n\n type_block(tcx, block)?;\n\n tcx.restore_previous_scope();\n\n let ret = type_else(tcx, else_)?;\n\n if ret == block.static_ty {\n\n Ok(block.static_ty.clone())\n\n } else {\n\n Ok(StaticType::Any)\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "parser/src/typing/fill.rs", "rank": 30, "score": 152712.42081686397 }, { "content": "pub fn type_block<'a>(tcx: &mut TypingContext<'a>, block: &mut Block<'a>) -> InternalTypingResult<'a> {\n\n for exp in &mut block.val {\n\n type_expression(tcx, exp)?;\n\n }\n\n\n\n if block.trailing_semicolon {\n\n block.static_ty = match block.val.last() {\n\n None => StaticType::Nothing,\n\n Some(ret_exp) => ret_exp.static_ty.clone()\n\n };\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "parser/src/typing/fill.rs", "rank": 31, "score": 149662.38469385804 }, { "content": "fn is_native_function(n: &str) -> bool {\n\n match n {\n\n \"pow\" => true,\n\n _ => false\n\n }\n\n}\n\n\n", "file_path": "ir/src/ast_to_hir.rs", "rank": 32, "score": 149312.25410894369 }, { "content": "pub fn is_compatible(a: StaticType, b: StaticType) -> bool {\n\n a == StaticType::Any || b == StaticType::Any || a == b\n\n}\n\n\n", "file_path": "parser/src/typing/data.rs", "rank": 33, "score": 148085.03843851952 }, { "content": "fn build_rules(types_info: &TypesInfo, token_types: &HashMap<String, &Type>, rules: &[Rule]) -> TokenStream {\n\n rules.iter().enumerate().map(|(i, rule)| {\n\n let src_lifetime = &types_info.src_lifetime;\n\n let span_ty = &types_info.span_ty;\n\n let holder_ident = Ident::new(\"Holder\", Span::mixed_site());\n\n let fn_ident = Ident::new(&format!(\"rule_{}\", i + 1), Span::mixed_site());\n\n let fn_return_type = token_types.get(&rule.token.to_string()).unwrap();\n\n let fn_return_variant = Ident::new(&format!(\"{}\", rule.token), Span::mixed_site());\n\n let closure_ident = Ident::new(&format!(\"prod_{}\", i + 1), 
Span::mixed_site());\n\n let body = &rule.block.contents;\n\n\n\n let span_ident = Ident::new(\"span\", Span::mixed_site());\n\n \n\n let args_decl = rule.expand.iter().filter_map(|(x, ident)| {\n\n if let Some(arg_ident) = x {\n\n let token = ident.to_string();\n\n let ty = token_types.get(&token).unwrap();\n\n let arg_name = Ident::new(&arg_ident.to_string(), Span::mixed_site());\n\n let exp: TokenStream = quote! {\n\n #arg_name: #ty\n", "file_path": "parsergen/src/parser/macro_def.rs", "rank": 34, "score": 146635.85189035867 }, { "content": "fn build_holder(src_lifetime: &Lifetime, tokens: &[&(Ident, Type)]) -> TokenStream {\n\n let holder_ident = Ident::new(\"Holder\", Span::mixed_site());\n\n\n\n let variants = tokens.iter().map(|(ident, ty)| {\n\n let e: TokenStream = (quote! {#ident(#ty)}).into();\n\n e\n\n });\n\n\n\n let intos = tokens.iter().map(|(ident, ty)| {\n\n let into_name = Ident::new(&format!(\"into_{}\", ident), Span::mixed_site());\n\n quote! {\n\n fn #into_name(self) -> #ty {\n\n match self {\n\n #holder_ident::#ident(x) => x,\n\n _ => panic!()\n\n }\n\n } \n\n }\n\n }).flatten().collect::<TokenStream>();\n\n\n", "file_path": "parsergen/src/parser/macro_def.rs", "rank": 35, "score": 146167.46492543363 }, { "content": "fn build_decls(src_lifetime: &Lifetime, tokens: &[(Ident, Type)]) -> TokenStream {\n\n tokens.iter().enumerate().map(|(i, typed)| {\n\n let ident = Ident::new(typed.0.to_string().as_str(), Span::mixed_site());\n\n\n\n let i = i + 1;\n\n let ty = &typed.1;\n\n let holder_ident = Ident::new(\"Holder\", Span::mixed_site());\n\n \n\n quote! 
{\n\n fn #ident<#src_lifetime>(x: #ty) -> (usize, #holder_ident<#src_lifetime>) {(#i, #holder_ident::#ident(x))}\n\n }\n\n }).flatten().collect()\n\n}\n\n\n", "file_path": "parsergen/src/parser/macro_def.rs", "rank": 36, "score": 146167.46492543363 }, { "content": "pub fn is_behaved(c: char) -> bool {\n\n (c != '\\\\') && (c != '\"') && (c != '\\n')\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]\n\npub enum Character {\n\n Char(char),\n\n Alpha,\n\n Num,\n\n Behaved,\n\n Any,\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum Regexp {\n\n Epsilon,\n\n Character(Character),\n\n Union(Box<Regexp>, Box<Regexp>),\n\n Concat(Box<Regexp>, Box<Regexp>),\n\n Star(Box<Regexp>),\n", "file_path": "automata/src/lexer/types.rs", "rank": 37, "score": 145465.2970360883 }, { "content": "fn run(file_name: &str) -> Result<(), String> {\n\n let s = read_file(file_name)?;\n\n let res = parse_netlist(file_name, &s);\n\n\n\n let res = match res {\n\n Ok(res) => res,\n\n Err(e) => return Err(e.to_string())\n\n };\n\n\n\n let graph = build_graph(res)?;\n\n let list = sort_graph(graph)?;\n\n\n\n let (mut runner, inputs, outputs) = Runner::new(&list);\n\n \n\n for i in 0.. 
{\n\n println!();\n\n println!(\"Tick #{}\", i);\n\n\n\n if outputs.len() == 0 {\n\n println!(\"No outputs.\");\n", "file_path": "sim/src/main.rs", "rank": 38, "score": 143077.65983114537 }, { "content": "fn test(test_name: &str) -> Result<(), String> {\n\n let netlist_name = &format!(\"{}.net\", test_name);\n\n let input_name = &format!(\"{}.in\", test_name);\n\n let output_name = &format!(\"{}.out\", test_name);\n\n\n\n let netlist = read_file(netlist_name)?;\n\n let input = read_file(input_name)?;\n\n let output = read_file(output_name)?;\n\n\n\n fn read_bits_sequence(src: &str) -> Result<Vec<Vec<Vec<bool>>>, String> {\n\n let mut frames = Vec::new();\n\n let mut lists = Vec::new();\n\n let mut curr_list = Vec::new();\n\n let mut empty_line = true;\n\n\n\n src.chars().try_for_each(|c| -> Result<(), String> {\n\n let mut flush_curr = |curr_list: &mut Vec<_>, lists: &mut Vec<_>| {\n\n if !empty_line {\n\n let mut v = Vec::new();\n\n std::mem::swap(curr_list, &mut v);\n", "file_path": "sim/src/main.rs", "rank": 39, "score": 143077.65983114537 }, { "content": "// We want to know if target improves the precision of the original type.\n\nfn is_valuable_type(origin: Option<&StaticType>, target: Option<&StaticType>) -> bool {\n\n match origin {\n\n None => true,\n\n Some(s) => match target {\n\n None => false,\n\n Some(t) => match (s, t) {\n\n (StaticType::Any, _) => true,\n\n (_, StaticType::Any) => false,\n\n (_, _) => true\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "parser/src/typing/expr.rs", "rank": 40, "score": 141094.66356380342 }, { "content": "pub fn format_signature(ts: Vec<StaticType>) -> String {\n\n ts.into_iter().map(|t| format!(\"::{}\", t)).collect::<Vec<String>>().join(\", \")\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 41, "score": 140051.7902680715 }, { "content": "fn to_env(known: &HashSet<String>) -> EnvironmentMap {\n\n known.into_iter().map(|t| (t.clone(), vec![EnvVariable::init()])).collect()\n\n}\n\n\n", 
"file_path": "parser/src/typing/main.rs", "rank": 42, "score": 136792.48198754434 }, { "content": "pub fn collect_all_assign<'a>(e: &Exp<'a>) -> AssignationList<'a> {\n\n fn collect_else<'a>(u: &Else<'a>) -> AssignationList<'a> {\n\n match u.val.as_ref() {\n\n ElseVal::End => vec![],\n\n ElseVal::Else(b) => collect_all_assign_in_array(&b.val),\n\n ElseVal::ElseIf(e, b, rest_) => collect_all_assign(&e)\n\n .into_iter()\n\n .chain(collect_all_assign_in_array(&b.val).into_iter())\n\n .chain(collect_else(&rest_).into_iter())\n\n .collect()\n\n }\n\n }\n\n\n\n // Perform a DFS on e to smoke out all Assign\n\n match e.val.as_ref() {\n\n ExpVal::Return(e) => match e {\n\n None => vec![],\n\n Some(e) => collect_all_assign(&e)\n\n },\n\n ExpVal::Assign(lv, e) => {\n", "file_path": "parser/src/typing/assign.rs", "rank": 43, "score": 133245.7658465369 }, { "content": "pub fn collect_all_assign_in_array<'a>(a: &Vec<Exp<'a>>) -> AssignationList<'a> {\n\n a.iter().flat_map(collect_all_assign).collect()\n\n}\n\n\n", "file_path": "parser/src/typing/assign.rs", "rank": 44, "score": 126450.70931784033 }, { "content": "pub fn verify_explicit_returns<'a>(block: &Block<'a>, expected: StaticType) -> InternalTypingResult<'a> {\n\n for e in &block.val {\n\n visit_returns(e, &expected)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "parser/src/typing/returns.rs", "rank": 45, "score": 124385.72396474206 }, { "content": "pub fn match_score(a: &StaticType, b: &StaticType) -> i32 {\n\n if *a == StaticType::Any || *b == StaticType::Any {\n\n 0\n\n } else if *a == *b {\n\n 1\n\n } else {\n\n -1\n\n }\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 46, "score": 121791.50896139527 }, { "content": "fn main() -> Result<(), String> {\n\n let matches = App::new(\"sysnum-2020\")\n\n .version(\"1.0\")\n\n .author(\"Julien Marquet\")\n\n .subcommand(SubCommand::with_name(\"run\")\n\n .about(\"Interactively simulates the given netlist\")\n\n .arg(Arg::with_name(\"input\")\n\n 
.help(\"The netlist to simulate\")\n\n .required(true)\n\n .index(1)))\n\n .subcommand(SubCommand::with_name(\"test\")\n\n .about(\"Runs the given test (see folder tests/)\")\n\n .arg(Arg::with_name(\"input\")\n\n .help(\"The netlist to simulate\")\n\n .required(true)\n\n .index(1)))\n\n .get_matches();\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"run\") {\n\n let file_name = matches.value_of(\"input\").unwrap();\n\n run(file_name)?;\n\n } else if let Some(matches) = matches.subcommand_matches(\"test\") {\n\n let test_name = matches.value_of(\"input\").unwrap();\n\n test(test_name)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "sim/src/main.rs", "rank": 47, "score": 121397.41274155145 }, { "content": "fn parse_rule_lhs(input: ParseStream) -> Result<(Ident, Vec<(Option<Ident>, Ident)>)> {\n\n let token = input.parse::<Ident>()?;\n\n input.parse::<Token![->]>()?;\n\n \n\n let mut expand = Vec::new();\n\n\n\n while !input.is_empty() {\n\n let ident = input.parse::<Ident>()?;\n\n \n\n let e = if input.peek(Token![:]) {\n\n input.parse::<Token![:]>()?;\n\n let name = input.parse::<Ident>()?;\n\n (Some(ident), name)\n\n } else {\n\n (None, ident)\n\n };\n\n\n\n expand.push(e)\n\n }\n\n\n", "file_path": "parsergen/src/parser/input.rs", "rank": 48, "score": 120961.31733636867 }, { "content": "fn from_static_type(s: StaticType) -> Option<hir::Type> {\n\n match s {\n\n StaticType::Any => None,\n\n StaticType::Nothing => Some(hir::Type::Nothing),\n\n StaticType::Int64 => Some(hir::Type::Int64),\n\n StaticType::Bool => Some(hir::Type::Bool),\n\n StaticType::Str => Some(hir::Type::Str),\n\n StaticType::Struct(s) => Some(hir::Type::Struct(s))\n\n }\n\n}\n\n\n", "file_path": "ir/src/ast_to_hir.rs", "rank": 49, "score": 118666.72747193644 }, { "content": "fn check_tokens(tokens: &[&Ident]) -> Result<()> {\n\n tokens.iter().enumerate().try_for_each(|(i, ident)| {\n\n if tokens[0..i].iter().any(|other| other.to_string() == ident.to_string()) {\n\n 
Err(Error::new(ident.span(), \"Already defined.\"))\n\n } else {\n\n Ok(())\n\n }\n\n })\n\n}\n\n\n", "file_path": "parsergen/src/parser/macro_def.rs", "rank": 50, "score": 111215.0813085041 }, { "content": "pub fn last(exp: &IRegexp) -> CSet {\n\n match exp {\n\n IRegexp::Epsilon => CSet::new(),\n\n IRegexp::Character(c) => {\n\n let mut set = CSet::new();\n\n set.insert(c.clone());\n\n set\n\n },\n\n IRegexp::Union(l, r) => CSet::union(&last(l), &last(r)).cloned().collect(),\n\n IRegexp::Concat(l, r) => {\n\n if null(r) {\n\n CSet::union(&last(l), &last(r)).cloned().collect()\n\n } else {\n\n last(r)\n\n }\n\n },\n\n IRegexp::Star(e) => last(e),\n\n }\n\n}\n\n\n\n/*\n\n * Computes the FOLLOW set.\n\n */\n", "file_path": "automata/src/lexer/sets.rs", "rank": 51, "score": 108031.03741497821 }, { "content": "pub fn first(exp: &IRegexp) -> CSet {\n\n match exp {\n\n IRegexp::Epsilon => CSet::new(),\n\n IRegexp::Character(c) => {\n\n let mut set = CSet::new();\n\n set.insert(c.clone());\n\n set\n\n },\n\n IRegexp::Union(l, r) => CSet::union(&first(l), &first(r)).cloned().collect(),\n\n IRegexp::Concat(l, r) => {\n\n if null(l) {\n\n CSet::union(&first(l), &first(r)).cloned().collect()\n\n } else {\n\n first(l)\n\n }\n\n },\n\n IRegexp::Star(e) => first(e),\n\n }\n\n}\n\n\n\n/*\n\n * Coputes the LAST set.\n\n */\n", "file_path": "automata/src/lexer/sets.rs", "rank": 52, "score": 108031.03741497821 }, { "content": "pub fn compute_selectivity_weight(params: &Vec<StaticType>, target_sig: &FuncSignature) -> i32 {\n\n params\n\n .iter()\n\n .zip(target_sig.1.iter())\n\n .map(|(param_ty, target_ty)| match_score(param_ty, target_ty))\n\n .sum()\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 53, "score": 105667.35745370695 }, { "content": "pub fn compute_ambiguous_signature(args: Vec<StaticType>, functions: &Vec<FuncSignature>) -> Option<Vec<StaticType>> {\n\n // The idea is simple\n\n // If an ambiguous signature exist, then it must be a (f_i, f_j) 
pair such that for all type in\n\n // the signature t_k^(i) is compatible with t_k^(j) *and* such that f_i is callable with args\n\n // and f_j callable with args.\n\n // we return the signature enriched from the args, that is, each time there is Any and we have\n\n // a more valuable type, we replace it.\n\n // So we just do O(N^2 S) to find such a sig and compute it.\n\n\n\n let n = functions.len();\n\n\n\n for i in 0..n {\n\n for j in i + 1..n {\n\n let f_i = &functions[i];\n\n let f_j = &functions[j];\n\n\n\n if args\n\n .iter()\n\n .zip(f_i.1.iter()).zip(f_j.1.iter())\n\n .all(|((arg, param_i), param_j)| is_compatible(param_i.clone(), param_j.clone()) && is_compatible(arg.clone(), param_i.clone()) && is_compatible(arg.clone(), param_j.clone())) {\n\n return Some(compute_enriched_signature(args, f_i.1.iter().cloned().collect(), f_j.1.iter().cloned().collect()));\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 54, "score": 104406.74189035442 }, { "content": "fn raise_no_such_operation_err<'a, T: fmt::Display>(span: Span<'a>, op: T, ts: Vec<&StaticType>) -> InternalTypingResult<'a> {\n\n Err((span, format!(\n\n \"No such operation '{}' for signature ({})\", \n\n op,\n\n format_signature(ts.into_iter().cloned().collect())\n\n )).into())\n\n}\n\n\n", "file_path": "parser/src/typing/fill.rs", "rank": 55, "score": 102379.84482154073 }, { "content": "pub fn follow(c: &IChar, exp: &IRegexp) -> CSet {\n\n match exp {\n\n IRegexp::Epsilon | IRegexp::Character(_) => CSet::new(),\n\n IRegexp::Union(l, r) => CSet::union(&follow(c, l), &follow(c, r)).cloned().collect(),\n\n IRegexp::Concat(l, r) => {\n\n let follow_l = follow(c, l);\n\n let follow_r = follow(c, r);\n\n let u = CSet::union(&follow_l, &follow_r);\n\n \n\n if last(l).contains(&c) {\n\n u.chain(first(r).iter()).cloned().collect()\n\n } else {\n\n u.cloned().collect()\n\n }\n\n },\n\n IRegexp::Star(e) => {\n\n if last(e).contains(&c) {\n\n 
CSet::union(&follow(c, e), &first(e)).cloned().collect()\n\n } else {\n\n follow(c, e)\n\n }\n\n },\n\n }\n\n}\n\n\n", "file_path": "automata/src/lexer/sets.rs", "rank": 56, "score": 101727.7741907083 }, { "content": "pub fn lir_to_asm(source: &Source) -> Result<String, Error> {\n\n let mut s = String::new();\n\n let asm = &mut s;\n\n \n\n let global = GlobalRegistry::new(&source.globals);\n\n let mut fn_ids = HashMap::new();\n\n let mut reg = StringRegistry::new();\n\n\n\n source.functions.iter().enumerate().try_for_each(|(i, f)| {\n\n if fn_ids.contains_key(&f.name) {\n\n Err(format!(\"[LIR] Function \\\"{}\\\" is not uniquely defined.\", f.name))\n\n } else {\n\n fn_ids.insert(f.name.clone(), i);\n\n Ok(())\n\n }\n\n })?;\n\n\n\n let main_id = match fn_ids.get(\"main\") {\n\n Some(id) => id,\n\n None => Err(\"[LIR] No \\\"main\\\" function !\".to_string())?\n", "file_path": "ir/src/lir_to_asm.rs", "rank": 57, "score": 99739.34125545999 }, { "content": "pub fn build_graph(netlist: Netlist) -> Result<OpsGraph, String> {\n\n // Maps the variable names to :\n\n // * their identifier\n\n // * their address\n\n // * their length\n\n let mut addresses: HashMap<&str, (usize, usize, u32)> = HashMap::new();\n\n let mut mem_size: usize = 0;\n\n\n\n netlist.vars.iter().enumerate().try_for_each(|(id, (name, ty))| {\n\n let len = match ty {\n\n ValueType::Bit => 1,\n\n ValueType::BitArray(k) => *k\n\n // We checked during parsing that k != 0.\n\n };\n\n \n\n let prev = addresses.insert(name, (id, mem_size, len));\n\n mem_size += len as usize;\n\n\n\n if prev.is_some() {\n\n Err(format!(\"Variable \\\"{}\\\" already declared.\", name))\n", "file_path": "sim/src/build_graph.rs", "rank": 58, "score": 98181.80711021033 }, { "content": "fn compile_return(\n\n _global: &GlobalRegistry,\n\n local: &LocalRegistry,\n\n a: &hir::Val,\n\n) -> Result<Vec<lir::Statement>, Error> {\n\n let data = local.compile_val(a)?;\n\n\n\n Ok(vec!(lir::Statement::Inst(\n\n 
lir::Instruction::Return(data.ty, data.val)\n\n )))\n\n}\n\n\n", "file_path": "ir/src/hir_to_lir.rs", "rank": 59, "score": 97438.8493152371 }, { "content": "pub fn sort_graph(graph: OpsGraph) -> Result<OpsList, String> {\n\n let len = graph.edges.len();\n\n let mut visitor = Visitor::new(graph.edges);\n\n\n\n (0..len).try_for_each(|id| visitor.visit(id))\n\n .map_err(|_| \"Circular dependency in variables.\".to_string())?;\n\n\n\n Ok(OpsList {\n\n mem_size: graph.mem_size,\n\n inputs: graph.inputs,\n\n outputs: graph.outputs,\n\n mems: graph.mems,\n\n ops: visitor.sorted,\n\n mem_ops: graph.mem_ops,\n\n })\n\n}\n\n\n", "file_path": "sim/src/sort_graph.rs", "rank": 60, "score": 96701.71437213234 }, { "content": "fn next_state(exp: &IRegexp, s: &CSet, c: &Character) -> CSet {\n\n fn is_in(c: &Character, c1: &Character) -> bool {\n\n match (c, c1) {\n\n (Character::Char(a), Character::Char(b)) => a == b,\n\n (Character::Char(a), Character::Alpha) => a.is_ascii_alphabetic(),\n\n (Character::Char(a), Character::Num) => a.is_ascii_digit(),\n\n (Character::Char(a), Character::Behaved) => is_behaved(*a),\n\n (Character::Alpha, Character::Alpha) => true,\n\n (Character::Num, Character::Num) => true,\n\n (Character::Alpha, Character::Behaved) => true,\n\n (Character::Num, Character::Behaved) => true,\n\n (Character::Behaved, Character::Behaved) => true,\n\n (_, Character::Any) => true,\n\n _ => false\n\n }\n\n }\n\n\n\n s.iter().filter(|ci| {\n\n match ci {\n\n IChar::Char(c1, _) => is_in(&c, c1),\n\n _ => false\n\n }\n\n }).flat_map(|ci| {\n\n follow(ci, exp).into_iter()\n\n }).collect()\n\n}\n\n\n", "file_path": "automata/src/lexer/building.rs", "rank": 61, "score": 96582.51416427435 }, { "content": "fn static_type_from_str(s: &str) -> StaticType {\n\n match s {\n\n \"Any\" => StaticType::Any,\n\n \"Nothing\" => StaticType::Nothing,\n\n \"Int64\" => StaticType::Int64,\n\n \"Bool\" => StaticType::Bool,\n\n \"String\" => StaticType::Str,\n\n _ => 
StaticType::Struct(s.to_string())\n\n }\n\n}\n\n\n\nimpl fmt::Display for StaticType {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n StaticType::Any => write!(f, \"Any\"),\n\n StaticType::Nothing => write!(f, \"Nothing\"),\n\n StaticType::Int64 => write!(f, \"Int64\"),\n\n StaticType::Bool => write!(f, \"Bool\"),\n\n StaticType::Str => write!(f, \"String\"),\n\n StaticType::Struct(s) => write!(f, \"Structure '{}'\", s)\n", "file_path": "parser/src/ast.rs", "rank": 62, "score": 95565.00146002659 }, { "content": "// Assumes that s1 is compatible with s2.\n\nfn most_precise_type(s1: StaticType, s2: StaticType) -> StaticType {\n\n assert!(is_compatible(s1.clone(), s2.clone()), \"Cannot compute most precise type on non-compatible types!\");\n\n\n\n match s1 {\n\n StaticType::Any => s2,\n\n _ => s1\n\n }\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 63, "score": 95527.85190574187 }, { "content": "pub fn static_type<'a>(decls: Vec<Decl<'a>>) -> TypingResult<'a> {\n\n // Step 1. Build the global environment.\n\n let mut global_state: GlobalEnvironmentState<'a> = GlobalEnvironmentState::init();\n\n // Walk over declarations for phase 1-typing.\n\n for decl in decls {\n\n global_state.visit_decl(decl)?; // It will consume decl forever.\n\n }\n\n\n\n // Prepare for the global environment.\n\n let mut environment = to_env(&global_state.global_variables);\n\n\n\n // Add nothing: Nothing in the future environment.\n\n environment\n\n .entry(\"nothing\".to_string())\n\n .or_default()\n\n .push(EnvVariable::typed(StaticType::Nothing));\n\n\n\n // Step 2.\n\n // Iterate over all declarations.\n\n // Looks like déjà vu. 
:>\n", "file_path": "parser/src/typing/main.rs", "rank": 64, "score": 91198.83621323542 }, { "content": "fn build_states(exp: &IRegexp) -> Vec<(BTreeMap<Character, usize>, Option<usize>)> {\n\n struct Ctx<'a> {\n\n exp: &'a IRegexp,\n\n states_trans: BTreeMap<State, (usize, Option<usize>, TransMap)>,\n\n next_i: usize\n\n }\n\n\n\n impl Ctx<'_> {\n\n fn visit(&mut self, s: &State) {\n\n if self.states_trans.contains_key(s) {\n\n return\n\n }\n\n\n\n let id = self.next_i;\n\n self.states_trans.insert(s.clone(), (id, None, TransMap::new()));\n\n self.next_i += 1;\n\n let mut trans_map = TransMap::new();\n\n let mut accept = None;\n\n\n\n s.iter().map(|ic| {\n", "file_path": "automata/src/lexer/building.rs", "rank": 65, "score": 90589.46059042765 }, { "content": "fn parse_types_info(input: ParseStream) -> Result<TypesInfo> {\n\n input.parse::<kw::src_lifetime>()?;\n\n input.parse::<Token![:]>()?;\n\n let src_lifetime = input.parse()?;\n\n\n\n input.parse::<kw::span>()?;\n\n input.parse::<Token![:]>()?;\n\n let span_ty = input.parse()?;\n\n\n\n Ok(TypesInfo {\n\n src_lifetime,\n\n span_ty,\n\n })\n\n}\n\n\n\npub struct TokensInput {\n\n pub body: HookedContents,\n\n}\n\n\n", "file_path": "parsergen/src/parser/input.rs", "rank": 66, "score": 88093.14503836245 }, { "content": "pub fn typed_ast_to_hir(t_ast: TypedDecls) -> HIRSourceResult {\n\n let mut compiled = Vec::new();\n\n\n\n for s in t_ast.structures.values() {\n\n compiled.push(hir::Decl::Struct(emit_struct_decl(s)?));\n\n }\n\n\n\n let mut emitter = Emitter::init(t_ast.structures.keys().cloned().collect());\n\n\n\n // print, println\n\n compiled.extend(emitter.emit_core_declarations()?);\n\n\n\n // generate entrypoint based on the global expressions, where all variables *are global*.\n\n let (globals, fun) = emitter.emit_entrypoint(\n\n t_ast.functions.iter().flat_map(|(name, f_s)| fun_name_variants(name, f_s.len())).collect(),\n\n t_ast.global_expressions\n\n )?;\n\n\n\n // generate dynamic dispatch thunk.\n\n 
for (name, f_s) in t_ast.functions {\n\n compiled.extend(emitter.emit_dynamic_dispatch(&name, &f_s)?);\n\n }\n\n\n\n let entrypoint_name = fun.name.clone();\n\n\n\n compiled.push(hir::Decl::Function(fun));\n\n \n\n Ok(hir::Source::new(globals, entrypoint_name, compiled))\n\n}\n", "file_path": "ir/src/ast_to_hir.rs", "rank": 67, "score": 86687.34235351077 }, { "content": "fn build_iregexp(exps: &[Regexp]) -> IRegexp {\n\n struct Ctx {\n\n i: usize\n\n }\n\n\n\n impl Ctx {\n\n fn visit(&mut self, exp: &Regexp) -> IRegexp {\n\n match exp {\n\n Regexp::Epsilon => IRegexp::Epsilon,\n\n Regexp::Character(c) => {\n\n let e = IRegexp::Character(IChar::Char(c.clone(), self.i));\n\n self.i += 1;\n\n e\n\n },\n\n Regexp::Union(l, r) => IRegexp::Union(\n\n Box::new(self.visit(l)),\n\n Box::new(self.visit(r)),\n\n ),\n\n Regexp::Concat(l, r) => IRegexp::Concat(\n\n Box::new(self.visit(l)),\n", "file_path": "automata/src/lexer/building.rs", "rank": 68, "score": 86531.04613587372 }, { "content": "fn compute_enriched_signature(sig_1: Vec<StaticType>, sig_2: Vec<StaticType>, sig_3: Vec<StaticType>) -> Vec<StaticType> {\n\n sig_1.into_iter().zip(sig_2.into_iter()).zip(sig_3.into_iter()).map(|((s1, s2), s3)| most_precise_type(most_precise_type(s1, s2), s3)).collect()\n\n}\n\n\n", "file_path": "parser/src/typing/func_signatures.rs", "rank": 69, "score": 83300.28050071116 }, { "content": "fn emit_struct_decl(s: &Structure) -> HIRStructDeclResult {\n\n Ok(hir::StructDecl::new(s.name.name.clone(),\n\n s.fields.iter().map(|f| f.name.name.clone()).collect()\n\n ))\n\n}\n\n\n", "file_path": "ir/src/ast_to_hir.rs", "rank": 70, "score": 81456.92530318236 }, { "content": " }\n\n\n\n match e.val.as_ref() {\n\n ExpVal::Return(r) => verify_return_type(e.span, r.as_ref(), expected),\n\n ExpVal::Assign(_, e) => visit_returns(e, expected),\n\n ExpVal::BinOp(_, a, b) => {\n\n visit_returns(&a, expected)?;\n\n visit_returns(&b, expected)?;\n\n\n\n Ok(())\n\n },\n\n ExpVal::UnaryOp(_, e) => 
visit_returns(e, expected),\n\n ExpVal::Call(_, e_s) => {\n\n for e in e_s {\n\n visit_returns(e, expected)?;\n\n }\n\n Ok(())\n\n },\n\n ExpVal::Block(b) | ExpVal::LMul(_, b) | ExpVal::For(_, _, b) => {\n\n for e in &b.val {\n", "file_path": "parser/src/typing/returns.rs", "rank": 71, "score": 77348.36427568994 }, { "content": " visit_returns(e, expected)?;\n\n }\n\n Ok(())\n\n },\n\n ExpVal::RMul(e, _) => visit_returns(e, expected),\n\n ExpVal::If(e, b, else_branch) => {\n\n visit_returns(e, expected)?;\n\n for x in &b.val {\n\n visit_returns(x, expected)?;\n\n }\n\n visit_else_returns(else_branch, expected)?;\n\n Ok(())\n\n },\n\n ExpVal::While(e, b) => {\n\n visit_returns(e, expected)?;\n\n for x in &b.val {\n\n visit_returns(x, expected)?;\n\n }\n\n Ok(())\n\n },\n\n _ => Ok(())\n\n }\n\n}\n\n\n", "file_path": "parser/src/typing/returns.rs", "rank": 72, "score": 77343.58047186268 }, { "content": "use super::data::*;\n\nuse crate::ast::*;\n\nuse automata::line_counter::Span;\n\n\n", "file_path": "parser/src/typing/returns.rs", "rank": 73, "score": 77334.2195058851 }, { "content": "#define TYPE_BOOL 3\n", "file_path": "ir/runtime.c", "rank": 74, "score": 72450.11089918284 }, { "content": "void native_print_bool(int64_t *ret_ty, int64_t *ret_val, int64_t ty, int64_t val) {\n\n if(ty != TYPE_BOOL) {\n\n fprintf(stderr, \"Expected a Bool for print_bool.\\n\");\n\n exit(1);\n\n }\n\n\n\n if(val)\n\n printf(\"true\");\n\n else\n\n printf(\"false\");\n\n\n\n *ret_ty = TYPE_NOTHING;\n\n *ret_val = 0;\n", "file_path": "ir/runtime.c", "rank": 75, "score": 71278.49566519671 }, { "content": "void native_print_string(int64_t *ret_ty, int64_t *ret_val, int64_t ty, char* val) {\n\n if(ty != TYPE_STR) {\n\n fprintf(stderr, \"Expected a Str for print_string.\\n\");\n\n exit(1);\n\n }\n\n\n\n printf(\"%s\", val);\n\n\n\n *ret_ty = TYPE_NOTHING;\n\n *ret_val = 0;\n", "file_path": "ir/runtime.c", "rank": 76, "score": 71180.67479460214 }, { "content": "fn compile_fn(\n\n global: 
&GlobalRegistry,\n\n f: &hir::Function\n\n) -> Result<lir::Function, Error> {\n\n let vars_and_params: Vec<String> = f.vars.iter().chain(f.args.iter()).cloned().collect();\n\n let mut local = LocalRegistry::new(&global, &vars_and_params);\n\n let mut lbl_gen = LabelGenerator::new();\n\n\n\n let mut body = Vec::new();\n\n f.vars.iter()\n\n .try_for_each(|name| -> Result<(), Error> {\n\n body.push(lir::Statement::Inst(lir::Instruction::Mov(\n\n local.get_var(name)?.ty_name.clone(), lir::Val::Const(0)\n\n )));\n\n Ok(())\n\n })?;\n\n\n\n let mut statements = compile_block(&mut lbl_gen, &global, &mut local, &f.body)?;\n\n body.append(&mut statements);\n\n\n", "file_path": "ir/src/hir_to_lir.rs", "rank": 77, "score": 70135.90620543284 }, { "content": "fn fn_to_asm(\n\n asm: &mut String,\n\n reg: &mut StringRegistry,\n\n fn_ids: &HashMap<String, usize>,\n\n global: &GlobalRegistry,\n\n f: &Function,\n\n id: usize\n\n) -> Result<(), Error> {\n\n let label_ids = extract_labels(f);\n\n let local = LocalRegistry::new(global, &f.vars);\n\n let var_count = local.get_var_count();\n\n let frame_size = 8 * if var_count % 2 == 0 {var_count} else {var_count + 1};\n\n\n\n // Declare function\n\n writeln!(asm, \"fn_{}:\", id)?;\n\n\n\n // Create new frame\n\n writeln!(asm, \"\\tpushq %rbp\")?;\n\n writeln!(asm, \"\\tmovq %rsp, %rbp\")?;\n\n writeln!(asm, \"\\tsubq ${}, %rsp\", frame_size)?;\n", "file_path": "ir/src/lir_to_asm.rs", "rank": 78, "score": 70135.90620543284 }, { "content": "pub fn parse<'a>(file_name: &'a str, contents: &'a str) -> Result<Vec<Decl<'a>>, ReadError<'a>> {\n\n let chars = LineIter::new(contents);\n\n let input = IndexedString::new(file_name, contents);\n\n\n\n fn parse_i64(text: &str) -> Result<i64, String> {\n\n text.parse().map_err(|_| \"This number does not fit in 64 bits.\".to_string())\n\n }\n\n\n\n enum IdentOrKeyword {\n\n Ident(String),\n\n Keyword(Keyword),\n\n }\n\n\n\n impl IdentOrKeyword {\n\n fn expect_ident(self) -> Result<String, String> {\n\n 
use IdentOrKeyword::*;\n\n\n\n match self {\n\n Ident(id) => Ok(id),\n\n Keyword(_) => Err(\"Expected an identifier, found a keyword.\".to_string())\n", "file_path": "parser/src/parse.rs", "rank": 79, "score": 68630.95420156358 }, { "content": "pub fn parse_lir<'a>(file_name: &'a str, contents: &'a str) -> Result<Source, ReadError<'a>> {\n\n let chars = LineIter::new(contents);\n\n let input = IndexedString::new(file_name, contents);\n\n\n\n fn parse_i64(text: &str) -> Result<i64, String> {\n\n text.parse().map_err(|_| \"This number does not fit in 64 bits.\".to_string())\n\n }\n\n\n\n macro_rules! punct {\n\n ($variant:ident) => {Ok(Some(Token::Punct(Punct::$variant)))};\n\n }\n\n\n\n let dfa: DFA<LineIter, IndexedString, Option<Token>, ReadError> = lex! {\n\n chars: {chars}\n\n input: {&input}\n\n\n\n ((' ' | '\\t' | '\\n') & (' ' | '\\t' | '\\n')*) => {\n\n Ok(None)\n\n },\n\n ('#' & behaved* & '\\n') => {\n", "file_path": "ir/src/lir/parsing.rs", "rank": 80, "score": 68243.45647202464 }, { "content": "pub fn parse_hir<'a>(file_name: &'a str, contents: &'a str) -> Result<Source, ReadError<'a>> {\n\n let chars = LineIter::new(contents);\n\n let input = IndexedString::new(file_name, contents);\n\n\n\n fn parse_i64(text: &str) -> Result<i64, String> {\n\n text.parse().map_err(|_| \"This number does not fit in 64 bits.\".to_string())\n\n }\n\n\n\n macro_rules! punct {\n\n ($variant:ident) => {Ok(Some(Token::Punct(Punct::$variant)))};\n\n }\n\n\n\n let dfa: DFA<LineIter, IndexedString, Option<Token>, ReadError> = lex! 
{\n\n chars: {chars}\n\n input: {&input}\n\n\n\n ((' ' | '\\t' | '\\n') & (' ' | '\\t' | '\\n')*) => {\n\n Ok(None)\n\n },\n\n ('#' & behaved* & '\\n') => {\n", "file_path": "ir/src/hir/parsing.rs", "rank": 81, "score": 68243.45647202464 }, { "content": "pub fn parse_netlist<'a>(file_name: &'a str, contents: &'a str) -> Result<Netlist, ReadError<'a>> {\n\n let chars = LineIter::new(contents);\n\n let input = IndexedString::new(file_name, contents);\n\n\n\n let dfa: DFA<LineIter, IndexedString, Option<Token>, ReadError> = lex! {\n\n chars: {chars}\n\n input: {&input}\n\n\n\n ((' ' | '\\t' | '\\n') & (' ' | '\\t' | '\\n')*) => {\n\n Ok(None)\n\n },\n\n ('#' & behaved* & '\\n') => {\n\n Ok(None)\n\n },\n\n\n\n ((alpha | '_') & (alpha | '_' | num)*) => {\n\n Ok(Some(Token::Ident($text.to_string())))\n\n },\n\n (num & num*) => {\n\n Ok(Some(Token::Num($text.to_string())))\n", "file_path": "sim/src/parsing/parser.rs", "rank": 82, "score": 68243.45647202464 }, { "content": "fn main() {\n\n let matches = App::new(\"pjulia\")\n\n .version(\"1.0\")\n\n .author(\"Julien Marquet, Ryan Lahfa\")\n\n .arg(Arg::with_name(\"input\")\n\n .help(\"The source file\")\n\n .required(true)\n\n .index(1))\n\n .arg(Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .takes_value(true))\n\n .arg(Arg::with_name(\"parse-only\")\n\n .short(\"p\")\n\n .long(\"parse-only\")\n\n .help(\"Only parse the input as an AST\"))\n\n .arg(Arg::with_name(\"type-only\")\n\n .short(\"t\")\n\n .long(\"type-only\")\n\n .help(\"Parse the input and type it\"))\n\n .arg(Arg::with_name(\"asm-only\")\n", "file_path": "compiler/src/main.rs", "rank": 83, "score": 62941.63579959795 }, { "content": "fn main() {\n\n let matches = App::new(\"petit-julia\")\n\n .version(\"1.0\")\n\n .author(\"Julien Marquet, Ryan Lahfa\")\n\n .arg(Arg::with_name(\"input\")\n\n .help(\"The program to run\")\n\n .required(true)\n\n .index(1))\n\n .arg(Arg::with_name(\"parse-only\")\n\n .long(\"parse-only\")\n\n .help(\"Only parse the 
input\"))\n\n .arg(Arg::with_name(\"type-only\")\n\n .long(\"type-only\")\n\n .help(\"Only types the input\"))\n\n .get_matches();\n\n\n\n let success = {\n\n let file_name = matches.value_of(\"input\").unwrap();\n\n let parse_only = matches.is_present(\"parse-only\");\n\n let type_only = matches.is_present(\"type-only\");\n", "file_path": "parser/src/main.rs", "rank": 84, "score": 62941.63579959795 }, { "content": "fn main() {\n\n let matches = App::new(\"petit-julia-ir\")\n\n .version(\"1.0\")\n\n .author(\"Julien Marquet, Ryan Lahfa\")\n\n .subcommand(SubCommand::with_name(\"pjulia\")\n\n .about(\"Compiles a pJulia source file\")\n\n .arg(Arg::with_name(\"input\")\n\n .help(\"The source file\")\n\n .required(true)\n\n .index(1))\n\n .arg(Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .takes_value(true)))\n\n .subcommand(SubCommand::with_name(\"hir\")\n\n .about(\"Compiles a HIR source file\")\n\n .arg(Arg::with_name(\"input\")\n\n .help(\"The source file\")\n\n .required(true)\n\n .index(1))\n\n .arg(Arg::with_name(\"output\")\n", "file_path": "ir/src/main.rs", "rank": 85, "score": 62941.63579959795 }, { "content": "fn compile_while(\n\n lbl_gen: &mut LabelGenerator,\n\n global: &GlobalRegistry,\n\n local: &mut LocalRegistry,\n\n cond: &hir::Val,\n\n block: &hir::Block,\n\n) -> Result<Vec<lir::Statement>, Error> {\n\n let mut out = Vec::new();\n\n\n\n let lbl_body = lbl_gen.new_label();\n\n let lbl_end = lbl_gen.new_label();\n\n\n\n out.push(lir::Statement::Label(lbl_body.clone()));\n\n out.push(lir::Statement::Inst(\n\n lir::Instruction::JumpifNot(\n\n local.compile_val(cond)?.val,\n\n lbl_end.clone(),\n\n )\n\n ));\n\n\n", "file_path": "ir/src/hir_to_lir.rs", "rank": 86, "score": 61719.77191528816 }, { "content": "fn compile_to_asm(\n\n file_name: &str,\n\n output: &str,\n\n debug_hir: bool,\n\n debug_lir: bool,\n\n parse_only: bool,\n\n type_only: bool,\n\n) -> Result<Option<String>, String> {\n\n let s = read_file(file_name)?;\n\n\n\n // 
static_type(parse(file_name, &contents).map_err(|e| e.to_string())?).map_err(|e| e.to_string())\n\n \n\n let parse_res = match parse(file_name, &s) {\n\n Ok(res) => res,\n\n Err(e) => return Err(e.to_string())\n\n };\n\n\n\n if parse_only {return Ok(None)}\n\n\n\n let res = match static_type(parse_res) {\n", "file_path": "compiler/src/main.rs", "rank": 87, "score": 61719.77191528816 }, { "content": "fn compile_if(\n\n lbl_gen: &mut LabelGenerator,\n\n global: &GlobalRegistry,\n\n local: &mut LocalRegistry,\n\n cond: &hir::Val,\n\n block_true: &hir::Block,\n\n block_false: &hir::Block,\n\n) -> Result<Vec<lir::Statement>, Error> {\n\n let mut out = Vec::new();\n\n\n\n let lbl_true = lbl_gen.new_label();\n\n let lbl_end = lbl_gen.new_label();\n\n\n\n out.push(lir::Statement::Inst(\n\n lir::Instruction::Jumpif(\n\n local.compile_val(cond)?.val,\n\n lbl_true.clone(),\n\n )\n\n ));\n\n\n", "file_path": "ir/src/hir_to_lir.rs", "rank": 88, "score": 61719.77191528816 }, { "content": "fn inst_to_asm(\n\n asm: &mut String,\n\n reg: &mut StringRegistry,\n\n fn_ids: &HashMap<String, usize>,\n\n label_ids: &HashMap<String, usize>,\n\n local: &LocalRegistry,\n\n fn_id: usize,\n\n inst: &Instruction\n\n) -> Result<(), Error> {\n\n match inst {\n\n Instruction::Bin(dest, op, a, b) => {\n\n write_get_val(asm, reg, local, a, \"%rax\")?;\n\n write_get_val(asm, reg, local, b, \"%rbx\")?;\n\n\n\n match op {\n\n BinOp::And => writeln!(asm, \"\\tandq %rbx, %rax\")?,\n\n BinOp::Or => writeln!(asm, \"\\torq %rbx, %rax\")?,\n\n\n\n BinOp::Equ => {\n\n writeln!(asm, \"\\tcmp %rbx, %rax\")?;\n", "file_path": "ir/src/lir_to_asm.rs", "rank": 89, "score": 60573.741695089266 }, { "content": "fn compile_block(\n\n lbl_gen: &mut LabelGenerator,\n\n global: &GlobalRegistry,\n\n local: &mut LocalRegistry,\n\n block: &hir::Block,\n\n) -> Result<Vec<lir::Statement>, Error> {\n\n let mut out = Vec::new();\n\n\n\n block.stmts.iter().try_for_each(|stmt| -> Result<(), Error> {\n\n match stmt {\n\n 
hir::Statement::Call(dest, call) => {\n\n out.extend(compile_call(global, local, dest, call)?);\n\n },\n\n hir::Statement::Return(a) => {\n\n out.extend(compile_return(global, local, a)?);\n\n },\n\n hir::Statement::If(cond, block_true, block_false) => {\n\n out.extend(compile_if(lbl_gen, global, local, cond, block_true, block_false)?);\n\n },\n\n hir::Statement::While(cond, block) => {\n\n out.extend(compile_while(lbl_gen, global, local, cond, block)?);\n\n },\n\n }\n\n \n\n Ok(())\n\n })?;\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "ir/src/hir_to_lir.rs", "rank": 90, "score": 60573.741695089266 }, { "content": "fn compile_call(\n\n global: &GlobalRegistry,\n\n local: &mut LocalRegistry,\n\n dest: &hir::LValue,\n\n call: &hir::Callable\n\n) -> Result<Vec<lir::Statement>, Error> {\n\n let mut out = Vec::new();\n\n let mut maybe_store = Vec::new();\n\n \n\n let dest_var = match dest {\n\n hir::LValue::Var(dest) => local.get_var(dest)?,\n\n hir::LValue::Access(_, _, _) => {\n\n let data = local.mk_additional_var();\n\n maybe_store.push(data);\n\n maybe_store.last().unwrap()\n\n },\n\n };\n\n\n\n match call {\n\n hir::Callable::Call(fn_name, native, args) => {\n", "file_path": "ir/src/hir_to_lir.rs", "rank": 91, "score": 60573.741695089266 }, { "content": "fn write_get_val(\n\n asm: &mut String,\n\n reg: &mut StringRegistry,\n\n local: &LocalRegistry,\n\n val: &Val,\n\n dest: &str\n\n) -> Result<(), Error> {\n\n match val {\n\n Val::Var(name) => {\n\n writeln!(asm, \"\\tmovq {}, {}\", local.get_var_access(name)?, dest)?\n\n },\n\n Val::Const(i) => {\n\n writeln!(asm, \"\\tmovq ${}, {}\", i, dest)?\n\n },\n\n Val::Str(s) => {\n\n let id = reg.register(s.clone());\n\n writeln!(asm, \"\\tmovq $string_{}, %rax\", id)?;\n\n },\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ir/src/lir_to_asm.rs", "rank": 92, "score": 59496.69782279244 }, { "content": "fn compile(input: &str,\n\n output: &str,\n\n parse_only: bool,\n\n type_only: bool,\n\n asm_only: bool, \n\n debug_hir: 
bool, debug_lir: bool,\n\n runtime_object_filename: &str) -> Result<(), String> { \n\n let asm = match compile_to_asm(input, output, debug_hir, debug_lir, parse_only, type_only)? {\n\n Some(asm) => asm,\n\n None => return Ok(())\n\n };\n\n let asm_filename = format!(\"{}.s\", output);\n\n write_file(&asm_filename, &asm)?; \n\n\n\n if !asm_only {\n\n // run `as` on asm file to provide object file.\n\n // use runtime object file.\n\n // compile test to binary.\n\n\n\n let user_object_filename = format!(\"{}.o\", output);\n", "file_path": "compiler/src/main.rs", "rank": 93, "score": 57275.756746863655 }, { "content": "pub fn build_pda_data(\n\n terms: &[&str],\n\n nterms: &[&str],\n\n prods: &[(&str, Vec<&str>)],\n\n start: &str\n\n )\n\n -> (Vec<Production>, MachineTable)\n\n{\n\n let term_count = 1 + terms.len();\n\n let nterm_count = 1 + nterms.len();\n\n\n\n let maybe_term = |name: &str| -> Option<usize> {\n\n terms.iter().position(|x| *x == name).map(|i| i + 1)\n\n };\n\n\n\n let nterm_index = |name: &str| -> usize {\n\n if let Some(i) = nterms.iter().position(|x| *x == name) {\n\n i + 1\n\n } else {\n\n panic!(format!(\"Can't find the non-terminal \\\"{}\\\"\", name))\n", "file_path": "automata/src/parser/api.rs", "rank": 94, "score": 56716.09170792416 }, { "content": "fn serialize_character(c: &Character) -> TokenStream {\n\n match c {\n\n Character::Char(c) => quote! {::automata::lexer::Character::Char(#c)},\n\n Character::Alpha => quote! {::automata::lexer::Character::Alpha},\n\n Character::Num => quote! {::automata::lexer::Character::Num},\n\n Character::Behaved => quote! {::automata::lexer::Character::Behaved},\n\n Character::Any => quote! {::automata::lexer::Character::Any},\n\n }\n\n}\n\n\n\nimpl Regexp {\n\n pub fn serialize(&self) -> TokenStream {\n\n match self {\n\n Regexp::Epsilon => quote! {::automata::lexer::Regexp::Epsilon},\n\n Regexp::Character(c) => {\n\n let c = serialize_character(c);\n\n quote! 
{::automata::lexer::Regexp::Character(#c)}\n\n },\n\n Regexp::Union(l, r) => {\n\n let (l, r) = (l.serialize(), r.serialize());\n", "file_path": "parsergen/src/lexer/regexp.rs", "rank": 95, "score": 52887.64194255708 }, { "content": "fn check(input: &MacroInput) -> Result<()> {\n\n input.chars_block.check(&[])?;\n\n input.input_block.check(&[])?;\n\n\n\n let hooked = &[\"span\".to_string(), \"text\".to_string()];\n\n input.arms.iter().try_for_each(|arm| arm.contents.check(hooked))\n\n}\n\n\n", "file_path": "parsergen/src/lexer/macro_def.rs", "rank": 96, "score": 51983.926800054454 }, { "content": "fn check(input: &MacroInput) -> Result<()> {\n\n let tokens = input.terms.iter().chain(input.nterms.iter()).map(|typed| &typed.0).collect::<Vec<&Ident>>();\n\n let terms = input.terms.iter().map(|typed| typed.0.to_string()).collect::<Vec<String>>();\n\n let token_names = tokens.iter().map(|ident| ident.to_string()).collect::<Vec<String>>();\n\n let nterm_names = input.nterms.iter().map(|typed| typed.0.to_string()).collect::<Vec<String>>();\n\n\n\n check_tokens(&tokens)?;\n\n input.tokenizer.body.check(&terms)?;\n\n input.on_empty.check(&vec!())?;\n\n\n\n input.rules.iter().try_for_each::<_, Result<()>>(|rule| {\n\n let vars = rule.expand.iter()\n\n .filter_map(|(x, _)| {\n\n if let Some(ident) = x {\n\n Some(ident.to_string())\n\n } else {\n\n None\n\n }\n\n })\n\n .chain(std::iter::once(\"span\".to_string()))\n", "file_path": "parsergen/src/parser/macro_def.rs", "rank": 97, "score": 51983.926800054454 }, { "content": "fn serialize_table_data(data: &TableData) -> TokenStream {\n\n let terms = &data.terms;\n\n let nterms = &data.nterms;\n\n\n\n let (rules, states) = automata::parser::build_pda_data(\n\n &terms.iter().map(|s| s.as_str()).collect::<Vec<&str>>(),\n\n &nterms.iter().map(|s| s.as_str()).collect::<Vec<&str>>(),\n\n &data.prods.iter().map(|(token, expand)| (\n\n token.as_str(),\n\n expand.iter().map(|s| s.as_str()).collect::<Vec<&str>>(),\n\n 
)).collect::<Vec<_>>(),\n\n &data.start_token\n\n );\n\n\n\n let rules_count = rules.len();\n\n let s_rules = rules.iter().map(|prod| {\n\n let s_expand = prod.expand.iter().map(|symbol| {\n\n match symbol {\n\n types::Symbol::T(i) => quote! {::automata::parser::types::Symbol::T(#i), },\n\n types::Symbol::N(i) => quote! {::automata::parser::types::Symbol::N(#i), },\n", "file_path": "parsergen/src/parser/macro_def.rs", "rank": 98, "score": 49549.25366492144 }, { "content": "fn build_table_data(input: &MacroInput) -> TableData {\n\n TableData {\n\n terms: input.terms.iter().map(|typed| typed.0.to_string()).collect(),\n\n nterms: input.nterms.iter().map(|typed| typed.0.to_string()).collect(),\n\n prods: input.rules.iter().map(|rule| {\n\n (\n\n rule.token.to_string(),\n\n rule.expand.iter().map(|(_, ident)| ident.to_string()).collect()\n\n )\n\n }).collect(),\n\n start_token: input.start_token.to_string(),\n\n }\n\n}\n\n\n", "file_path": "parsergen/src/parser/macro_def.rs", "rank": 99, "score": 49549.25366492144 } ]
Rust
clashctl-tui/src/event.rs
EurusEurus/clashctl
6fabdeabc6dc4920cb9fb3da701242ac103fbf04
use std::fmt::Display; use crossterm::event::{KeyCode as KC, KeyEvent as KE, KeyModifiers as KM}; use log::Level; use tui::{ style::{Color, Style}, text::{Span, Spans}, }; use crate::{ clashctl::model::{ConnectionsWithSpeed, Log, Proxies, Rules, Traffic, Version}, components::MovableListItem, utils::AsColor, Error, Result, }; #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum Event { Quit, Input(InputEvent), Update(UpdateEvent), Diagnostic(DiagnosticEvent), } impl<'a> MovableListItem<'a> for Event { fn to_spans(&self) -> Spans<'a> { match self { Event::Quit => Spans(vec![]), Event::Update(event) => Spans(vec![ Span::styled("⇵ ", Style::default().fg(Color::Yellow)), Span::raw(event.to_string()), ]), Event::Input(event) => Spans(vec![ Span::styled("✜ ", Style::default().fg(Color::Green)), Span::raw(format!("{:?}", event)), ]), Event::Diagnostic(event) => match event { DiagnosticEvent::Log(level, payload) => Spans(vec![ Span::styled( format!("✇ {:<6}", level), Style::default().fg(level.as_color()), ), Span::raw(payload.to_owned()), ]), }, } } } impl Event { pub fn is_quit(&self) -> bool { matches!(self, Event::Quit) } pub fn is_interface(&self) -> bool { matches!(self, Event::Input(_)) } pub fn is_update(&self) -> bool { matches!(self, Event::Update(_)) } pub fn is_diagnostic(&self) -> bool { matches!(self, Event::Diagnostic(_)) } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum InputEvent { Esc, TabGoto(u8), ToggleDebug, ToggleHold, List(ListEvent), TestLatency, NextSort, PrevSort, Other(KE), } #[derive(Debug, Clone, PartialEq, Eq)] pub struct ListEvent { pub fast: bool, pub code: KC, } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum UpdateEvent { Config(clashctl_interactive::clashctl::model::Config), Connection(ConnectionsWithSpeed), Version(Version), Traffic(Traffic), Proxies(Proxies), Rules(Rules), Log(Log), ProxyTestLatencyDone, } impl Display for UpdateEvent { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { match self { UpdateEvent::Config(x) => write!(f, "{:?}", x), UpdateEvent::Connection(x) => write!(f, "{:?}", x), UpdateEvent::Version(x) => write!(f, "{:?}", x), UpdateEvent::Traffic(x) => write!(f, "{:?}", x), UpdateEvent::Proxies(x) => write!(f, "{:?}", x), UpdateEvent::Rules(x) => write!(f, "{:?}", x), UpdateEvent::Log(x) => write!(f, "{:?}", x), UpdateEvent::ProxyTestLatencyDone => write!(f, "Test latency done"), } } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum DiagnosticEvent { Log(Level, String), } impl TryFrom<KC> for Event { type Error = Error; fn try_from(value: KC) -> Result<Self> { match value { KC::Char('q') | KC::Char('x') => Ok(Event::Quit), KC::Char('t') => Ok(Event::Input(InputEvent::TestLatency)), KC::Esc => Ok(Event::Input(InputEvent::Esc)), KC::Char(' ') => Ok(Event::Input(InputEvent::ToggleHold)), KC::Char(char) if char.is_ascii_digit() => Ok(Event::Input(InputEvent::TabGoto( char.to_digit(10) .expect("char.is_ascii_digit() should be able to parse into number") as u8, ))), _ => Err(Error::TuiInternalErr), } } } impl From<KE> for Event { fn from(value: KE) -> Self { match (value.modifiers, value.code) { (KM::CONTROL, KC::Char('c')) => Self::Quit, (KM::CONTROL, KC::Char('d')) => Self::Input(InputEvent::ToggleDebug), (modi, arrow @ (KC::Left | KC::Right | KC::Up | KC::Down | KC::Enter)) => { Event::Input(InputEvent::List(ListEvent { fast: matches!(modi, KM::CONTROL | KM::SHIFT), code: arrow, })) } (KM::ALT, KC::Char('s')) => Self::Input(InputEvent::PrevSort), (KM::NONE, KC::Char('s')) => Self::Input(InputEvent::NextSort), (KM::NONE, key_code) => key_code .try_into() .unwrap_or_else(|_| Self::Input(InputEvent::Other(value))), _ => Self::Input(InputEvent::Other(value)), } } }
use std::fmt::Display; use crossterm::event::{KeyCode as KC, KeyEvent as KE, KeyModifiers as KM}; use log::Level; use tui::{ style::{Color, Style}, text::{Span, Spans}, }; use crate::{ clashctl::model::{ConnectionsWithSpeed, Log, Proxies, Rules, Traffic, Version}, components::MovableListItem, utils::AsColor, Error, Result, }; #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum Event { Quit, Input(InputEvent), Update(UpdateEvent), Diagnostic(DiagnosticEvent), } impl<'a> MovableListItem<'a> for Event { fn to_spans(&self) -> Spans<'a> { match self { Event::Quit => Spans(vec![]), Event::Update(event) => Spans(vec![ Span::styled("⇵ ", Style::default().fg(Color::Yellow)), Span::raw(event.to_string()), ]), Event::Input(event) => Spans(vec![ Span::styled("✜ ", Style::default().fg(Color::Green)), Span::raw(format!("{:?}",
, ToggleDebug, ToggleHold, List(ListEvent), TestLatency, NextSort, PrevSort, Other(KE), } #[derive(Debug, Clone, PartialEq, Eq)] pub struct ListEvent { pub fast: bool, pub code: KC, } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum UpdateEvent { Config(clashctl_interactive::clashctl::model::Config), Connection(ConnectionsWithSpeed), Version(Version), Traffic(Traffic), Proxies(Proxies), Rules(Rules), Log(Log), ProxyTestLatencyDone, } impl Display for UpdateEvent { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { UpdateEvent::Config(x) => write!(f, "{:?}", x), UpdateEvent::Connection(x) => write!(f, "{:?}", x), UpdateEvent::Version(x) => write!(f, "{:?}", x), UpdateEvent::Traffic(x) => write!(f, "{:?}", x), UpdateEvent::Proxies(x) => write!(f, "{:?}", x), UpdateEvent::Rules(x) => write!(f, "{:?}", x), UpdateEvent::Log(x) => write!(f, "{:?}", x), UpdateEvent::ProxyTestLatencyDone => write!(f, "Test latency done"), } } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum DiagnosticEvent { Log(Level, String), } impl TryFrom<KC> for Event { type Error = Error; fn try_from(value: KC) -> Result<Self> { match value { KC::Char('q') | KC::Char('x') => Ok(Event::Quit), KC::Char('t') => Ok(Event::Input(InputEvent::TestLatency)), KC::Esc => Ok(Event::Input(InputEvent::Esc)), KC::Char(' ') => Ok(Event::Input(InputEvent::ToggleHold)), KC::Char(char) if char.is_ascii_digit() => Ok(Event::Input(InputEvent::TabGoto( char.to_digit(10) .expect("char.is_ascii_digit() should be able to parse into number") as u8, ))), _ => Err(Error::TuiInternalErr), } } } impl From<KE> for Event { fn from(value: KE) -> Self { match (value.modifiers, value.code) { (KM::CONTROL, KC::Char('c')) => Self::Quit, (KM::CONTROL, KC::Char('d')) => Self::Input(InputEvent::ToggleDebug), (modi, arrow @ (KC::Left | KC::Right | KC::Up | KC::Down | KC::Enter)) => { Event::Input(InputEvent::List(ListEvent { fast: matches!(modi, KM::CONTROL | KM::SHIFT), 
code: arrow, })) } (KM::ALT, KC::Char('s')) => Self::Input(InputEvent::PrevSort), (KM::NONE, KC::Char('s')) => Self::Input(InputEvent::NextSort), (KM::NONE, key_code) => key_code .try_into() .unwrap_or_else(|_| Self::Input(InputEvent::Other(value))), _ => Self::Input(InputEvent::Other(value)), } } }
event)), ]), Event::Diagnostic(event) => match event { DiagnosticEvent::Log(level, payload) => Spans(vec![ Span::styled( format!("✇ {:<6}", level), Style::default().fg(level.as_color()), ), Span::raw(payload.to_owned()), ]), }, } } } impl Event { pub fn is_quit(&self) -> bool { matches!(self, Event::Quit) } pub fn is_interface(&self) -> bool { matches!(self, Event::Input(_)) } pub fn is_update(&self) -> bool { matches!(self, Event::Update(_)) } pub fn is_diagnostic(&self) -> bool { matches!(self, Event::Diagnostic(_)) } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum InputEvent { Esc, TabGoto(u8)
random
[ { "content": "pub fn help_footer(content: &str, normal: Style, highlight: Style) -> Spans {\n\n if content.is_empty() {\n\n Spans(vec![])\n\n } else if content.len() == 1 {\n\n Spans(vec![Span::raw(content)])\n\n } else {\n\n let (index, _) = content.char_indices().nth(1).unwrap();\n\n let (first_char, rest) = content.split_at(index);\n\n Spans(vec![\n\n Span::styled(\"[\", normal),\n\n Span::styled(first_char, highlight),\n\n Span::styled(\"]\", normal),\n\n Span::styled(rest, normal),\n\n ])\n\n }\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 0, "score": 176528.86488997826 }, { "content": "pub fn get_text_style() -> Style {\n\n Style::default().fg(Color::White)\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 1, "score": 176514.4277609259 }, { "content": "pub fn tagged_footer<T: ToString>(label: &str, style: Style, content: T) -> Spans {\n\n let mut ret = help_footer(label, style, style.add_modifier(Modifier::BOLD)).wrapped();\n\n ret.0.push(Span::styled(\n\n content.to_string().wrapped(),\n\n Style::default()\n\n .fg(Color::White)\n\n .add_modifier(Modifier::REVERSED),\n\n ));\n\n ret\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 2, "score": 166664.42460946913 }, { "content": "pub fn main_loop(opt: TuiOpt, flag: Flags) -> Result<()> {\n\n let config = flag.get_config()?;\n\n if config.using_server().is_none() {\n\n println!(\n\n \"{} No API server configured yet. 
Use this command to add a server:\\n\\n $ {}\",\n\n \"WARN:\".red(),\n\n \"clashctl server add\".green()\n\n );\n\n return Ok(());\n\n };\n\n\n\n init_config(config);\n\n\n\n let state = Arc::new(RwLock::new(TuiStates::default()));\n\n let error = Arc::new(Mutex::new(None));\n\n\n\n let (event_tx, event_rx) = channel();\n\n let (action_tx, action_rx) = channel();\n\n\n\n let opt = Arc::new(opt);\n", "file_path": "clashctl-tui/src/app.rs", "rank": 3, "score": 152229.3517698302 }, { "content": "pub fn spans_window<'a>(spans: &'a Spans, range: &Range<usize>) -> Spans<'a> {\n\n let inner = &spans.0;\n\n match inner.len() {\n\n 0 => spans.to_owned(),\n\n 1 => {\n\n let item = &inner[0];\n\n Spans(vec![Span::styled(\n\n string_window(&item.content, range),\n\n item.style,\n\n )])\n\n }\n\n _ => {\n\n let (start, end) = (range.start, range.end);\n\n inner\n\n .iter()\n\n .flat_map(|x| x.styled_graphemes(Style::default()))\n\n .skip(start)\n\n .take(end - start)\n\n .collect::<Vec<_>>()\n\n .into_spans()\n\n }\n\n }\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 4, "score": 139331.61523807253 }, { "content": "pub fn spans_window_owned<'a>(mut spans: Spans<'a>, range: &Range<usize>) -> Spans<'a> {\n\n match spans.0.len() {\n\n 0 => spans,\n\n 1 => {\n\n let item = &mut spans.0[0];\n\n item.content = string_window_owned(item.content.to_string(), range).into();\n\n spans\n\n }\n\n _ => {\n\n let (start, end) = (range.start, range.end);\n\n spans\n\n .0\n\n .iter_mut()\n\n .flat_map(|x| x.content.chars().map(|c| (x.style, c)))\n\n .skip(start)\n\n .take(end - start)\n\n .collect::<Vec<_>>()\n\n .into_spans()\n\n }\n\n }\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 5, "score": 133667.06373561785 }, { "content": "fn setup() -> Result<Terminal<Backend>> {\n\n let mut stdout = io::stdout();\n\n\n\n execute!(stdout, EnterAlternateScreen)?;\n\n enable_raw_mode()?;\n\n\n\n let backend = CrosstermBackend::new(stdout);\n\n let mut 
terminal = Terminal::new(backend)?;\n\n terminal.clear()?;\n\n\n\n Ok(terminal)\n\n}\n\n\n", "file_path": "clashctl-tui/src/app.rs", "rank": 6, "score": 122168.29304063303 }, { "content": "pub fn init_config(config: Config) {\n\n let _ = CONFIG.set(RwLock::new(config));\n\n}\n\n\n", "file_path": "clashctl-tui/src/config.rs", "rank": 7, "score": 116323.4020951473 }, { "content": "fn wrap_up(mut terminal: Terminal<Backend>) -> Result<()> {\n\n execute!(terminal.backend_mut(), LeaveAlternateScreen,)?;\n\n\n\n disable_raw_mode()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "clashctl-tui/src/app.rs", "rank": 8, "score": 112380.24459389651 }, { "content": "#[test]\n\nfn test_into_span() {\n\n use tui::style::Color;\n\n\n\n let style_blue = Style::default().fg(Color::Blue);\n\n let style_plain = Style::default();\n\n let style_red = Style::default().fg(Color::Red);\n\n\n\n let (a, b, c) = (\n\n Span::raw(\"Hello\"),\n\n Span::raw(\" \"),\n\n Span::raw(\"World 中文测试\"),\n\n );\n\n let chars_blue = a.styled_graphemes(style_blue);\n\n let chars_plain = b.styled_graphemes(style_plain);\n\n let chars_red = c.styled_graphemes(style_red);\n\n\n\n let spans = chars_blue\n\n .chain(chars_plain)\n\n .chain(chars_red)\n\n .collect::<Vec<_>>()\n", "file_path": "clashctl-tui/src/utils/ext.rs", "rank": 9, "score": 110506.30631652167 }, { "content": "pub trait IntoSpan<'a> {\n\n fn into_span(self) -> Span<'a>;\n\n}\n\n\n\nimpl<'a> IntoSpan<'a> for StyledGrapheme<'a> {\n\n fn into_span(self) -> Span<'a> {\n\n Span::styled(self.symbol, self.style)\n\n }\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/ext.rs", "rank": 10, "score": 109463.40046842096 }, { "content": "pub trait IntoSpans<'a> {\n\n fn into_spans(self) -> Spans<'a>;\n\n}\n\n\n\nimpl<'a> IntoSpans<'a> for Vec<StyledGrapheme<'a>> {\n\n fn into_spans(self) -> Spans<'a> {\n\n self.into_iter()\n\n .fold(None, |mut acc: Option<(Vec<Span<'a>>, Style)>, x| {\n\n let x_style = x.style;\n\n match acc {\n\n Some((ref mut vec, ref mut 
style)) => {\n\n if style == &x_style {\n\n vec.last_mut().expect(\"vec.len() >= 1\").content += x.symbol;\n\n } else {\n\n vec.push(x.into_span());\n\n *style = x_style\n\n }\n\n }\n\n None => return Some((vec![x.into_span()], x_style)),\n\n };\n", "file_path": "clashctl-tui/src/utils/ext.rs", "rank": 11, "score": 109463.40046842096 }, { "content": "pub fn get_block(title: &str) -> Block {\n\n Block::default()\n\n .borders(Borders::ALL)\n\n .style(Style::default().fg(Color::LightBlue))\n\n .title(Span::raw(format!(\" {} \", title)))\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 12, "score": 109181.30393563928 }, { "content": "pub fn get_focused_block(title: &str) -> Block {\n\n Block::default()\n\n .borders(Borders::ALL)\n\n .title(Span::styled(\n\n format!(\" {} \", title),\n\n Style::default().fg(Color::LightGreen),\n\n ))\n\n .style(Style::default().fg(Color::Green))\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 13, "score": 106938.67230329184 }, { "content": "pub fn get_config<'a>() -> RwLockReadGuard<'a, Config> {\n\n CONFIG\n\n .get()\n\n .expect(\"Config is not initialized\")\n\n .read()\n\n .unwrap()\n\n}\n\n\n", "file_path": "clashctl-tui/src/config.rs", "rank": 14, "score": 104997.78140476522 }, { "content": "pub fn run() {\n\n let opts = Opts::parse();\n\n opts.init_logger();\n\n debug!(\"Opts: {:#?}\", opts);\n\n\n\n if let Err(e) = match opts.cmd {\n\n None => main_loop(Default::default(), opts.flag).map_err(Error::TuiError),\n\n Some(Cmd::Tui(opt)) => main_loop(opt, opts.flag).map_err(Error::TuiError),\n\n Some(Cmd::Proxy(sub)) => sub.handle(&opts.flag),\n\n Some(Cmd::Server(sub)) => sub.handle(&opts.flag),\n\n Some(Cmd::Completion(arg)) => arg.handle(),\n\n } {\n\n eprintln!(\"{}\", e)\n\n }\n\n}\n", "file_path": "clashctl/src/lib.rs", "rank": 15, "score": 104519.8121238291 }, { "content": "pub fn get_config_mut<'a>() -> RwLockWriteGuard<'a, Config> {\n\n CONFIG\n\n .get()\n\n .expect(\"Config is not 
initialized\")\n\n .write()\n\n .unwrap()\n\n}\n\n\n\n#[derive(Clone, Debug, SmartDefault)]\n\npub struct ConfigState {\n\n clash: Option<ConfigModel>,\n\n #[default(_code = \"{ get_config().get_inner().clone() }\")]\n\n clashctl: ConfigData,\n\n offset: usize,\n\n}\n\n\n\nimpl ConfigState {\n\n pub fn clashctl_list(&self) -> impl Iterator<Item = (&str, String)> {\n\n let server = self\n\n .clashctl\n", "file_path": "clashctl-tui/src/config.rs", "rank": 16, "score": 102876.0674730949 }, { "content": "pub fn route(state: &TuiStates, area: Rect, f: &mut Frame<Backend>) {\n\n match state.page_index {\n\n 0 => f.render_widget(status::StatusPage::new(state), area),\n\n 1 => f.render_widget(proxy::ProxyPage::new(state), area),\n\n 2 => f.render_widget(rule::RulePage::new(state), area),\n\n 3 => f.render_widget(connection::ConnectionPage::new(state), area),\n\n 4 => f.render_widget(log::LogPage::new(state), area),\n\n 5 => f.render_widget(config::ConfigPage::new(&state.config_state), area),\n\n 6 => f.render_widget(debug::DebugPage::new(state), area),\n\n _ => unreachable!(),\n\n };\n\n}\n", "file_path": "clashctl-tui/src/pages/mod.rs", "rank": 17, "score": 101903.63658623498 }, { "content": "pub fn string_window_owned(string: String, range: &Range<usize>) -> String {\n\n string\n\n .chars()\n\n .skip(range.start)\n\n .take(range.end - range.start)\n\n .collect()\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 18, "score": 95780.20930131573 }, { "content": "pub fn detect_shell() -> Option<Shell> {\n\n match env::var(\"SHELL\") {\n\n Ok(shell) => PathBuf::from(shell)\n\n .file_name()\n\n .and_then(|name| name.to_str())\n\n .and_then(|name| name.parse().ok()),\n\n Err(_) => None,\n\n }\n\n}\n\n\n", "file_path": "clashctl/src/utils.rs", "rank": 19, "score": 92359.38007214284 }, { "content": "pub fn string_window<'a>(string: &'a str, range: &Range<usize>) -> Cow<'a, str> {\n\n string\n\n .chars()\n\n .skip(range.start)\n\n .take(range.end - 
range.start)\n\n .collect()\n\n}\n\n\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 20, "score": 91280.02656120644 }, { "content": "#[test]\n\nfn test_proxy_type() {\n\n let string = \"direct\";\n\n let parsed = string.parse().unwrap();\n\n assert_eq!(ProxyType::Direct, parsed);\n\n}\n", "file_path": "clashctl/src/command/proxy.rs", "rank": 21, "score": 91006.70295346927 }, { "content": "#[test]\n\nfn test_proxies() {\n\n let proxy_kv = [\n\n (\n\n \"test_a\".to_owned(),\n\n Proxy {\n\n proxy_type: ProxyType::Direct,\n\n history: vec![],\n\n udp: Some(false),\n\n all: None,\n\n now: None,\n\n },\n\n ),\n\n (\n\n \"test_b\".to_owned(),\n\n Proxy {\n\n proxy_type: ProxyType::Selector,\n\n history: vec![],\n\n udp: Some(false),\n\n all: Some(vec![\"test_c\".into()]),\n\n now: Some(\"test_c\".into()),\n", "file_path": "clashctl-core/src/model/proxy.rs", "rank": 22, "score": 91006.70295346927 }, { "content": "pub fn init_logger(level: Option<LevelFilter>) {\n\n let mut builder = Builder::new();\n\n\n\n if let Some(lf) = level {\n\n builder.filter_level(lf);\n\n } else if let Ok(s) = ::std::env::var(\"CLASHCTL_LOG\") {\n\n builder.parse_filters(&s);\n\n } else {\n\n builder.filter_level(LevelFilter::Info);\n\n }\n\n\n\n builder.format(|f, record| {\n\n use std::io::Write;\n\n let mut style = f.style();\n\n\n\n let level = match record.level() {\n\n Level::Trace => style.set_color(Color::Magenta).value(\"Trace\"),\n\n Level::Debug => style.set_color(Color::Blue).value(\"Debug\"),\n\n Level::Info => style.set_color(Color::Green).value(\" Info\"),\n\n Level::Warn => style.set_color(Color::Yellow).value(\" Warn\"),\n\n Level::Error => style.set_color(Color::Red).value(\"Error\"),\n\n };\n\n\n\n writeln!(f, \" {} > {}\", level, record.args(),)\n\n });\n\n\n\n builder.init()\n\n}\n", "file_path": "clashctl/src/utils.rs", "rank": 23, "score": 86479.57986984635 }, { "content": "#[test]\n\nfn test_version() {\n\n let clash = init();\n\n info!(\"{:#?}\", 
clash.get_version().unwrap())\n\n}\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 24, "score": 83102.04046056457 }, { "content": "#[test]\n\nfn test_log() {\n\n let clash = init();\n\n clash.get_log().unwrap().next();\n\n}\n\n\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 25, "score": 83055.41348310612 }, { "content": "#[test]\n\nfn test_traffic() {\n\n let clash = init();\n\n clash.get_traffic().unwrap().next();\n\n}\n\n\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 26, "score": 83010.99054997714 }, { "content": "#[test]\n\nfn test_rules() {\n\n let clash = init();\n\n clash.get_rules().unwrap();\n\n}\n\n\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 27, "score": 82876.96965763067 }, { "content": "pub trait RenderList {\n\n fn render_list(&self, opt: &ProxyListOpt);\n\n fn render_plain(&self, opt: &ProxyListOpt);\n\n fn render_tree(&self, opt: &ProxyListOpt);\n\n}\n\n\n\nimpl RenderList for Proxies {\n\n // pub fn names(&self) -> impl Iterator<Item = &String> {\n\n // self.iter().map(|x| x.0)\n\n // }\n\n\n\n fn render_list(&self, opt: &ProxyListOpt) {\n\n let (Width(terminal_width), _) = terminal_size().unwrap_or((Width(70), Height(0)));\n\n println!(\"\\n{:-<1$}\", \"\", terminal_width as usize);\n\n println!(\"{:<18}{:<8}NAME\", \"TYPE\", \"DELAY\");\n\n println!(\"{:-<1$}\", \"\", terminal_width as usize);\n\n\n\n if opt.plain {\n\n self.render_plain(opt)\n\n } else {\n", "file_path": "clashctl/src/proxy_render.rs", "rank": 28, "score": 82479.97902509832 }, { "content": "pub trait Sortable<'a, S: SortMethod<Self::Item<'a>>> {\n\n type Item<'b>;\n\n fn sort_with(&mut self, method: &S);\n\n}\n\n\n", "file_path": "clashctl-interactive/src/sort/mod.rs", "rank": 29, "score": 82300.6634945349 }, { "content": "fn main() {\n\n let Opt { opt, flag } = Opt::parse();\n\n if let Err(e) = main_loop(opt, flag) {\n\n eprintln!(\"{}\", e)\n\n }\n\n}\n", "file_path": "clashctl-tui/src/main.rs", "rank": 30, "score": 
82218.51720263544 }, { "content": "#[test]\n\nfn test_proxies() {\n\n let clash = init();\n\n clash.get_proxies().unwrap();\n\n}\n\n\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 31, "score": 81856.02314675352 }, { "content": "#[test]\n\nfn test() {\n\n let serialized = r#\"ProxySort ( by: name, order: ascendant )\"#;\n\n let deserialized = ProxySort {\n\n by: ProxySortBy::Name,\n\n order: SortOrder::Ascendant,\n\n };\n\n assert_eq!(\n\n ron::from_str::<ProxySort>(serialized).unwrap(),\n\n deserialized\n\n );\n\n}\n", "file_path": "clashctl-interactive/src/sort/proxy_sort.rs", "rank": 32, "score": 81856.02314675352 }, { "content": "#[test]\n\nfn test_proxy() {\n\n let clash = init();\n\n let proxies = clash.get_proxies().unwrap();\n\n let (proxy, _) = proxies.iter().next().unwrap();\n\n clash.get_proxy(proxy).unwrap();\n\n}\n\n\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 33, "score": 81856.02314675352 }, { "content": "pub trait EndlessSelf {\n\n fn next_self(&mut self);\n\n fn prev_self(&mut self);\n\n}\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n PartialEq,\n\n Eq,\n\n PartialOrd,\n\n Ord,\n\n Serialize,\n\n Deserialize,\n\n SmartDefault,\n\n strum::EnumString,\n\n strum::Display,\n\n strum::EnumVariantNames,\n\n)]\n\n#[serde(rename_all = \"lowercase\")]\n\n#[strum(ascii_case_insensitive)]\n\npub enum SortOrder {\n\n Ascendant,\n\n #[default]\n\n Descendant,\n\n}\n\n\n", "file_path": "clashctl-interactive/src/sort/mod.rs", "rank": 34, "score": 81701.13432075286 }, { "content": "enum ConfigListItem<'a> {\n\n Title(&'a str),\n\n Item { label: &'a str, content: String },\n\n Separator,\n\n Empty,\n\n}\n\n\n\nimpl<'a> ConfigListItem<'a> {\n\n pub fn title(title: &'a str) -> impl Iterator<Item = ConfigListItem> {\n\n [\n\n ConfigListItem::Empty,\n\n ConfigListItem::Title(title),\n\n ConfigListItem::Separator,\n\n ]\n\n .into_iter()\n\n }\n\n pub fn into_list_item(self, width: u16) -> ListItem<'a> {\n\n match self {\n\n 
ConfigListItem::Title(title) => ListItem::new(title).style(\n\n Style::default()\n", "file_path": "clashctl-tui/src/pages/config.rs", "rank": 35, "score": 80605.36920610719 }, { "content": "#[test]\n\nfn test_set_proxy() {\n\n let clash = init();\n\n let proxies = clash.get_proxies().unwrap();\n\n if let Some((group, proxy)) = proxies\n\n .iter()\n\n .find(|(_, proxy)| proxy.proxy_type.is_selector())\n\n {\n\n let all = proxy.all.as_ref().unwrap();\n\n let member = all.iter().next().unwrap();\n\n clash.set_proxygroup_selected(group, member).unwrap();\n\n }\n\n}\n\n\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 36, "score": 79891.27100642491 }, { "content": "#[test]\n\nfn test_proxy_delay() {\n\n let clash = init();\n\n let proxies = clash.get_proxies().unwrap();\n\n let (proxy, _) = proxies.iter().find(|x| x.1.proxy_type.is_normal()).unwrap();\n\n clash\n\n .get_proxy_delay(proxy, \"https://static.miao.dev/generate_204\", 10000)\n\n .unwrap();\n\n}\n\n\n", "file_path": "clashctl-core/src/test/api.rs", "rank": 37, "score": 79891.27100642491 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nenum FooterItemInner<'a> {\n\n Raw(String),\n\n Span(Span<'a>),\n\n Spans(Spans<'a>),\n\n}\n\n\n\nimpl<'a> From<FooterItemInner<'a>> for Spans<'a> {\n\n fn from(val: FooterItemInner<'a>) -> Self {\n\n match val {\n\n FooterItemInner::Raw(raw) => raw.into(),\n\n FooterItemInner::Span(span) => span.into(),\n\n FooterItemInner::Spans(spans) => spans,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> From<FooterItem<'a>> for Spans<'a> {\n\n fn from(val: FooterItem<'a>) -> Self {\n\n val.inner.into()\n\n }\n", "file_path": "clashctl-tui/src/components/block_footer.rs", "rank": 38, "score": 78859.15501607419 }, { "content": "pub trait HMS {\n\n fn as_second(&self) -> i64;\n\n fn hms(&self) -> String {\n\n let mut s = self.as_second();\n\n let mut neg = false;\n\n let mut written = false;\n\n if s < 0 {\n\n neg = true;\n\n s = -s;\n\n }\n\n let (h, s) = (s / 3600, s % 3600);\n\n let (m, 
s) = (s / 60, s % 60);\n\n let mut ret = String::with_capacity(10);\n\n if neg {\n\n written = true;\n\n ret.push('-')\n\n };\n\n if written || h > 0 {\n\n written = true;\n\n write!(ret, \"{}h \", h).expect(\"Cannot write to buf\")\n", "file_path": "clashctl-tui/src/utils/hms.rs", "rank": 39, "score": 78763.27548002744 }, { "content": "pub trait AsColor {\n\n fn as_color(&self) -> Color;\n\n}\n\n\n\nimpl AsColor for model::Level {\n\n fn as_color(&self) -> Color {\n\n match self {\n\n model::Level::Debug => Color::Gray,\n\n model::Level::Info => Color::Blue,\n\n model::Level::Warning => Color::Yellow,\n\n model::Level::Error => Color::Red,\n\n }\n\n }\n\n}\n\n\n\nimpl AsColor for log::Level {\n\n fn as_color(&self) -> Color {\n\n match self {\n\n log::Level::Debug => Color::Gray,\n\n log::Level::Info => Color::Blue,\n\n log::Level::Warn => Color::Yellow,\n\n log::Level::Error => Color::Red,\n\n _ => Color::Gray,\n\n }\n\n }\n\n}\n", "file_path": "clashctl-tui/src/utils/as_color.rs", "rank": 40, "score": 78763.27548002744 }, { "content": "#[test]\n\nfn test_interval() {\n\n let mut interval = Interval::every(Duration::from_millis(100));\n\n assert!(interval.next_tick().as_millis().abs_diff(100) < 2);\n\n sleep(Duration::from_millis(50));\n\n assert!(interval.next_tick().as_millis().abs_diff(50) < 2);\n\n}\n", "file_path": "clashctl-tui/src/utils/interval.rs", "rank": 41, "score": 78139.31960168264 }, { "content": "#[test]\n\nfn test_string_window() {\n\n let test = \"▼ 代理相关的 API\".to_owned();\n\n assert_eq!(\"代理\", &string_window(&test, &(2..4)));\n\n assert_eq!(\"理相关的 API\", &string_window(&test, &(3..114)));\n\n}\n", "file_path": "clashctl-tui/src/utils/helper.rs", "rank": 42, "score": 76290.37599322308 }, { "content": "pub trait Wrap: Sized {\n\n fn wrap_by(self, char: char) -> Self;\n\n fn wrapped(self) -> Self {\n\n self.wrap_by(' ')\n\n }\n\n}\n\n\n\nmacro_rules! 
impl_wrap {\n\n ($t:ty) => {\n\n impl Wrap for $t {\n\n fn wrap_by(mut self, char: char) -> Self {\n\n (&mut self).wrap_by(char);\n\n self\n\n }\n\n }\n\n };\n\n ($t:ty, $life:lifetime) => {\n\n impl<$life> Wrap for $t {\n\n fn wrap_by(mut self, char: char) -> Self {\n\n (&mut self).wrap_by(char);\n", "file_path": "clashctl-tui/src/utils/wrap.rs", "rank": 43, "score": 75242.02977690779 }, { "content": "fn render(state: &TuiStates, f: &mut Frame<Backend>) {\n\n let layout = Layout::default()\n\n .constraints([Constraint::Length(3), Constraint::Min(0)])\n\n .split(f.size());\n\n\n\n let tabs = Tabs::new(state);\n\n f.render_widget(tabs, layout[0]);\n\n\n\n let main = layout[1];\n\n\n\n route(state, main, f);\n\n}\n", "file_path": "clashctl-tui/src/app.rs", "rank": 44, "score": 72585.44439244192 }, { "content": "pub trait MovableListManage {\n\n fn sort(&mut self) -> &mut Self;\n\n\n\n fn next_sort(&mut self) -> &mut Self;\n\n\n\n fn prev_sort(&mut self) -> &mut Self;\n\n\n\n fn current_pos(&self) -> Coord;\n\n\n\n fn len(&self) -> usize;\n\n\n\n fn is_empty(&self) -> bool;\n\n fn toggle(&mut self) -> &mut Self;\n\n\n\n fn end(&mut self) -> &mut Self;\n\n\n\n fn hold(&mut self) -> &mut Self;\n\n\n\n fn handle(&mut self, event: ListEvent) -> Option<Action>;\n\n fn offset(&self) -> &Coord;\n", "file_path": "clashctl-tui/src/components/movable_list/state.rs", "rank": 45, "score": 71979.83404747728 }, { "content": "pub trait MovableListItem<'a> {\n\n fn to_spans(&self) -> Spans<'a>;\n\n}\n\n\n\nimpl<'a> MovableListItem<'a> for Spans<'a> {\n\n fn to_spans(&self) -> Spans<'a> {\n\n self.to_owned()\n\n }\n\n}\n\n\n\nimpl<'a> MovableListItem<'a> for String {\n\n fn to_spans(&self) -> Spans<'a> {\n\n Spans(vec![Span::raw(self.to_owned())])\n\n }\n\n}\n\n\n\nimpl<'a> MovableListItem<'a> for Cow<'a, str> {\n\n fn to_spans(&self) -> Spans<'a> {\n\n Spans(vec![Span::raw(self.to_owned())])\n\n }\n\n}\n\n\n", "file_path": "clashctl-tui/src/components/movable_list/item.rs", "rank": 
46, "score": 70312.97232103179 }, { "content": " #[error(\"Set logger error ({0})\")]\n\n SetLoggerError(#[from] log::SetLoggerError),\n\n}\n\n\n\nimpl<T> From<std::sync::mpsc::SendError<T>> for Error {\n\n fn from(_: std::sync::mpsc::SendError<T>) -> Self {\n\n Self::TuiBackendErr\n\n }\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n", "file_path": "clashctl-tui/src/error.rs", "rank": 47, "score": 66307.68607484952 }, { "content": "#[derive(Debug, thiserror::Error)]\n\npub enum Error {\n\n #[error(\"{0}\")]\n\n InteractiveError(#[from] clashctl_interactive::Error),\n\n\n\n #[error(\"Clashctl error: {0}\")]\n\n ClashCtl(#[from] crate::clashctl::Error),\n\n\n\n #[error(\"TUI error\")]\n\n TuiError(#[from] std::io::Error),\n\n\n\n #[error(\"TUI backend error\")]\n\n TuiBackendErr,\n\n\n\n #[error(\"TUI interuptted error\")]\n\n TuiInterupttedErr,\n\n\n\n #[error(\"TUI internal error\")]\n\n TuiInternalErr,\n\n\n", "file_path": "clashctl-tui/src/error.rs", "rank": 48, "score": 66302.22078005127 }, { "content": "use tui::{\n\n style::Style,\n\n text::{Span, Spans},\n\n widgets::Widget,\n\n};\n\n\n\nuse crate::{\n\n clashctl::model::Log,\n\n components::{MovableList, MovableListItem},\n\n define_widget, AsColor,\n\n};\n\n\n\nimpl<'a> MovableListItem<'a> for Log {\n\n fn to_spans(&self) -> Spans<'a> {\n\n let color = self.log_type.clone().as_color();\n\n Spans::from(vec![\n\n Span::styled(\n\n format!(\"{:<5}\", self.log_type.to_string().to_uppercase()),\n\n Style::default().fg(color),\n\n ),\n", "file_path": "clashctl-tui/src/pages/log.rs", "rank": 57, "score": 64048.39510158383 }, { "content": " Span::raw(\" \"),\n\n Span::raw(self.payload.to_owned()),\n\n ])\n\n }\n\n}\n\n\n\ndefine_widget!(LogPage);\n\n\n\n// TODO Pretty print parsed Log\n\nimpl<'a> Widget for LogPage<'a> {\n\n fn render(self, area: tui::layout::Rect, buf: &mut tui::buffer::Buffer) {\n\n let list = MovableList::new(\"Logs\", &self.state.log_state);\n\n list.render(area, buf);\n\n 
}\n\n}\n", "file_path": "clashctl-tui/src/pages/log.rs", "rank": 58, "score": 64036.533804774605 }, { "content": "use bytesize::ByteSize;\n\nuse tui::widgets::Widget;\n\nuse tui::{\n\n layout::{Constraint, Layout},\n\n symbols::bar::Set,\n\n};\n\nuse tui::{\n\n style::{Color, Style},\n\n text::Span,\n\n};\n\n\n\nuse crate::{\n\n components::{Footer, FooterItem, FooterWidget, Sparkline},\n\n define_widget,\n\n utils::get_block,\n\n};\n\n\n\npub const DOTS: Set = Set {\n\n empty: \" \",\n\n one_eighth: \"⡀\",\n", "file_path": "clashctl-tui/src/components/traffic.rs", "rank": 59, "score": 63996.531355510204 }, { "content": "\n\npub const HALF: Constraint = Constraint::Percentage(50);\n\n\n\ndefine_widget!(Traffics);\n\n\n\nimpl<'a> Widget for Traffics<'a> {\n\n fn render(self, area: tui::layout::Rect, buf: &mut tui::buffer::Buffer) {\n\n let traffic_size = area.width - 2;\n\n\n\n let traffics = self.state.traffics.iter().rev().take(traffic_size.into());\n\n\n\n let (up, down): (Vec<_>, Vec<_>) = traffics.map(|x| (x.up, x.down)).unzip();\n\n\n\n let (up_max, down_max) = (\n\n *up.iter().max().unwrap_or(&100),\n\n *down.iter().max().unwrap_or(&100),\n\n );\n\n\n\n let title = format!(\"▲ Max = {}/s\", ByteSize(up_max).to_string_as(true));\n\n\n", "file_path": "clashctl-tui/src/components/traffic.rs", "rank": 60, "score": 63987.34681960626 }, { "content": " let up_line = Sparkline::default()\n\n .data(&up)\n\n .max(up_max)\n\n .bar_set(DOTS)\n\n .style(Style::default().fg(Color::Green));\n\n\n\n let down_line = Sparkline::default()\n\n .data(&down)\n\n .max(down_max)\n\n .bar_set(REV_DOTS)\n\n .style(Style::default().fg(Color::White))\n\n .reversed(true);\n\n\n\n let block = get_block(&title);\n\n\n\n let inner = block.inner(area);\n\n\n\n let layout = Layout::default()\n\n .direction(tui::layout::Direction::Vertical)\n\n .constraints([HALF, HALF])\n", "file_path": "clashctl-tui/src/components/traffic.rs", "rank": 61, "score": 63983.7957023801 }, { "content": " 
.split(inner);\n\n\n\n block.render(area, buf);\n\n up_line.render(layout[0], buf);\n\n down_line.render(layout[1], buf);\n\n\n\n let mut footer = Footer::default();\n\n footer\n\n .push_left(FooterItem::span(Span::raw(format!(\n\n \" ▼ Max = {}/s \",\n\n ByteSize(down_max).to_string_as(true)\n\n ))))\n\n .left_offset(1);\n\n let footer_widget = FooterWidget::new(&footer);\n\n footer_widget.render(area, buf);\n\n }\n\n}\n", "file_path": "clashctl-tui/src/components/traffic.rs", "rank": 62, "score": 63980.81920448305 }, { "content": " one_quarter: \"⣀\",\n\n three_eighths: \"⣄\",\n\n half: \"⣤\",\n\n five_eighths: \"⣦\",\n\n three_quarters: \"⣶\",\n\n seven_eighths: \"⣷\",\n\n full: \"⣿\",\n\n};\n\n\n\npub const REV_DOTS: Set = Set {\n\n empty: \" \",\n\n one_eighth: \"⠁\",\n\n one_quarter: \"⠉\",\n\n three_eighths: \"⠋\",\n\n half: \"⠛\",\n\n five_eighths: \"⠟\",\n\n three_quarters: \"⠿\",\n\n seven_eighths: \"⡿\",\n\n full: \"⣿\",\n\n};\n", "file_path": "clashctl-tui/src/components/traffic.rs", "rank": 63, "score": 63980.1099355982 }, { "content": "use clashctl_interactive::RuleSort;\n\nuse tui::{\n\n style::{Color, Modifier, Style},\n\n text::{Span, Spans},\n\n widgets::Widget,\n\n};\n\n\n\nuse crate::{\n\n components::{MovableList, MovableListItem, MovableListState},\n\n define_widget,\n\n model::{Rule, RuleType, Rules},\n\n AsColor,\n\n};\n\n\n\ndefine_widget!(RulePage);\n\n\n\nimpl<'a> Widget for RulePage<'a> {\n\n fn render(self, area: tui::layout::Rect, buf: &mut tui::buffer::Buffer) {\n\n MovableList::new(\"Rules\", &self.state.rule_state).render(area, buf);\n\n }\n", "file_path": "clashctl-tui/src/pages/rule.rs", "rank": 64, "score": 63854.21762782172 }, { "content": "}\n\n\n\nimpl<'a> From<Rules> for MovableListState<'a, Rule, RuleSort> {\n\n fn from(val: Rules) -> Self {\n\n Self::new_with_sort(val.rules, RuleSort::default())\n\n }\n\n}\n\n\n\nimpl<'a> MovableListItem<'a> for Rule {\n\n fn to_spans(&self) -> Spans<'a> {\n\n let type_color = 
self.rule_type.as_color();\n\n let name_color = if self.proxy == \"DIRECT\" || self.proxy == \"REJECT\" {\n\n Color::DarkGray\n\n } else {\n\n Color::Yellow\n\n };\n\n let gray = Style::default().fg(Color::DarkGray);\n\n let r_type: &'static str = self.rule_type.into();\n\n let payload = if self.payload.is_empty() {\n\n \"*\"\n", "file_path": "clashctl-tui/src/pages/rule.rs", "rank": 65, "score": 63850.61241256287 }, { "content": " } else {\n\n &self.payload\n\n }\n\n .to_owned();\n\n let dash: String = \"─\".repeat(35_usize.saturating_sub(payload.len()) + 2) + \" \";\n\n vec![\n\n Span::styled(format!(\"{:16}\", r_type), Style::default().fg(type_color)),\n\n Span::styled(payload + \" \", Style::default().add_modifier(Modifier::BOLD)),\n\n Span::styled(dash, gray),\n\n Span::styled(self.proxy.to_owned(), Style::default().fg(name_color)),\n\n ]\n\n .into()\n\n }\n\n}\n", "file_path": "clashctl-tui/src/pages/rule.rs", "rank": 66, "score": 63845.8311221783 }, { "content": "}\n\n\n\nimpl AsColor for RuleType {\n\n fn as_color(&self) -> tui::style::Color {\n\n match self {\n\n RuleType::Domain => Color::Green,\n\n RuleType::DomainSuffix => Color::Green,\n\n RuleType::DomainKeyword => Color::Green,\n\n RuleType::GeoIP => Color::Yellow,\n\n RuleType::IPCIDR => Color::Yellow,\n\n RuleType::SrcIPCIDR => Color::Yellow,\n\n RuleType::SrcPort => Color::Yellow,\n\n RuleType::DstPort => Color::Yellow,\n\n RuleType::Process => Color::Yellow,\n\n RuleType::Match => Color::Blue,\n\n RuleType::Direct => Color::Blue,\n\n RuleType::Reject => Color::Red,\n\n RuleType::Unknown => Color::DarkGray,\n\n }\n\n }\n", "file_path": "clashctl-tui/src/pages/rule.rs", "rank": 67, "score": 63845.537366524804 }, { "content": "use tui::widgets::Widget;\n\n\n\nuse crate::{components::ProxyTreeWidget, define_widget};\n\n\n\ndefine_widget!(ProxyPage);\n\n\n\nimpl<'a> Widget for ProxyPage<'a> {\n\n fn render(self, area: tui::layout::Rect, buf: &mut tui::buffer::Buffer) {\n\n 
ProxyTreeWidget::new(&self.state.proxy_tree).render(area, buf);\n\n }\n\n}\n", "file_path": "clashctl-tui/src/pages/proxy.rs", "rank": 68, "score": 62757.527534241985 }, { "content": "pub trait MovableListItemExt<'a>: MovableListItem<'a> {\n\n fn width(&self) -> usize {\n\n self.to_spans().width()\n\n }\n\n}\n\n\n\nimpl<'a, T> MovableListItemExt<'a> for T where T: MovableListItem<'a> {}\n", "file_path": "clashctl-tui/src/components/movable_list/item.rs", "rank": 69, "score": 62322.1354847809 }, { "content": "use std::{fmt::Debug, marker::PhantomData};\n\n\n\nuse tui::{\n\n style::{Color, Modifier, Style},\n\n text::{Span, Spans},\n\n};\n\n\n\nuse crate::{\n\n components::{Consts, ProxyItem},\n\n model::ProxyType,\n\n utils::get_text_style,\n\n};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct ProxyGroup<'a> {\n\n pub(super) name: String,\n\n pub(super) proxy_type: ProxyType,\n\n pub(super) members: Vec<ProxyItem>,\n\n pub(super) current: Option<usize>,\n\n pub(super) cursor: usize,\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 70, "score": 60685.605628964804 }, { "content": "use std::{cmp::Ordering, collections::HashMap};\n\nuse std::{fmt::Debug, marker::PhantomData};\n\n\n\nuse clashctl_interactive::{EndlessSelf, ProxySort, Sortable};\n\nuse crossterm::event::KeyCode;\n\nuse tui::{\n\n style::{Color, Modifier, Style},\n\n text::Span,\n\n};\n\n\n\nuse crate::{\n\n components::{Footer, FooterItem, MovableListManage, ProxyGroup, ProxyItem},\n\n help_footer,\n\n model::Proxies,\n\n tagged_footer, Action, Coord, ListEvent, Wrap,\n\n};\n\n\n\n// TODO Proxy tree furthur functions\n\n//\n\n// - [X] Right & Enter can be used to apply selection\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 71, "score": 60680.03775818067 }, { "content": "use std::fmt::Debug;\n\n\n\nuse crate::clashctl::model::{History, Proxy, ProxyType};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct 
ProxyItem {\n\n pub(super) name: String,\n\n pub(super) proxy_type: ProxyType,\n\n pub(super) history: Option<History>,\n\n pub(super) udp: Option<bool>,\n\n pub(super) now: Option<String>,\n\n}\n\n\n\nimpl<'a> From<(&'a str, &'a Proxy)> for ProxyItem {\n\n fn from(val: (&'a str, &'a Proxy)) -> Self {\n\n let (name, proxy) = val;\n\n Self {\n\n name: name.to_owned(),\n\n proxy_type: proxy.proxy_type,\n\n history: proxy.history.get(0).cloned(),\n", "file_path": "clashctl-tui/src/components/proxy/item.rs", "rank": 72, "score": 60680.00391237115 }, { "content": " match x.history {\n\n Some(ref history) => Self::get_delay_span(history.delay),\n\n None => Consts::NO_LATENCY_SPAN,\n\n }\n\n } else {\n\n Consts::NOT_PROXY_SPAN\n\n }\n\n })\n\n }\n\n\n\n pub fn get_widget(&'a self, width: usize, status: ProxyGroupFocusStatus) -> Vec<Spans<'a>> {\n\n let delimiter = Span::raw(\" \");\n\n let prefix = if matches!(status, ProxyGroupFocusStatus::Focused) {\n\n Consts::FOCUSED_INDICATOR_SPAN\n\n } else {\n\n Consts::UNFOCUSED_INDICATOR_SPAN\n\n };\n\n let name = Span::styled(\n\n &self.name,\n\n Style::default()\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 73, "score": 60677.2475728046 }, { "content": "use std::cmp::Ordering;\n\n\n\nuse clashctl_interactive::{ProxySort, ProxySortBy, SortMethod, SortOrder, Sortable};\n\n\n\nuse crate::components::{ProxyGroup, ProxyItem, ProxyTree};\n\n\n\nimpl SortMethod<ProxyItem> for ProxySort {\n\n fn sort_fn(&self, a: &ProxyItem, b: &ProxyItem) -> std::cmp::Ordering {\n\n let cmp = match self.by() {\n\n ProxySortBy::Name => a.name.cmp(&b.name),\n\n ProxySortBy::Type => a.proxy_type.cmp(&b.proxy_type),\n\n ProxySortBy::Delay => {\n\n use Ordering::{Equal as Eq, Greater as Gt, Less as Lt};\n\n match (a.delay(), b.delay()) {\n\n (None, Some(_)) => Gt,\n\n (Some(_), None) => Lt,\n\n (Some(aa), Some(bb)) => {\n\n if aa == 0 {\n\n Gt\n\n } else if bb == 0 {\n", "file_path": "clashctl-tui/src/components/proxy/sort.rs", 
"rank": 74, "score": 60674.79492152234 }, { "content": " .as_ref()\n\n .map(|x| {\n\n if x.delay > 0 {\n\n let style = Self::get_delay_style(x.delay);\n\n Span::styled(x.delay.to_string(), style)\n\n } else {\n\n Span::styled(Consts::NO_LATENCY_SIGN, Consts::NO_LATENCY_STYLE)\n\n }\n\n })\n\n .unwrap_or_else(|| {\n\n if !x.proxy_type.is_normal() {\n\n Span::raw(\"\")\n\n } else {\n\n Span::styled(Consts::NO_LATENCY_SIGN, Consts::NO_LATENCY_STYLE)\n\n }\n\n });\n\n vec![\n\n prefix,\n\n Consts::DELIMITER_SPAN.clone(),\n\n name,\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 75, "score": 60674.7784402426 }, { "content": "// - [X] Esc for exist expand mode\n\n// - [X] T for test latency of current group\n\n// - [X] S for switch between sorting strategies\n\n// - [ ] / for searching\n\n//\n\n// In order for functions to be implemented, these are required:\n\n// - Remove Enter from InterfaceEvent::ToggleHold\n\n// - Maybe a new InterfaceEvent::Confirm correstponds to Enter\n\n// - `T`, `S`, `/` in proxy event handling\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ProxyTree<'a> {\n\n pub(super) groups: Vec<ProxyGroup<'a>>,\n\n pub(super) expanded: bool,\n\n pub(super) cursor: usize,\n\n pub(super) testing: bool,\n\n pub(super) footer: Footer<'a>,\n\n sort_method: ProxySort,\n\n}\n\n\n\nimpl<'a> Default for ProxyTree<'a> {\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 76, "score": 60673.166668788115 }, { "content": " .fg(Color::White)\n\n .add_modifier(Modifier::BOLD),\n\n );\n\n\n\n let proxy_type = Span::styled(self.proxy_type.to_string(), Consts::PROXY_TYPE_STYLE);\n\n\n\n let count = self.members.len();\n\n let proxy_count = Span::styled(\n\n if matches!(status, ProxyGroupFocusStatus::Expanded) {\n\n format!(\"{}/{}\", self.cursor + 1, count)\n\n } else {\n\n count.to_string()\n\n },\n\n Style::default().fg(Color::Green),\n\n );\n\n\n\n let mut ret = Vec::with_capacity(if matches!(status, 
ProxyGroupFocusStatus::Expanded) {\n\n self.members.len() + 1\n\n } else {\n\n 2\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 77, "score": 60672.94772405861 }, { "content": " });\n\n\n\n ret.push(Spans::from(vec![\n\n prefix.clone(),\n\n name,\n\n delimiter.clone(),\n\n proxy_type,\n\n delimiter,\n\n proxy_count,\n\n ]));\n\n\n\n if matches!(status, ProxyGroupFocusStatus::Expanded) {\n\n let skipped = self.cursor.saturating_sub(4);\n\n let text_style = get_text_style();\n\n let is_current =\n\n |index: usize| self.current.map(|x| x == index + skipped).unwrap_or(false);\n\n let is_pointed = |index: usize| self.cursor == index + skipped;\n\n\n\n let lines = self.members.iter().skip(skipped).enumerate().map(|(i, x)| {\n\n let prefix = if self.cursor == i + skipped {\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 78, "score": 60672.61688092361 }, { "content": " }\n\n\n\n fn get_delay_span(delay: u64) -> Span<'static> {\n\n match delay {\n\n 0 => Consts::NO_LATENCY_SPAN,\n\n 1..=200 => Consts::LOW_LATENCY_SPAN,\n\n 201..=400 => Consts::MID_LATENCY_SPAN,\n\n 401.. 
=> Consts::HIGH_LATENCY_SPAN,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Default for ProxyGroup<'a> {\n\n fn default() -> Self {\n\n Self {\n\n members: vec![],\n\n current: None,\n\n proxy_type: ProxyType::Selector,\n\n name: String::new(),\n\n cursor: 0,\n\n _life: PhantomData,\n\n }\n\n }\n\n}\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 79, "score": 60672.146593719575 }, { "content": " footer.push_left(FooterItem::span(Span::styled(\" ▶ Select \", style)));\n\n }\n\n\n\n footer.push_left(if self.testing {\n\n FooterItem::span(Span::styled(\" Testing \", highlight.fg(Color::Blue)))\n\n } else {\n\n FooterItem::spans(help_footer(\"Test\", style, highlight)).wrapped()\n\n });\n\n\n\n footer.push_left(tagged_footer(\"Sort\", style, self.sort_method).into());\n\n\n\n if let Some(ref now) = current_group.members[current_group.cursor].now {\n\n footer.push_right(FooterItem::span(Span::raw(now.to_owned())).wrapped());\n\n }\n\n }\n\n self.footer = footer;\n\n self\n\n }\n\n\n\n pub fn replace_with(&mut self, mut new_tree: ProxyTree<'a>) -> &mut Self {\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 80, "score": 60672.14386966679 }, { "content": " Consts::DELIMITER_SPAN.clone(),\n\n proxy_type,\n\n Consts::DELIMITER_SPAN.clone(),\n\n delay_span,\n\n ]\n\n .into()\n\n });\n\n ret.extend(lines);\n\n } else {\n\n ret.extend(\n\n self.get_summary_widget()\n\n .collect::<Vec<_>>()\n\n .chunks(\n\n width\n\n .saturating_sub(Consts::FOCUSED_INDICATOR_SPAN.width() + 2)\n\n .saturating_div(2),\n\n )\n\n .map(|x| {\n\n std::iter::once(if matches!(status, ProxyGroupFocusStatus::Focused) {\n\n Consts::FOCUSED_INDICATOR_SPAN\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 81, "score": 60671.602030737326 }, { "content": " Consts::EXPANDED_FOCUSED_INDICATOR_SPAN\n\n } else {\n\n Consts::EXPANDED_INDICATOR_SPAN\n\n };\n\n let name = Span::styled(\n\n &x.name,\n\n if is_current(i) {\n\n Style::default()\n\n 
.fg(Color::Blue)\n\n .add_modifier(Modifier::BOLD)\n\n } else if is_pointed(i) {\n\n text_style.fg(Color::LightBlue)\n\n } else {\n\n text_style\n\n },\n\n );\n\n let proxy_type = Span::styled(x.proxy_type.to_string(), Consts::PROXY_TYPE_STYLE);\n\n\n\n let delay_span = x\n\n .history\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 82, "score": 60671.015523792776 }, { "content": " pub(super) _life: PhantomData<&'a ()>,\n\n}\n\n\n\npub enum ProxyGroupFocusStatus {\n\n None,\n\n Focused,\n\n Expanded,\n\n}\n\n\n\nimpl<'a> ProxyGroup<'a> {\n\n pub fn proxy_type(&self) -> ProxyType {\n\n self.proxy_type\n\n }\n\n\n\n pub fn members(&self) -> &Vec<ProxyItem> {\n\n &self.members\n\n }\n\n pub fn get_summary_widget(&self) -> impl Iterator<Item = Span> {\n\n self.members.iter().map(|x| {\n\n if x.proxy_type.is_normal() {\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 83, "score": 60669.5014923288 }, { "content": " if !self.expanded {\n\n let group_name = current_group.name.clone();\n\n let style = Style::default()\n\n .fg(Color::Blue)\n\n .add_modifier(Modifier::REVERSED);\n\n\n\n let highlight = style.add_modifier(Modifier::BOLD);\n\n let sort = tagged_footer(\"Sort\", style, self.sort_method);\n\n\n\n let mut left = vec![\n\n FooterItem::span(Span::styled(\" FREE \", style)),\n\n FooterItem::span(Span::styled(\" SPACE to expand \", style)),\n\n if self.testing {\n\n FooterItem::span(Span::styled(\" Testing \", highlight.fg(Color::Green)))\n\n } else {\n\n FooterItem::spans(help_footer(\"Test\", style, highlight)).wrapped()\n\n },\n\n FooterItem::spans(sort),\n\n ];\n\n\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 84, "score": 60669.42451856319 }, { "content": " footer.append_left(&mut left);\n\n\n\n let name = FooterItem::span(Span::styled(group_name, style)).wrapped();\n\n footer.push_right(name);\n\n\n\n if let Some(now) = current_group.current {\n\n footer.push_right(\n\n 
FooterItem::span(Span::raw(current_group.members[now].name.to_owned()))\n\n .wrapped(),\n\n );\n\n }\n\n } else {\n\n let style = Style::default()\n\n .fg(Color::Green)\n\n .add_modifier(Modifier::REVERSED);\n\n let highlight = style.add_modifier(Modifier::BOLD);\n\n\n\n footer.push_left(FooterItem::span(Span::styled(\" [^] ▲ ▼ Move \", style)));\n\n\n\n if current_group.proxy_type.is_selector() {\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 85, "score": 60668.87742166713 }, { "content": " #[inline]\n\n pub fn current_group(&self) -> &ProxyGroup {\n\n &self.groups[self.cursor]\n\n }\n\n\n\n #[inline]\n\n pub fn is_testing(&self) -> bool {\n\n self.testing\n\n }\n\n\n\n #[inline]\n\n pub fn start_testing(&mut self) -> &mut Self {\n\n self.testing = true;\n\n self.update_footer()\n\n }\n\n\n\n #[inline]\n\n pub fn end_testing(&mut self) -> &mut Self {\n\n self.testing = false;\n\n self.update_footer()\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 86, "score": 60668.56625092883 }, { "content": " } else {\n\n Consts::UNFOCUSED_INDICATOR_SPAN\n\n })\n\n .chain(x.to_owned().into_iter())\n\n .collect::<Vec<_>>()\n\n .into()\n\n }),\n\n )\n\n }\n\n\n\n ret\n\n }\n\n\n\n fn get_delay_style(delay: u64) -> Style {\n\n match delay {\n\n 0 => Consts::NO_LATENCY_STYLE,\n\n 1..=200 => Consts::LOW_LATENCY_STYLE,\n\n 201..=400 => Consts::MID_LATENCY_STYLE,\n\n 401.. 
=> Consts::HIGH_LATENCY_STYLE,\n\n }\n", "file_path": "clashctl-tui/src/components/proxy/group.rs", "rank": 87, "score": 60668.510306116594 }, { "content": " Lt\n\n } else {\n\n aa.cmp(&bb)\n\n }\n\n }\n\n (None, None) => Eq,\n\n }\n\n }\n\n };\n\n if matches!(self.order(), SortOrder::Descendant) {\n\n cmp.reverse()\n\n } else {\n\n cmp\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Sortable<'a, ProxySort> for ProxyGroup<'a> {\n\n type Item<'b> = ProxyItem;\n\n fn sort_with(&mut self, method: &ProxySort) {\n", "file_path": "clashctl-tui/src/components/proxy/sort.rs", "rank": 88, "score": 60668.343792526786 }, { "content": " let pointed = &self.members[self.cursor].name.clone();\n\n let current = self.current.map(|x| self.members[x].name.clone());\n\n self.members.sort_by(|a, b| method.sort_fn(a, b));\n\n for (i, ProxyItem { name, .. }) in self.members.iter().enumerate() {\n\n if name == pointed {\n\n self.cursor = i;\n\n }\n\n if let Some(ref x) = current {\n\n if name == x {\n\n self.current = Some(i)\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Sortable<'a, ProxySort> for ProxyTree<'a> {\n\n type Item<'b> = ProxyItem;\n\n fn sort_with(&mut self, method: &ProxySort) {\n\n self.groups.iter_mut().for_each(|x| x.sort_with(method))\n\n }\n\n}\n", "file_path": "clashctl-tui/src/components/proxy/sort.rs", "rank": 89, "score": 60668.213107879914 }, { "content": " }\n\n\n\n pub fn sort_groups_with_frequency(&mut self, freq: &HashMap<String, usize>) -> &mut Self {\n\n self.groups\n\n .sort_by(|a, b| match (freq.get(&a.name), freq.get(&b.name)) {\n\n (Some(a_freq), Some(b_freq)) => b_freq.cmp(a_freq),\n\n (Some(_), None) => Ordering::Less,\n\n (None, Some(_)) => Ordering::Greater,\n\n (None, None) => a.name.cmp(&b.name),\n\n });\n\n self\n\n }\n\n\n\n pub fn update_footer(&mut self) -> &mut Self {\n\n let mut footer = Footer::default();\n\n let current_group = match self.groups.get(self.cursor) {\n\n Some(grp) => grp,\n\n _ => return self,\n\n };\n\n\n", "file_path": 
"clashctl-tui/src/components/proxy/tree.rs", "rank": 90, "score": 60667.44523608321 }, { "content": "\n\n fn hold(&mut self) -> &mut Self {\n\n self.expanded = true;\n\n self\n\n }\n\n\n\n fn handle(&mut self, event: ListEvent) -> Option<Action> {\n\n if self.expanded {\n\n let step = if event.fast { 3 } else { 1 };\n\n let group = &mut self.groups[self.cursor];\n\n match event.code {\n\n KeyCode::Up => {\n\n if group.cursor > 0 {\n\n group.cursor = group.cursor.saturating_sub(step)\n\n }\n\n }\n\n KeyCode::Down => {\n\n let left = group.members.len().saturating_sub(group.cursor + 1);\n\n\n\n group.cursor += left.min(step)\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 91, "score": 60666.93105252437 }, { "content": " }\n\n KeyCode::Right | KeyCode::Enter => {\n\n if group.proxy_type.is_selector() {\n\n let current = group.members[group.cursor].name.to_owned();\n\n return Some(Action::ApplySelection {\n\n group: group.name.to_owned(),\n\n proxy: current,\n\n });\n\n }\n\n }\n\n _ => {}\n\n }\n\n } else {\n\n match event.code {\n\n KeyCode::Up => {\n\n if self.cursor > 0 {\n\n self.cursor = self.cursor.saturating_sub(1)\n\n }\n\n }\n\n KeyCode::Down => {\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 92, "score": 60666.850932215675 }, { "content": " udp: proxy.udp,\n\n now: proxy.now.as_ref().map(Into::into),\n\n }\n\n }\n\n}\n\n\n\nimpl ProxyItem {\n\n pub fn proxy_type(&self) -> ProxyType {\n\n self.proxy_type\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n pub fn delay(&self) -> Option<u64> {\n\n self.history.as_ref().map(|x| x.delay)\n\n }\n\n}\n", "file_path": "clashctl-tui/src/components/proxy/item.rs", "rank": 93, "score": 60666.84293345842 }, { "content": " fn default() -> Self {\n\n let mut ret = Self {\n\n groups: Default::default(),\n\n expanded: Default::default(),\n\n cursor: Default::default(),\n\n footer: Default::default(),\n\n testing: Default::default(),\n\n sort_method: 
Default::default(),\n\n };\n\n ret.update_footer();\n\n ret\n\n }\n\n}\n\n\n\nimpl<'a> ProxyTree<'a> {\n\n #[inline]\n\n pub fn cursor(&self) -> usize {\n\n self.cursor\n\n }\n\n\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 94, "score": 60666.78385461396 }, { "content": "use crate::mod_use;\n\n\n\nmod_use!(group, item, sort, tree, tree_widget);\n", "file_path": "clashctl-tui/src/components/proxy/mod.rs", "rank": 95, "score": 60666.3878775719 }, { "content": " }\n\n self.groups = new_tree.groups;\n\n let method = self.sort_method;\n\n self.sort_with(&method);\n\n self.update_footer()\n\n }\n\n}\n\n\n\nimpl<'a> From<Proxies> for ProxyTree<'a> {\n\n fn from(val: Proxies) -> Self {\n\n let mut ret = Self {\n\n groups: Vec::with_capacity(val.len()),\n\n ..Default::default()\n\n };\n\n for (name, group) in val.groups() {\n\n let all = group\n\n .all\n\n .as_ref()\n\n .expect(\"ProxyGroup should have member vec\");\n\n let mut members = Vec::with_capacity(all.len());\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 96, "score": 60665.58492543151 }, { "content": " ret.groups.push(ProxyGroup {\n\n _life: PhantomData,\n\n name: name.to_owned(),\n\n proxy_type: group.proxy_type,\n\n cursor: current.unwrap_or_default(),\n\n current,\n\n members,\n\n })\n\n }\n\n\n\n ret\n\n }\n\n}\n\n\n\nimpl<'a> MovableListManage for ProxyTree<'a> {\n\n fn sort(&mut self) -> &mut Self {\n\n let method = self.sort_method;\n\n self.sort_with(&method);\n\n self\n\n }\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 97, "score": 60664.90768101291 }, { "content": " for x in all.iter() {\n\n let member = (\n\n x.as_str(),\n\n val.get(x)\n\n .to_owned()\n\n .expect(\"Group member should be in all proxies\"),\n\n )\n\n .into();\n\n members.push(member);\n\n }\n\n\n\n // if group.now.is_some then it must be in all proxies\n\n // So use map & expect instead of Option#and_then\n\n let current = group.now.as_ref().map(|name| {\n\n 
members\n\n .iter()\n\n .position(|item: &ProxyItem| &item.name == name)\n\n .expect(\"Group member should be in all proxies\")\n\n });\n\n\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 98, "score": 60664.79821760466 }, { "content": "\n\n fn next_sort(&mut self) -> &mut Self {\n\n self.sort_method.next_self();\n\n let method = self.sort_method;\n\n self.sort_with(&method);\n\n self.update_footer()\n\n }\n\n\n\n fn prev_sort(&mut self) -> &mut Self {\n\n self.sort_method.prev_self();\n\n let method = self.sort_method;\n\n self.sort_with(&method);\n\n self.update_footer()\n\n }\n\n\n\n fn current_pos(&self) -> Coord {\n\n Default::default()\n\n }\n\n\n\n #[inline]\n", "file_path": "clashctl-tui/src/components/proxy/tree.rs", "rank": 99, "score": 60662.399302166436 } ]
Rust
src/widget_themes/classic.rs
fltk-rs/fltk-theme
8e792be048cb6c868c66f526bffd7782f52f54ee
use super::*; use fltk::{app, enums::Color, misc::Tooltip}; fn classic_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAWWMMRR", x, y, w, h); } fn classic_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_button_up_frame(x, y, w, h, c); } fn classic_check_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMMPPAA", x, y, w, h); } fn classic_check_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_check_down_frame(x, y, w, h, c); } fn classic_panel_thin_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("MMWW", x, y, w, h); } fn classic_panel_thin_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_panel_thin_up_frame(x, y, w, h, c); } fn classic_spacer_thin_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMM", x, y, w, h); } fn classic_spacer_thin_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_spacer_thin_down_frame(x, y, w, h, c); } fn classic_default_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAAAGGWWMMRR", x, y, w, h); } fn classic_default_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 3, y + 3, w - 6, h - 6); classic_default_button_up_frame(x, y, w, h, c); } fn classic_radio_round_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('M' as i32 - 'A' as i32))); draw_arc(x, y, w, h, 45.0, 225.0); set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_arc(x, y, w, h, -135.0, 45.0); set_draw_color(activated_color(Color::gray_ramp(0))); draw_arc(x + 1, y + 1, w - 2, h - 2, 45.0, 
225.0); set_draw_color(activated_color(Color::gray_ramp('T' as i32 - 'A' as i32))); draw_arc(x + 1, y + 1, w - 2, h - 2, -135.0, 45.0); } fn classic_radio_round_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_pie(x + 2, y + 2, w - 4, h - 4, 0.0, 360.0); classic_radio_round_down_frame(x, y, w, h, c); } fn use_classic_scheme() { app::set_scheme(app::Scheme::Base); app::set_frame_type_cb(OS_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_PANEL_THIN_UP_BOX, classic_panel_thin_up_box, 1, 1, 2, 2); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_BOX, classic_spacer_thin_down_box, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_PANEL_THIN_UP_FRAME, classic_panel_thin_up_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_FRAME, classic_spacer_thin_down_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_RADIO_ROUND_DOWN_BOX, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_HOVERED_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_HOVERED_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_INPUT_THIN_DOWN_BOX, classic_check_down_box, 2, 3, 4, 6); app::set_frame_type_cb( OS_INPUT_THIN_DOWN_FRAME, classic_check_down_frame, 2, 3, 4, 6, ); app::set_frame_type_cb( OS_DEFAULT_BUTTON_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_HOVERED_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_DEPRESSED_DOWN_BOX, 
classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_TOOLBAR_BUTTON_HOVER_BOX, FrameType::FlatBox); app::set_frame_type_cb(OS_TABS_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_SWATCH_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_MINI_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_MINI_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(FrameType::UpBox, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(FrameType::DownBox, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb( FrameType::RoundDownBox, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_BG_BOX, FrameType::FlatBox); } fn use_classic_colors() { app::background(0xD4, 0xD0, 0xC8); app::background2(0xFF, 0xFF, 0xFF); app::foreground(0x00, 0x00, 0x00); app::set_color(Color::Inactive, 0x5F, 0x5F, 0x5F); app::set_color(Color::Selection, 0x0A, 0x24, 0x6A); app::set_color(Color::Free, 0xD4, 0xD0, 0xC8); Tooltip::set_color(Color::from_rgb(0xFF, 0xFF, 0xE1)); Tooltip::set_text_color(Color::ForeGround); } pub(crate) fn use_classic_theme() { use_classic_scheme(); use_classic_colors(); use_native_settings(); }
use super::*; use fltk::{app, enums::Color, misc::Tooltip}; fn classic_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAWWMMRR", x, y, w, h); } fn classic_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_button_up_frame(x, y, w, h, c); } fn classic_check_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMMPPAA", x, y, w, h); } fn classic_check_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_check_down_frame(x, y, w, h, c); } fn classic_panel_thin_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("MMWW", x, y, w, h); } fn classic_panel_thin_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_panel_thin_up_frame(x, y, w, h, c); } fn classic_spacer_thin_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMM", x, y, w, h); } fn classic_spacer_thin_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_spacer_thin_down_frame(x, y, w, h, c); } fn classic_default_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAAAGGWWMMRR", x, y, w, h); } fn classic_default_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 3, y + 3, w - 6, h - 6); classic_default_button_up_frame(x, y, w, h, c); } fn classic_radio_round_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('M' as i32 - 'A' as i32))); draw_arc(x, y, w, h, 45.0, 225.0); set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_arc(x, y, w, h, -135.0, 45.0); set_draw_color(activated_color(Color::gray_ramp(0))); draw_arc(x + 1, y + 1, w - 2, h - 2, 45.0, 
225.0); set_draw_color(activated_color(Color::gray_ramp('T' as i32 - 'A' as i32))); draw_arc(x + 1, y + 1, w - 2, h - 2, -135.0, 45.0); } fn classic_radio_round_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_pie(x + 2, y + 2, w - 4, h - 4, 0.0, 360.0); classic_radio_round_down_frame(x, y, w, h, c); } fn use_classic_scheme() { app::set_scheme(app::Scheme::Base); app::set_frame_type_cb(OS_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_PANEL_THIN_UP_BOX, classic_panel_thin_up_box, 1, 1, 2, 2); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_BOX, classic_spacer_thin_down_box, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_PANEL_THIN_UP_FRAME, classic_panel_thin_up_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_FRAME, classic_spacer_thin_down_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_RADIO_ROUND_DOWN_BOX, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_HOVERED_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_HOVERED_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_INPUT_THIN_DOWN_BOX, classic_check_down_box, 2, 3, 4, 6); app::set_frame_type_cb( OS_INPUT_THIN_DOWN_FRAME, classic_check_down_frame, 2, 3, 4, 6, ); app::set_frame_type_cb( OS_DEFAULT_BUTTON_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_HOVERED_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_DEPRESSED_DOWN_BOX, 
classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_TOOLBAR_BUTTON_HOVER_BOX, FrameType::FlatBox); app::set_frame_type_cb(OS_TABS_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_SWATCH_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_MINI_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_MINI_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(FrameType::UpBox, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(FrameType::DownBox, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb( FrameType::RoundDownBox, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_BG_BOX, FrameType::FlatBox); }
pub(crate) fn use_classic_theme() { use_classic_scheme(); use_classic_colors(); use_native_settings(); }
fn use_classic_colors() { app::background(0xD4, 0xD0, 0xC8); app::background2(0xFF, 0xFF, 0xFF); app::foreground(0x00, 0x00, 0x00); app::set_color(Color::Inactive, 0x5F, 0x5F, 0x5F); app::set_color(Color::Selection, 0x0A, 0x24, 0x6A); app::set_color(Color::Free, 0xD4, 0xD0, 0xC8); Tooltip::set_color(Color::from_rgb(0xFF, 0xFF, 0xE1)); Tooltip::set_text_color(Color::ForeGround); }
function_block-full_function
[ { "content": "fn up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rect(x, y, w, h, Color::color_average(Color::White, c, 0.2));\n\n}\n\n\n", "file_path": "src/widget_schemes/clean.rs", "rank": 6, "score": 231206.04478592548 }, { "content": "fn up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let col = c.to_rgb();\n\n let bg = Color::BackGround.to_rgb();\n\n let svg = format!(\n\n \"<svg width='{0}' height='{1}'>\n\n <defs>\n\n <linearGradient id='grad1' x1='0%' y1='0%' x2='0%' y2='100%'>\n\n <stop offset='0%' style='stop-color:rgb({2},{3},{4});stop-opacity:1' />\n\n <stop offset='100%' style='stop-color:rgb({5},{6},{7});stop-opacity:1' />\n\n </linearGradient>\n\n </defs>\n\n <rect width='{0}' height='{1}' rx='{8}' fill='url(#grad1)' />\n\n </svg>\",\n\n w,\n\n h,\n\n col.0,\n\n col.1,\n\n col.2,\n\n col.0 - 5,\n\n col.1 - 5,\n\n col.2 - 5,\n\n h / 4,\n\n );\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/aqua.rs", "rank": 7, "score": 231206.04478592548 }, { "content": "fn up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 8, "score": 231206.04478592548 }, { "content": "fn down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n shade_rect_down(x + 1, y, w - 2, h, c);\n\n down_frame(x, y, w, h, c.darker());\n\n //draw the inner rect.\n\n //frame_rect(x + 1, y + 1, w - 3, h - 3, Color::color_average(c, Color::Black, .65));\n\n}\n\n\n", "file_path": "src/widget_schemes/gleam.rs", "rank": 9, "score": 231206.04478592548 }, { "content": "fn up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rect(x, y, w, h, Color::color_average(Color::White, c, 0.2));\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 10, "score": 231206.04478592548 }, { "content": "fn up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n frame_rect_up(x, y, w - 1, h - 1, 
c.darker());\n\n}\n\n\n", "file_path": "src/widget_schemes/gleam.rs", "rank": 11, "score": 231206.04478592548 }, { "content": "fn up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n shade_rect_up(x + 1, y, w - 2, h - 1, c);\n\n frame_rect_up(x, y, w - 1, h - 1, c.darker());\n\n //draw the inner rect.\n\n frame_rect(\n\n x + 1,\n\n y + 1,\n\n w - 3,\n\n h - 3,\n\n Color::color_average(c, Color::White, 0.25),\n\n );\n\n}\n\n\n", "file_path": "src/widget_schemes/gleam.rs", "rank": 12, "score": 231206.04478592548 }, { "content": "fn down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if w > 6 && h > 6 {\n\n draw_rect_fill(x + 2, y + 2, w - 4, h - 5, c.darker());\n\n down_frame(x, y, w, h, c);\n\n } else {\n\n narrow_thin_box(x, y, w, h, c);\n\n }\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 13, "score": 231206.04478592548 }, { "content": "fn rect(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n draw_rect_fill(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_schemes/clean.rs", "rank": 14, "score": 231206.04478592548 }, { "content": "fn up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n frame_rect(x, y, w, h - 1, \"KLDIIJLM\", c);\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 15, "score": 231206.04478592548 }, { "content": "fn down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n frame_rect(x, y, w, h - 1, \"LLLLTTRR\", c);\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 16, "score": 231206.04478592548 }, { "content": "fn down_round(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n shade_round(x, y, w, h, \"STUVWWWVT\", c);\n\n frame_round(x, y, w, h, \"IJLM\", c);\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 17, "score": 231206.04478592548 }, { "content": "fn down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let col = c.to_rgb();\n\n let svg = format!(\n\n \"<svg viewBox='0 0 {0} {1}'>\n\n <defs>\n\n <linearGradient id='grad1' x1='0%' y1='0%' x2='0%' 
y2='100%'>\n\n <stop offset='0%' style='stop-color:rgb({2},{3},{4});stop-opacity:1' />\n\n <stop offset='100%' style='stop-color:rgb({5},{6},{7});stop-opacity:1' />\n\n </linearGradient>\n\n </defs>\n\n <rect width='{0}' height='{1}' rx='{8}' fill='url(#grad1)' />\n\n </svg>\",\n\n w,\n\n h,\n\n col.0,\n\n col.1,\n\n col.2,\n\n col.0,\n\n col.1,\n\n col.2,\n\n h / 4\n\n );\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/aqua.rs", "rank": 18, "score": 231206.04478592548 }, { "content": "fn up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if w > 8 && h > 8 {\n\n shade_rect(x + 1, y + 1, w - 2, h - 3, \"RVQNOPQRSTUVWVQ\", c);\n\n\n\n frame_rect(x, y, w, h - 1, \"IJLM\", c);\n\n } else {\n\n thin_up_box(x, y, w, h, c);\n\n }\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 19, "score": 231206.04478592548 }, { "content": "fn down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rect(\n\n x - 2,\n\n y - 2,\n\n w + 4,\n\n h + 4,\n\n Color::color_average(Color::Black, Color::White, 0.3),\n\n );\n\n rectf(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 20, "score": 231206.04478592548 }, { "content": "fn down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rectf(x, y, w, h, c);\n\n rect(x, y, w, h, Color::color_average(Color::White, c, 0.2));\n\n}\n\n\n", "file_path": "src/widget_schemes/clean.rs", "rank": 21, "score": 231206.04478592548 }, { "content": "fn rectf(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/clean.rs", "rank": 22, "score": 231206.04478592548 }, { "content": "fn up_round(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n shade_round(x, y, w, h, \"RVQNOPQRSTUVWVQ\", c);\n\n frame_round(x, y, w, h, \"IJLM\", c);\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 23, "score": 
231206.04478592548 }, { "content": "fn down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 24, "score": 231206.04478592548 }, { "content": "fn up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rectf(x, y, w, h, c);\n\n rect(x, y, w, h, Color::color_average(Color::White, c, 0.2));\n\n}\n\n\n", "file_path": "src/widget_schemes/clean.rs", "rank": 25, "score": 231206.04478592548 }, { "content": "fn down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rect(x, y, w, h, Color::color_average(Color::Black, c, 0.2));\n\n}\n\n\n", "file_path": "src/widget_schemes/clean.rs", "rank": 26, "score": 231206.04478592548 }, { "content": "fn down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n frame_rect_down(x, y, w - 1, h - 1, c.darker());\n\n}\n\n\n", "file_path": "src/widget_schemes/gleam.rs", "rank": 27, "score": 231206.04478592548 }, { "content": "fn rect(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n draw_rect_fill(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 28, "score": 231206.04478592548 }, { "content": "fn rectf(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 29, "score": 231206.04478592548 }, { "content": "fn border_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rectf(x, y, w, h, c);\n\n rect(x, y, w, h, Color::color_average(Color::White, c, 0.2));\n\n}\n\n\n\npub(crate) fn use_clean_scheme() {\n\n use fltk::enums::FrameType::*;\n\n app::reload_scheme().ok();\n\n app::set_scheme(app::Scheme::Base);\n\n app::set_frame_type_cb(UpBox, up_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(DownBox, down_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(ThinUpBox, up_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(ThinDownBox, down_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(UpFrame, up_frame, 1, 1, 2, 
2);\n\n app::set_frame_type_cb(DownFrame, down_frame, 1, 1, 2, 2);\n\n app::set_frame_type_cb(RoundUpBox, up_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(RoundDownBox, down_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(BorderBox, border_box, 1, 1, 2, 2);\n\n}\n", "file_path": "src/widget_schemes/clean.rs", "rank": 30, "score": 229248.10759259772 }, { "content": "fn border_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rectf(x, y, w, h, c);\n\n rect(x, y, w, h, Color::color_average(Color::White, c, 0.2));\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 31, "score": 229248.10759259772 }, { "content": "fn hover_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rect(\n\n x - 2,\n\n y - 2,\n\n w + 4,\n\n h + 4,\n\n Color::color_average(Color::Black, Color::White, 0.2),\n\n );\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 32, "score": 229248.10759259772 }, { "content": "fn hover_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n hover_up_frame(x - 2, y - 2, w + 4, h + 4, c);\n\n rect(\n\n x + 2,\n\n y + 2,\n\n w - 4,\n\n h - 4,\n\n Color::color_average(Color::Black, Color::White, 0.3),\n\n );\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 33, "score": 229248.10759259772 }, { "content": "fn default_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rectf(\n\n x,\n\n y,\n\n w,\n\n h,\n\n Color::color_average(Color::Black, Color::White, 0.3),\n\n );\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 34, "score": 229248.10759259772 }, { "content": "fn border_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x, y, w, h);\n\n set_draw_color(activated_color(c.darker()));\n\n draw_rect(x + 1, y + 1, w - 2, h - 2);\n\n}\n\n\n\npub(crate) fn use_gleam_scheme() {\n\n use fltk::enums::FrameType::*;\n\n app::reload_scheme().ok();\n\n app::set_scheme(app::Scheme::Gleam);\n\n app::set_visible_focus(false);\n\n app::set_frame_type_cb(UpBox, 
up_box, 2, 2, 4, 4);\n\n app::set_frame_type_cb(DownBox, down_box, 2, 2, 3, 3);\n\n app::set_frame_type_cb(ThinUpBox, up_box, 2, 2, 3, 3);\n\n app::set_frame_type_cb(ThinDownBox, down_box, 2, 2, 3, 3);\n\n app::set_frame_type_cb(UpFrame, up_frame, 2, 2, 3, 3);\n\n app::set_frame_type_cb(DownFrame, down_frame, 2, 2, 3, 3);\n\n app::set_frame_type_cb(RoundUpBox, up_box, 2, 2, 3, 3);\n\n app::set_frame_type_cb(RoundDownBox, down_box, 2, 2, 3, 3);\n\n app::set_frame_type_cb(BorderBox, border_box, 1, 1, 2, 2);\n\n}\n", "file_path": "src/widget_schemes/gleam.rs", "rank": 35, "score": 229248.10759259772 }, { "content": "fn border_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n draw::draw_box(\n\n FrameType::RFlatBox,\n\n x + 1,\n\n y + 1,\n\n w - 2,\n\n h - 2,\n\n Color::from_rgba_tuple(*crate::colors::aqua::dark::systemCyanColor),\n\n );\n\n}\n\n\n", "file_path": "src/widget_schemes/aqua.rs", "rank": 36, "score": 229248.10759259772 }, { "content": "fn round_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let col = Color::BackGround.to_rgb();\n\n let fg = Color::contrast(Color::BackGround, Color::BackGround).to_rgb();\n\n let svg = format!(\n\n \"<svg width='{}' height='{}'>\n\n <circle cx='{}' cy='{}' r='{}' stroke='rgb({},{},{})' stroke-width='1' fill='rgb({},{},{})'/>\n\n </svg>\",\n\n w,\n\n h,\n\n w / 2,\n\n h / 2,\n\n (w as f64 - 1.0) / 2.0,\n\n fg.0,\n\n fg.1,\n\n fg.2,\n\n col.0,\n\n col.1,\n\n col.2\n\n );\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 37, "score": 229248.10759259772 }, { "content": "fn depressed_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n depressed_down_frame(x, y, w, h, c);\n\n set_draw_color(Color::color_average(Color::Black, Color::White, 0.2));\n\n draw_rectf(x, y, w, h);\n\n}\n\n\n\npub(crate) fn use_fluent_scheme() {\n\n app::set_visible_focus(false);\n\n app::set_scrollbar_size(15);\n\n use 
self::frames::*;\n\n use fltk::enums::FrameType::*;\n\n app::set_scheme(app::Scheme::Base);\n\n app::set_frame_type_cb(UpBox, up_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(DownBox, down_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(ThinUpBox, up_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(ThinDownBox, down_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(UpFrame, up_frame, 1, 1, 2, 2);\n\n app::set_frame_type_cb(DownFrame, down_frame, 1, 1, 2, 2);\n\n app::set_frame_type_cb(RoundUpBox, round_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(RoundDownBox, round_box, 1, 1, 2, 2);\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 38, "score": 229248.10759259772 }, { "content": "fn border_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(c);\n\n draw_rectf(x, y, w, h);\n\n set_draw_color(c.darker());\n\n draw_rect(x, y, w, h);\n\n}\n\n\n\npub(crate) fn use_crystal_scheme() {\n\n use fltk::enums::FrameType::*;\n\n app::reload_scheme().ok();\n\n app::set_scheme(app::Scheme::Base);\n\n app::set_frame_type_cb(UpBox, up_box, 4, 4, 8, 8);\n\n app::set_frame_type_cb(DownBox, down_box, 2, 2, 4, 4);\n\n app::set_frame_type_cb(UpFrame, up_frame, 2, 2, 4, 4);\n\n app::set_frame_type_cb(DownFrame, down_frame, 2, 2, 4, 4);\n\n app::set_frame_type_cb(ThinUpBox, thin_up_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(ThinDownBox, down_box, 1, 1, 2, 2);\n\n app::set_frame_type_cb(RoundUpBox, up_round, 1, 1, 2, 2);\n\n app::set_frame_type_cb(RoundDownBox, down_round, 1, 1, 2, 2);\n\n app::set_frame_type_cb(BorderBox, border_box, 1, 1, 2, 2);\n\n}\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 39, "score": 229248.10759259772 }, { "content": "fn thin_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if w > 4 && h > 4 {\n\n shade_rect(x + 1, y + 1, w - 2, h - 3, \"RQOQSUWQ\", c);\n\n frame_rect(x, y, w, h - 1, \"IJLM\", c);\n\n } else {\n\n narrow_thin_box(x, y, w, h, c);\n\n }\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 40, "score": 
229248.10759259772 }, { "content": "fn depressed_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rect(\n\n x - 2,\n\n y - 2,\n\n w + 4,\n\n h + 4,\n\n Color::color_average(Color::Black, Color::White, 0.3),\n\n );\n\n rectf(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_schemes/fluent.rs", "rank": 41, "score": 229248.10759259772 }, { "content": "fn metro_depressed_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(Color::from_rgb(0x56, 0x9D, 0xE5)));\n\n draw_rect(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 42, "score": 227381.10122388275 }, { "content": "fn rounded_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let (r, g, b) = c.to_rgb();\n\n let svg = format!(\"<?xml version='1.0'?><svg width='{}' height='{}' xmlns='http://www.w3.org/2000/svg'>\n\n <rect stroke-width='2' stroke='rgb(200, 200, 200)' rx='15' width='{}' height='{}' fill='rgb({}, {}, {})' y='1' x='1'/>\n\n </svg>\",w, h, w - 2, h - 2, r, g, b);\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/svg_based.rs", "rank": 43, "score": 227381.10122388275 }, { "content": "fn rflat_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let (r, g, b) = c.to_rgb();\n\n let svg = format!(\"<?xml version='1.0'?><svg width='{}' height='{}' xmlns='http://www.w3.org/2000/svg'>\n\n <rect stroke-width='2' stroke='none' rx='15' width='{}' height='{}' fill='rgb({}, {}, {})' y='1' x='1'/>\n\n </svg>\",w, h, w - 2, h - 2, r, g, b);\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/svg_based.rs", "rank": 44, "score": 227381.10122388275 }, { "content": "fn narrow_thin_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if h <= 0 || w <= 0 {\n\n return;\n\n }\n\n set_draw_color(shade_color(Color::gray_ramp('R' as i32), c));\n\n draw_rectf(x + 1, y + 1, w - 2, h - 
2);\n\n set_draw_color(shade_color(Color::gray_ramp('I' as i32), c));\n\n if w > 1 {\n\n draw_xyline(x + 1, y, x + w - 2);\n\n draw_xyline(x + 1, y + h - 1, x + w - 2);\n\n }\n\n if h > 1 {\n\n draw_yxline(x, y + 1, y + h - 2);\n\n draw_yxline(x + w - 1, y + 1, y + h - 2);\n\n }\n\n}\n\n\n", "file_path": "src/widget_schemes/crystal.rs", "rank": 45, "score": 227381.10122388275 }, { "content": "fn oval_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let (r, g, b) = c.to_rgb();\n\n let svg = format!(\"<?xml version='1.0'?><svg width='{}' height='{}' xmlns='http://www.w3.org/2000/svg'>\n\n <rect stroke-width='2' rx='90' width='{}' height='{}' stroke='rgb({}, {}, {})' fill='none' y='1' x='1'/>\n\n </svg>\",w, h, w - 2, h - 2, r, g, b);\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/svg_based.rs", "rank": 46, "score": 227381.10122388275 }, { "content": "fn metro_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(Color::from_rgb(0xAC, 0xAC, 0xAC)));\n\n draw_rect(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 47, "score": 227381.10122388275 }, { "content": "fn greybird_check_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x + 1, y + 1, w - 2, h - 2);\n\n greybird_check_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 48, "score": 227381.10122388275 }, { "content": "fn metro_depressed_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n vertical_gradient(\n\n x + 1,\n\n y + 1,\n\n x + w - 2,\n\n y + h - 2,\n\n activated_color(Color::from_rgb(0xDA, 0xEC, 0xFC)),\n\n activated_color(Color::from_rgb(0xC4, 0xE0, 0xFC)),\n\n );\n\n metro_depressed_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 49, "score": 227381.10122388275 }, { "content": "fn 
blue_depressed_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top gradient\n\n vertical_gradient(\n\n x + 1,\n\n y + 2,\n\n x + w - 2,\n\n y + h / 2 - 1,\n\n activated_color(Color::from_rgb(0xEE, 0xCB, 0x8E)),\n\n activated_color(Color::from_rgb(0xF5, 0xC7, 0x79)),\n\n );\n\n // bottom gradient\n\n vertical_gradient(\n\n x + 1,\n\n y + h / 2,\n\n x + w - 2,\n\n y + h - 1,\n\n activated_color(Color::from_rgb(0xF5, 0xBB, 0x57)),\n\n activated_color(Color::from_rgb(0xF3, 0xE1, 0x77)),\n\n );\n\n blue_depressed_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/blue.rs", "rank": 50, "score": 227381.10122388275 }, { "content": "fn oval_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let (r, g, b) = c.to_rgb();\n\n let svg = format!(\"<?xml version='1.0'?><svg width='{}' height='{}' xmlns='http://www.w3.org/2000/svg'>\n\n <rect stroke-width='2' stroke='rgb(200, 200, 200)' rx='90' width='{}' height='{}' fill='rgb({}, {}, {})' y='1' x='1'/>\n\n </svg>\",w, h, w - 2, h - 2, r, g, b);\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/svg_based.rs", "rank": 51, "score": 227381.10122388275 }, { "content": "fn blue_hovered_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // outer border\n\n set_draw_color(activated_color(Color::from_rgb(0xFF, 0xDB, 0x00)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // top inner borders\n\n set_draw_color(activated_color(Color::from_rgb(0xFF, 0xFC, 0xF8)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n draw_yxline(x + 1, y + 2, y + h / 2 - 1);\n\n draw_yxline(x + w - 2, y + 2, y + h / 2 - 1);\n\n draw_point(x + 2, y + 2);\n\n draw_point(x + w - 3, y + 2);\n\n // bottom inner borders\n\n set_draw_color(activated_color(Color::from_rgb(0xFF, 0xFA, 0xE2)));\n\n draw_yxline(x + 1, y + h / 2, 
y + h - 3);\n\n draw_yxline(x + w - 2, y + h / 2, y + h - 3);\n\n draw_xyline(x + 2, y + h - 2, x + w - 3);\n\n draw_point(x + 2, y + h - 3);\n\n draw_point(x + w - 3, y + h - 3);\n\n // corners\n\n set_draw_color(activated_color(Color::from_rgb(0xF7, 0xD7, 0x3F)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/blue.rs", "rank": 52, "score": 227381.10122388275 }, { "content": "fn dark_depressed_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x27, 0x27, 0x27)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n // side outer borders\n\n set_draw_color(activated_color(Color::from_rgb(0x2C, 0x2C, 0x2C)));\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // bottom outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x30, 0x30, 0x30)));\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n // top inner border\n\n set_draw_color(activated_color(Color::from_rgb(0x33, 0x33, 0x33)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n // corners\n\n set_draw_color(activated_color(Color::from_rgb(0x32, 0x32, 0x32)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n draw_xyline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n set_draw_color(activated_color(Color::from_rgb(0x4B, 0x4B, 0x4B)));\n\n draw_point(x, y);\n\n draw_point(x + w - 1, y);\n\n draw_point(x, y + h - 1);\n\n draw_point(x + w - 1, y + h - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 53, "score": 227381.10122388275 }, { "content": "fn metro_hovered_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n vertical_gradient(\n\n x + 1,\n\n y + 1,\n\n x + w - 2,\n\n y + h - 2,\n\n 
activated_color(Color::from_rgb(0xEC, 0xF4, 0xFC)),\n\n activated_color(Color::from_rgb(0xDC, 0xEC, 0xFC)),\n\n );\n\n metro_hovered_up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 54, "score": 227381.10122388275 }, { "content": "fn dark_tabs_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x28, 0x28, 0x28)));\n\n draw_xyline(x + 1, y, x + w - 2);\n\n draw_xyline(x + 1, y + h - 1, x + w - 2);\n\n draw_yxline(x, y + 1, y + h - 2);\n\n draw_yxline(x + w - 1, y + 1, y + h - 2);\n\n // top inner border\n\n set_draw_color(activated_color(Color::from_rgb(0x6A, 0x6A, 0x6A)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 55, "score": 227381.10122388275 }, { "content": "fn dark_swatch_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x25, 0x25, 0x25)));\n\n draw_rect(x, y, w, h);\n\n // inner border\n\n set_draw_color(activated_color(Color::from_rgb(0xFF, 0xFF, 0xFF)));\n\n draw_rect(x + 1, y + 1, w - 2, h - 2);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 56, "score": 227381.10122388275 }, { "content": "fn dark_depressed_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n vertical_gradient(\n\n x + 1,\n\n y + 2,\n\n x + w - 2,\n\n y + h - 1,\n\n activated_color(Color::from_rgb(0x3F, 0x3F, 0x3F)),\n\n activated_color(Color::from_rgb(0x37, 0x37, 0x37)),\n\n );\n\n dark_depressed_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 57, "score": 227381.10122388275 }, { "content": "fn metro_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if w >= h {\n\n vertical_gradient(\n\n x + 1,\n\n y + 1,\n\n x + w - 2,\n\n y + h - 2,\n\n activated_color(Color::from_rgb(0xF0, 0xF0, 0xF0)),\n\n activated_color(Color::from_rgb(0xE5, 0xE5, 0xE5)),\n\n );\n\n } else {\n\n 
horizontal_gradient(\n\n x + 1,\n\n y + 1,\n\n x + w - 2,\n\n y + h - 2,\n\n activated_color(Color::from_rgb(0xF0, 0xF0, 0xF0)),\n\n activated_color(Color::from_rgb(0xE5, 0xE5, 0xE5)),\n\n );\n\n }\n\n metro_button_up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 58, "score": 227381.10122388275 }, { "content": "fn blue_tabs_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x + 1, y + 1, w - 2, h - 2);\n\n blue_tabs_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/blue.rs", "rank": 59, "score": 227381.10122388275 }, { "content": "fn greybird_depressed_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top gradient\n\n vertical_gradient(\n\n x + 1,\n\n y + 1,\n\n x + w - 2,\n\n y + 4,\n\n activated_color(Color::from_rgb(0xAF, 0xAF, 0xAF)),\n\n activated_color(Color::from_rgb(0xB4, 0xB4, 0xB4)),\n\n );\n\n vertical_gradient(\n\n x + 1,\n\n y + 5,\n\n x + w - 2,\n\n y + h - 1,\n\n activated_color(Color::from_rgb(0xB4, 0xB4, 0xB4)),\n\n activated_color(Color::from_rgb(0xAA, 0xAA, 0xAA)),\n\n );\n\n greybird_depressed_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 60, "score": 227381.10122388275 }, { "content": "fn greybird_tabs_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(Color::from_rgb(0xD9, 0xD9, 0xD9)));\n\n draw_rectf(x + 2, y + 2, w - 3, h - 2);\n\n greybird_tabs_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 61, "score": 227381.10122388275 }, { "content": "fn radio_round_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let col = c.to_rgb();\n\n let svg = format!(\n\n \"<svg width='{}' height='{}'>\n\n <circle cx='{}' cy='{}' r='{}' fill='rgb({},{},{})'/>\n\n </svg>\",\n\n w,\n\n h,\n\n w / 2,\n\n h / 2,\n\n (w as f64 - 1.0) / 2.0,\n\n col.0,\n\n col.1,\n\n col.2\n\n );\n\n let mut image = 
image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/aqua.rs", "rank": 62, "score": 227381.10122388275 }, { "content": "fn greybird_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top outer border\n\n set_draw_color(activated_color(Color::from_rgb(0xA6, 0xA6, 0xA6)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n // side outer borders\n\n set_draw_color(activated_color(Color::from_rgb(0x96, 0x96, 0x96)));\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // bottom outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x87, 0x87, 0x87)));\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n // top inner border\n\n set_draw_color(activated_color(Color::from_rgb(0xEE, 0xEE, 0xEE)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n // side inner borders\n\n set_draw_color(activated_color(Color::from_rgb(0xE4, 0xE4, 0xE4)));\n\n draw_yxline(x + 1, y + 2, y + h - 3);\n\n draw_yxline(x + w - 2, y + 2, y + h - 3);\n\n // top corners\n\n set_draw_color(activated_color(Color::from_rgb(0xB8, 0xB8, 0xB8)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n // bottom corners\n\n set_draw_color(activated_color(Color::from_rgb(0xA0, 0xA0, 0xA0)));\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 63, "score": 227381.10122388275 }, { "content": "fn greybird_hovered_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top outer border\n\n set_draw_color(activated_color(Color::from_rgb(0xAE, 0xAE, 0xAE)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n // side outer borders\n\n set_draw_color(activated_color(Color::from_rgb(0x9E, 0x9E, 0x9E)));\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // bottom outer border\n\n 
set_draw_color(activated_color(Color::from_rgb(0x8E, 0x8E, 0x8E)));\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n // top inner border\n\n set_draw_color(activated_color(Color::from_rgb(0xF3, 0xF3, 0xF3)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n // side inner borders\n\n set_draw_color(activated_color(Color::from_rgb(0xED, 0xED, 0xED)));\n\n draw_yxline(x + 1, y + 2, y + h - 3);\n\n draw_yxline(x + w - 2, y + 2, y + h - 3);\n\n // top corners\n\n set_draw_color(activated_color(Color::from_rgb(0xC0, 0xC0, 0xC0)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n // bottom corners\n\n set_draw_color(activated_color(Color::from_rgb(0xA7, 0xA7, 0xA7)));\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 64, "score": 227381.10122388275 }, { "content": "fn metro_check_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(Color::from_rgb(0x70, 0x70, 0x70)));\n\n draw_rect(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 65, "score": 227381.10122388275 }, { "content": "fn dark_tabs_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x + 1, y + 1, w - 2, h - 2);\n\n dark_tabs_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 66, "score": 227381.10122388275 }, { "content": "fn rounded_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let (r, g, b) = c.to_rgb();\n\n let svg = format!(\"<?xml version='1.0'?><svg width='{}' height='{}' xmlns='http://www.w3.org/2000/svg'>\n\n <rect stroke-width='2' rx='15' width='{}' height='{}' stroke='rgb({}, {}, {})' fill='none' y='1' x='1'/>\n\n </svg>\",w, h, w - 2, h - 2, r, g, b);\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": 
"src/widget_schemes/svg_based.rs", "rank": 67, "score": 227381.10122388275 }, { "content": "fn blue_depressed_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // outer border\n\n set_draw_color(activated_color(Color::from_rgb(0xC2, 0x9B, 0x29)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // top inner border\n\n set_draw_color(activated_color(Color::from_rgb(0xE3, 0xC1, 0x85)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n // corners\n\n set_draw_color(activated_color(Color::from_rgb(0xCB, 0xAB, 0x53)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/blue.rs", "rank": 68, "score": 227381.10122388275 }, { "content": "fn greybird_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if w >= h {\n\n vertical_gradient(\n\n x + 2,\n\n y + 2,\n\n x + w - 3,\n\n y + h - 2,\n\n activated_color(Color::from_rgb(0xDB, 0xDB, 0xDB)),\n\n activated_color(Color::from_rgb(0xCC, 0xCC, 0xCC)),\n\n );\n\n } else {\n\n horizontal_gradient(\n\n x + 2,\n\n y + 2,\n\n x + w - 3,\n\n y + h - 2,\n\n activated_color(Color::from_rgb(0xDB, 0xDB, 0xDB)),\n\n activated_color(Color::from_rgb(0xCC, 0xCC, 0xCC)),\n\n );\n\n }\n\n greybird_button_up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 69, "score": 227381.10122388275 }, { "content": "fn greybird_depressed_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x8A, 0x8A, 0x8A)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n // side outer borders\n\n set_draw_color(activated_color(Color::from_rgb(0x7D, 0x7D, 0x7D)));\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, 
y + h - 3);\n\n // bottom outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x71, 0x71, 0x71)));\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n // top corners\n\n set_draw_color(activated_color(Color::from_rgb(0x98, 0x98, 0x98)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n // bottom corners\n\n set_draw_color(activated_color(Color::from_rgb(0x88, 0x88, 0x88)));\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 70, "score": 227381.10122388275 }, { "content": "fn greybird_hovered_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n vertical_gradient(\n\n x + 2,\n\n y + 2,\n\n x + w - 3,\n\n y + h - 2,\n\n activated_color(Color::from_rgb(0xE6, 0xE6, 0xE6)),\n\n activated_color(Color::from_rgb(0xD6, 0xD6, 0xD6)),\n\n );\n\n greybird_hovered_up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 71, "score": 227381.10122388275 }, { "content": "fn blue_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if w >= h {\n\n // top gradient\n\n vertical_gradient(\n\n x + 2,\n\n y + 2,\n\n x + w - 3,\n\n y + h / 2 - 1,\n\n activated_color(Color::from_rgb(0xF0, 0xF6, 0xFB)),\n\n activated_color(Color::from_rgb(0xE2, 0xEA, 0xF3)),\n\n );\n\n // bottom gradient\n\n vertical_gradient(\n\n x + 2,\n\n y + h / 2,\n\n x + w - 3,\n\n y + h - 3,\n\n activated_color(Color::from_rgb(0xD5, 0xE0, 0xED)),\n\n activated_color(Color::from_rgb(0xD7, 0xE2, 0xEF)),\n\n );\n", "file_path": "src/widget_themes/blue.rs", "rank": 72, "score": 227381.10122388275 }, { "content": "fn blue_tabs_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // border\n\n set_draw_color(activated_color(Color::from_rgb(0x87, 0x97, 0xAA)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + 
w - 1, y + 2, y + h - 3);\n\n // top corners\n\n set_draw_color(activated_color(Color::from_rgb(0x9B, 0xAA, 0xBB)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n // bottom corners\n\n set_draw_color(activated_color(Color::from_rgb(0xA1, 0xAE, 0xBD)));\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/blue.rs", "rank": 73, "score": 227381.10122388275 }, { "content": "fn metro_check_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(Color::from_rgb(0xFF, 0xFF, 0xFF)));\n\n draw_rectf(x + 1, y + 1, w - 2, h - 2);\n\n metro_check_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 74, "score": 227381.10122388275 }, { "content": "fn dark_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n if w >= h {\n\n vertical_gradient(\n\n x + 1,\n\n y + 2,\n\n x + w - 2,\n\n y + h - 1,\n\n activated_color(Color::from_rgb(0x75, 0x75, 0x75)),\n\n activated_color(Color::from_rgb(0x62, 0x62, 0x62)),\n\n );\n\n } else {\n\n horizontal_gradient(\n\n x + 1,\n\n y + 2,\n\n x + w - 2,\n\n y + h - 1,\n\n activated_color(Color::from_rgb(0x75, 0x75, 0x75)),\n\n activated_color(Color::from_rgb(0x62, 0x62, 0x62)),\n\n );\n\n }\n\n dark_button_up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 75, "score": 227381.10122388275 }, { "content": "fn blue_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x87, 0x97, 0xAA)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // top inner borders\n\n set_draw_color(activated_color(Color::from_rgb(0xF6, 0xFA, 0xFE)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n draw_yxline(x + 1, y 
+ 2, y + h / 2 - 1);\n\n draw_yxline(x + w - 2, y + 2, y + h / 2 - 1);\n\n draw_point(x + 2, y + 2);\n\n draw_point(x + w - 3, y + 2);\n\n // bottom inner borders\n\n set_draw_color(activated_color(Color::from_rgb(0xFE, 0xFF, 0xFF)));\n\n draw_yxline(x + 1, y + h / 2, y + h - 3);\n\n draw_yxline(x + w - 2, y + h / 2, y + h - 3);\n\n draw_xyline(x + 2, y + h - 2, x + w - 3);\n\n draw_point(x + 2, y + h - 3);\n", "file_path": "src/widget_themes/blue.rs", "rank": 77, "score": 227381.10122388275 }, { "content": "fn metro_hovered_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(Color::from_rgb(0x7E, 0xB4, 0xEA)));\n\n draw_rect(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_themes/metro.rs", "rank": 78, "score": 227381.10122388275 }, { "content": "fn dark_swatch_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x + 2, y + 2, w - 4, h - 4);\n\n dark_swatch_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 79, "score": 227381.10122388275 }, { "content": "fn dark_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x30, 0x30, 0x30)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n // side outer borders\n\n set_draw_color(activated_color(Color::from_rgb(0x2C, 0x2C, 0x2C)));\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // bottom outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x27, 0x27, 0x27)));\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n // top inner border\n\n set_draw_color(activated_color(Color::from_rgb(0x91, 0x91, 0x91)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n // corners\n\n set_draw_color(activated_color(Color::from_rgb(0x47, 0x47, 0x47)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n 
draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 80, "score": 227381.10122388275 }, { "content": "fn greybird_check_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top border\n\n set_draw_color(activated_color(Color::from_rgb(0x80, 0x80, 0x80)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n // side borders\n\n set_draw_color(activated_color(Color::from_rgb(0x89, 0x89, 0x89)));\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // bottom border\n\n set_draw_color(activated_color(Color::from_rgb(0x90, 0x90, 0x90)));\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n // top corners\n\n set_draw_color(activated_color(Color::from_rgb(0xA6, 0xA6, 0xA6)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n // bottom corners\n\n set_draw_color(activated_color(Color::from_rgb(0xB0, 0xB0, 0xB0)));\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 81, "score": 227381.10122388275 }, { "content": "fn oflat_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let (r, g, b) = c.to_rgb();\n\n let svg = format!(\"<?xml version='1.0'?><svg width='{}' height='{}' xmlns='http://www.w3.org/2000/svg'>\n\n <rect stroke-width='2' stroke='none' rx='90' width='{}' height='{}' fill='rgb({}, {}, {})' y='1' x='1'/>\n\n </svg>\",w, h, w - 2, h - 2, r, g, b);\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n\npub(crate) fn use_svg_based_scheme() {\n\n use fltk::enums::FrameType::*;\n\n app::reload_scheme().ok();\n\n app::set_scheme(app::Scheme::Base);\n\n app::set_frame_type_cb(RoundedFrame, rounded_frame, 0, 0, 0, 0);\n\n app::set_frame_type_cb(RoundedBox, rounded_box, 0, 0, 0, 0);\n\n app::set_frame_type_cb(RFlatBox, rflat_box, 0, 0, 0, 0);\n\n 
app::set_frame_type_cb(OvalBox, oval_box, 0, 0, 0, 0);\n\n app::set_frame_type_cb(OvalFrame, oval_frame, 0, 0, 0, 0);\n\n app::set_frame_type_cb(OFlatFrame, oflat_box, 0, 0, 0, 0);\n\n}\n", "file_path": "src/widget_schemes/svg_based.rs", "rank": 82, "score": 227381.10122388275 }, { "content": "fn blue_hovered_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top gradient\n\n vertical_gradient(\n\n x + 2,\n\n y + 2,\n\n x + w - 3,\n\n y + h / 2 - 1,\n\n activated_color(Color::from_rgb(0xFF, 0xF0, 0xDF)),\n\n activated_color(Color::from_rgb(0xFF, 0xE2, 0xC2)),\n\n );\n\n // bottom gradient\n\n vertical_gradient(\n\n x + 2,\n\n y + h / 2,\n\n x + w - 3,\n\n y + h - 3,\n\n activated_color(Color::from_rgb(0xFF, 0xCF, 0x6A)),\n\n activated_color(Color::from_rgb(0xFF, 0xE9, 0x83)),\n\n );\n\n blue_hovered_up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/blue.rs", "rank": 83, "score": 227381.10122388275 }, { "content": "fn greybird_tabs_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top outer border\n\n set_draw_color(activated_color(Color::from_rgb(0xA6, 0xA6, 0xA6)));\n\n draw_xyline(x + 2, y, x + w - 3);\n\n // side outer borders\n\n set_draw_color(activated_color(Color::from_rgb(0x96, 0x96, 0x96)));\n\n draw_yxline(x, y + 2, y + h - 3);\n\n draw_yxline(x + w - 1, y + 2, y + h - 3);\n\n // bottom outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x87, 0x87, 0x87)));\n\n draw_xyline(x + 2, y + h - 1, x + w - 3);\n\n // top inner border\n\n set_draw_color(activated_color(Color::from_rgb(0xEE, 0xEE, 0xEE)));\n\n draw_xyline(x + 2, y + 1, x + w - 3);\n\n // side inner borders\n\n set_draw_color(activated_color(Color::from_rgb(0xE4, 0xE4, 0xE4)));\n\n draw_yxline(x + 1, y + 2, y + h - 3);\n\n draw_yxline(x + w - 2, y + 2, y + h - 3);\n\n // top corners\n\n set_draw_color(activated_color(Color::from_rgb(0xB8, 0xB8, 0xB8)));\n\n draw_xyline2(x, y + 1, x + 1, y);\n\n draw_yxline2(x + w - 2, y, y + 1, x + w - 1);\n\n // bottom 
corners\n\n set_draw_color(activated_color(Color::from_rgb(0xA0, 0xA0, 0xA0)));\n\n draw_xyline2(x, y + h - 2, x + 1, y + h - 1);\n\n draw_yxline2(x + w - 2, y + h - 1, y + h - 2, x + w - 1);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 85, "score": 227381.10122388275 }, { "content": "fn default_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n let col = c.to_rgb();\n\n let bg = Color::BackGround.to_rgb();\n\n let svg = format!(\n\n \"<svg width='{0}' height='{1}'>\n\n <defs>\n\n <linearGradient id='grad1' x1='0%' y1='0%' x2='0%' y2='100%'>\n\n <stop offset='0%' style='stop-color:rgb({2},{3},{4});stop-opacity:1' />\n\n <stop offset='100%' style='stop-color:rgb({5},{6},{7});stop-opacity:1' />\n\n </linearGradient>s\n\n </defs>\n\n <rect width='{0}' height='{1}' rx='{8}' fill='url(#grad1)' />\n\n </svg>\",\n\n w,\n\n h,\n\n col.0,\n\n col.1,\n\n col.2,\n\n col.0 - 5,\n\n col.1 - 5,\n\n col.2 - 5,\n\n h / 4,\n\n );\n\n let mut image = image::SvgImage::from_data(&svg).unwrap();\n\n image.draw(x, y, w, h);\n\n}\n\n\n", "file_path": "src/widget_schemes/aqua.rs", "rank": 86, "score": 227381.10122388275 }, { "content": "fn dark_panel_thin_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x + 1, y + 1, w - 2, h - 2);\n\n dark_panel_thin_up_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 87, "score": 225598.29581949307 }, { "content": "fn dark_spacer_thin_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x + 1, y + 1, w - 2, h - 2);\n\n dark_spacer_thin_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 89, "score": 225598.29581949307 }, { "content": "fn greybird_panel_thin_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top and left borders\n\n set_draw_color(activated_color(Color::from_rgb(0xDA, 0xDA, 0xDA)));\n\n draw_yxline2(x, y + h - 2, 
y, x + w - 2);\n\n // bottom and right borders\n\n set_draw_color(activated_color(Color::from_rgb(0xC1, 0xC1, 0xC1)));\n\n draw_xyline2(x, y + h - 1, x + w - 1, y);\n\n}\n\n\n", "file_path": "src/widget_themes/greybird.rs", "rank": 92, "score": 225598.29581949307 }, { "content": "fn dark_panel_thin_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top and left borders\n\n set_draw_color(activated_color(Color::from_rgb(0x6A, 0x6A, 0x6A)));\n\n draw_yxline2(x, y + h - 2, y, x + w - 2);\n\n // bottom and right borders\n\n set_draw_color(activated_color(Color::from_rgb(0x28, 0x28, 0x28)));\n\n draw_xyline2(x, y + h - 1, x + w - 1, y);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 93, "score": 225598.29581949307 }, { "content": "fn dark_radio_round_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(Color::from_rgb(0x30, 0x30, 0x30)));\n\n draw_arc(x, y, w, h, 0.0, 360.0);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 94, "score": 225598.29581949307 }, { "content": "fn dark_spacer_thin_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top and left borders\n\n set_draw_color(activated_color(Color::from_rgb(0x38, 0x38, 0x38)));\n\n draw_yxline2(x, y + h - 2, y, x + w - 2);\n\n // bottom and right borders\n\n set_draw_color(activated_color(Color::from_rgb(0x74, 0x74, 0x74)));\n\n draw_xyline2(x, y + h - 1, x + w - 1, y);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 95, "score": 225598.29581949307 }, { "content": "fn dark_input_thin_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top and side outer borders\n\n set_draw_color(activated_color(Color::from_rgb(0x30, 0x30, 0x30)));\n\n draw_xyline(x, y, x + w - 1);\n\n draw_yxline(x, y + 1, y + h - 2);\n\n draw_yxline(x + w - 1, y + 1, y + h - 2);\n\n // bottom outer border\n\n set_draw_color(activated_color(Color::from_rgb(0x29, 0x29, 0x29)));\n\n draw_xyline(x, y + h - 1, x + w - 1);\n\n // top inner 
border\n\n set_draw_color(activated_color(Color::from_rgb(0x37, 0x37, 0x37)));\n\n draw_xyline(x + 1, y + 1, x + w - 2);\n\n // top and side innermost borders\n\n set_draw_color(activated_color(Color::from_rgb(0x39, 0x39, 0x39)));\n\n draw_xyline(x + 1, y + 2, x + w - 2);\n\n draw_yxline(x + 1, y + 3, y + h - 2);\n\n draw_yxline(x + w - 2, y + 3, y + h - 2);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 96, "score": 225598.29581949307 }, { "content": "fn dark_radio_round_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n // top edges\n\n set_draw_color(activated_color(Color::from_rgb(0x75, 0x75, 0x75)));\n\n draw_arc(x + 1, y + 1, w - 2, h - 2, 0.0, 180.0);\n\n // bottom edges\n\n set_draw_color(activated_color(Color::from_rgb(0x62, 0x62, 0x62)));\n\n draw_arc(x + 1, y + 1, w - 2, h - 2, 180.0, 360.0);\n\n // gradient\n\n vertical_gradient(\n\n x + 2,\n\n y + 2,\n\n x + w - 3,\n\n y + h - 3,\n\n Color::from_rgb(0x74, 0x74, 0x74),\n\n Color::from_rgb(0x63, 0x63, 0x63),\n\n );\n\n dark_radio_round_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 98, "score": 225598.29581949307 }, { "content": "fn dark_input_thin_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n set_draw_color(activated_color(c));\n\n draw_rectf(x + 2, y + 3, w - 4, h - 4);\n\n dark_input_thin_down_frame(x, y, w, h, c);\n\n}\n\n\n", "file_path": "src/widget_themes/dark.rs", "rank": 99, "score": 225598.29581949307 } ]
Rust
src/tools/builder.rs
feenkcom/gtoolkit-maestro-rs
1c1651b6ddaf8648bce6037fa24e34ca08be7a93
use crate::create::FileToCreate; use crate::download::{FileToDownload, FilesToDownload}; use crate::{ Application, Checker, Downloader, ExecutableSmalltalk, FileToMove, ImageSeed, InstallerError, Result, Smalltalk, SmalltalkCommand, SmalltalkExpressionBuilder, SmalltalkScriptToExecute, SmalltalkScriptsToExecute, BUILDING, CREATING, DOWNLOADING, EXTRACTING, MOVING, SPARKLE, }; use crate::{FileToUnzip, FilesToUnzip}; use clap::{AppSettings, ArgEnum, Clap}; use feenk_releaser::{Version, VersionBump}; use file_matcher::FileNamed; use indicatif::HumanDuration; use reqwest::StatusCode; use std::path::PathBuf; use std::str::FromStr; use std::time::Instant; use url::Url; pub const DEFAULT_PHARO_IMAGE: &str = "https://dl.feenk.com/pharo/Pharo9.0-SNAPSHOT.build.1564.sha.f5f541c.arch.64bit.zip"; #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct BuildOptions { #[clap(long)] pub overwrite: bool, #[clap(long, default_value = "cloner", possible_values = Loader::VARIANTS, case_insensitive = true)] pub loader: Loader, #[clap(long, parse(try_from_str = url_parse), conflicts_with_all(&["image_zip", "image_file"]))] pub image_url: Option<Url>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_file"]))] pub image_zip: Option<PathBuf>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_zip"]))] pub image_file: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub public_key: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub private_key: Option<PathBuf>, #[clap(long, parse(try_from_str = BuildVersion::from_str), default_value = BuildVersion::BleedingEdge.abstract_name())] pub version: BuildVersion, } impl BuildOptions { pub fn image_seed(&self) -> ImageSeed { if let Some(ref image_zip) = self.image_zip { return ImageSeed::Zip(image_zip.clone()); } if let Some(ref image_url) = self.image_url { return ImageSeed::Url(image_url.clone()); } if let Some(ref 
image_file) = self.image_file { return ImageSeed::Image(image_file.clone()); } return ImageSeed::Url( url_parse(DEFAULT_PHARO_IMAGE) .unwrap_or_else(|_| panic!("Failed to parse url: {}", DEFAULT_PHARO_IMAGE)), ); } } fn url_parse(val: &str) -> Result<Url> { Url::parse(val).map_err(|error| error.into()) } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct ReleaseBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long, default_value = VersionBump::Patch.to_str(), possible_values = VersionBump::variants(), case_insensitive = true)] pub bump: VersionBump, #[clap(long)] pub no_gt_world: bool, } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct LocalBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long)] pub no_gt_world: bool, } impl BuildOptions { fn ssh_keys(&self) -> Result<Option<(PathBuf, PathBuf)>> { let public_key = self.public_key()?; let private_key = self.private_key()?; match (&private_key, &public_key) { (Some(private), Some(public)) => Ok(Some((private.clone(), public.clone()))), (None, None) => Ok(None), _ => InstallerError::SshKeysConfigurationError(private_key, public_key).into(), } } fn public_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.public_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PublicKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } fn private_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.private_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PrivateKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } } impl BuildOptions { pub fn 
new() -> Self { Self { overwrite: false, loader: Loader::Cloner, image_url: None, image_zip: None, image_file: None, public_key: None, private_key: None, version: BuildVersion::BleedingEdge, } } pub fn should_overwrite(&self) -> bool { self.overwrite } pub fn overwrite(&mut self, overwrite: bool) { self.overwrite = overwrite; } pub fn loader(&mut self, loader: Loader) { self.loader = loader; } } #[derive(ArgEnum, Copy, Clone, Debug)] #[repr(u32)] pub enum Loader { #[clap(name = "cloner")] Cloner, #[clap(name = "metacello")] Metacello, } impl FromStr for Loader { type Err = String; fn from_str(s: &str) -> std::result::Result<Self, String> { <Loader as ArgEnum>::from_str(s, true) } } impl ToString for Loader { fn to_string(&self) -> String { (Loader::VARIANTS[*self as usize]).to_owned() } } #[derive(Debug, Clone)] pub enum BuildVersion { LatestRelease, BleedingEdge, Version(Version), } impl BuildVersion { pub fn abstract_name(&self) -> &str { match self { BuildVersion::LatestRelease => "latest-release", BuildVersion::BleedingEdge => "bleeding-edge", BuildVersion::Version(_) => "vX.Y.Z", } } } impl FromStr for BuildVersion { type Err = InstallerError; fn from_str(s: &str) -> Result<Self> { let version = s.to_string().to_lowercase(); let version_str = version.as_str(); match version_str { "latest-release" => Ok(BuildVersion::LatestRelease), "bleeding-edge" => Ok(BuildVersion::BleedingEdge), _ => Ok(BuildVersion::Version(Version::parse(version_str)?)), } } } impl ToString for BuildVersion { fn to_string(&self) -> String { match self { BuildVersion::Version(version) => version.to_string(), _ => self.abstract_name().to_string(), } } } pub struct Builder; #[derive(Serialize)] pub struct LoaderVersionInfo { gtoolkit_version: String, releaser_version: String, } impl Builder { pub fn new() -> Self { Self {} } pub async fn resolve_loader_version_info( &self, build_options: &BuildOptions, ) -> Result<LoaderVersionInfo> { let gtoolkit_version_string = match 
&build_options.version { BuildVersion::LatestRelease => { format!( "v{}", Application::latest_gtoolkit_image_version() .await? .to_string() ) } BuildVersion::BleedingEdge => "main".to_string(), BuildVersion::Version(version) => { format!("v{}", version.to_string()) } }; let releaser_version_string = match &build_options.version { BuildVersion::BleedingEdge => "main".to_string(), _ => { let releaser_version_file_url_string = format!( "https://raw.githubusercontent.com/feenkcom/gtoolkit/{}/gtoolkit-releaser.version", &gtoolkit_version_string ); let releaser_version_file_url = Url::parse(&releaser_version_file_url_string)?; let releaser_version_file_response = reqwest::get(releaser_version_file_url.clone()).await?; if releaser_version_file_response.status() != StatusCode::OK { return InstallerError::FailedToDownloadReleaserVersion( releaser_version_file_url.clone(), releaser_version_file_response.status(), ) .into(); } let releaser_version_file_content = releaser_version_file_response.text().await?; let releaser_version = Version::parse(releaser_version_file_content)?; format!("v{}", releaser_version.to_string()) } }; Ok(LoaderVersionInfo { gtoolkit_version: gtoolkit_version_string, releaser_version: releaser_version_string, }) } pub async fn build( &self, application: &mut Application, build_options: &BuildOptions, ) -> Result<()> { let started = Instant::now(); let image_seed = build_options.image_seed(); application.set_image_seed(image_seed.clone())?; Checker::new() .check(application, build_options.should_overwrite()) .await?; application.serialize_into_file()?; println!("{}Downloading files...", DOWNLOADING); let pharo_vm = FileToDownload::new( Url::parse(application.pharo_vm_url())?, application.workspace(), "pharo-vm.zip", ); let files_to_download = FilesToDownload::new() .extend(Downloader::files_to_download(application)) .add(pharo_vm.clone()) .maybe_add(image_seed.file_to_download(application)); files_to_download.download().await?; println!("{}Extracting 
files...", EXTRACTING); let files_to_unzip = FilesToUnzip::new() .extend(Downloader::files_to_unzip(application)) .add(FileToUnzip::new( pharo_vm.path(), application.workspace().join("pharo-vm"), )) .maybe_add(image_seed.file_to_unzip(application)); files_to_unzip.unzip().await?; if !image_seed.is_image_file() { println!("{}Moving files...", MOVING); let seed_image = FileNamed::wildmatch(format!("*.{}", application.image_extension())) .within(image_seed.seed_image_directory(application)) .find()?; let seed_smalltalk = Smalltalk::new(application.pharo_executable(), seed_image, application); let seed_evaluator = seed_smalltalk.evaluator(); SmalltalkCommand::new("save") .arg( application .workspace() .join(application.image_name()) .display() .to_string(), ) .execute(&seed_evaluator)?; FileToMove::new( FileNamed::wildmatch("*.sources") .within(image_seed.seed_image_directory(application)) .find()?, application.workspace(), ) .move_file() .await?; } let loader_template_string = match build_options.loader { Loader::Cloner => include_str!("../st/clone-gt.st"), Loader::Metacello => include_str!("../st/load-gt.st"), }; let loader_template = mustache::compile_str(loader_template_string)?; let loader_version_info = self.resolve_loader_version_info(build_options).await?; let loader_script = loader_template.render_to_string(&loader_version_info)?; let loader_script_file_name = format!("load-gt-{}.st", &loader_version_info.gtoolkit_version); println!("{}Creating build scripts...", CREATING); FileToCreate::new( application.workspace().join("load-patches.st"), include_str!("../st/load-patches.st"), ) .create() .await?; FileToCreate::new( application.workspace().join("load-taskit.st"), include_str!("../st/load-taskit.st"), ) .create() .await?; FileToCreate::new( application.workspace().join(&loader_script_file_name), loader_script, ) .create() .await?; let gtoolkit = application.gtoolkit(); let pharo = application.pharo(); println!("{}Preparing the image...", BUILDING); 
SmalltalkScriptsToExecute::new() .add(SmalltalkScriptToExecute::new("load-patches.st")) .add(SmalltalkScriptToExecute::new("load-taskit.st")) .execute(pharo.evaluator().save(true)) .await?; println!("{}Building Glamorous Toolkit...", BUILDING); let ssh_keys = build_options.ssh_keys()?; let mut scripts_to_execute = SmalltalkScriptsToExecute::new(); if let Some((private, public)) = ssh_keys { scripts_to_execute.add( SmalltalkExpressionBuilder::new() .add("IceCredentialsProvider useCustomSsh: true") .add(format!( "IceCredentialsProvider sshCredentials publicKey: '{}'; privateKey: '{}'", private.display(), public.display() )) .build(), ); } scripts_to_execute .add(SmalltalkScriptToExecute::new(&loader_script_file_name)) .execute(gtoolkit.evaluator().save(true)) .await?; println!("{} Done in {}", SPARKLE, HumanDuration(started.elapsed())); Ok(()) } }
use crate::create::FileToCreate; use crate::download::{FileToDownload, FilesToDownload}; use crate::{ Application, Checker, Downloader, ExecutableSmalltalk, FileToMove, ImageSeed, InstallerError, Result, Smalltalk, SmalltalkCommand, SmalltalkExpressionBuilder, SmalltalkScriptToExecute, SmalltalkScriptsToExecute, BUILDING, CREATING, DOWNLOADING, EXTRACTING, MOVING, SPARKLE, }; use crate::{FileToUnzip, FilesToUnzip}; use clap::{AppSettings, ArgEnum, Clap}; use feenk_releaser::{Version, VersionBump}; use file_matcher::FileNamed; use indicatif::HumanDuration; use reqwest::StatusCode; use std::path::PathBuf; use std::str::FromStr; use std::time::Instant; use url::Url; pub const DEFAULT_PHARO_IMAGE: &str = "https://dl.feenk.com/pharo/Pharo9.0-SNAPSHOT.build.1564.sha.f5f541c.arch.64bit.zip"; #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct BuildOptions { #[clap(long)] pub overwrite: bool, #[clap(long, default_value = "cloner", possible_values = Loader::VARIANTS, case_insensitive = true)] pub loader: Loader, #[clap(long, parse(try_from_str = url_parse), conflicts_with_all(&["image_zip", "image_file"]))] pub image_url: Option<Url>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_file"]))] pub image_zip: Option<PathBuf>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_zip"]))] pub image_file: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub public_key: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub private_key: Option<PathBuf>, #[clap(long, parse(try_from_str = BuildVersion::from_str), default_value = BuildVersion::BleedingEdge.abstract_name())] pub version: BuildVersion, } impl BuildOptions { pub fn image_seed(&self) -> ImageSeed { if let Some(ref image_zip) = self.image_zip { return ImageSeed::Zip(image_zip.clone()); } if let Some(ref image_url) = self.image_url { return ImageSeed::Url(image_url.clone()); } if let Some(ref 
image_file) = self.image_file { return ImageSeed::Image(image_file.clone()); } return ImageSeed::Url( url_parse(DEFAULT_PHARO_IMAGE) .unwrap_or_else(|_| panic!("Failed to parse url: {}", DEFAULT_PHARO_IMAGE)), ); } } fn url_parse(val: &str) -> Result<Url> { Url::parse(val).map_err(|error| error.into()) } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct ReleaseBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long, default_value = VersionBump::Patch.to_str(), possible_values = VersionBump::variants(), case_insensitive = true)] pub bump: VersionBump, #[clap(long)] pub no_gt_world: bool, } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct LocalBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long)] pub no_gt_world: bool, } impl BuildOptions { fn ssh_keys(&self) -> Result<Option<(PathBuf, PathBuf)>> { let public_key = self.public_key()?; let private_key = self.private_key()?; match (&private_key, &public_key) { (Some(private), Some(public)) => Ok(Some((private.clone(), public.clone()))), (None, None) => Ok(None), _ => InstallerError::SshKeysConfigurationError(private_key, public_key).into(), } } fn public_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.public_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PublicKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } fn private_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.private_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PrivateKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } } impl BuildOptions { pub fn 
new() -> Self { Self { overwrite: false, loader: Loader::Cloner, image_url: None, image_zip: None, image_file: None, public_key: None, private_key: None, version: BuildVersion::BleedingEdge, } } pub fn should_overwrite(&self) -> bool { self.overwrite } pub fn overwrite(&mut self, overwrite: bool) { self.overwrite = overwrite; } pub fn loader(&mut self, loader: Loader) { self.loader = loader; } } #[derive(ArgEnum, Copy, Clone, Debug)] #[repr(u32)] pub enum Loader { #[clap(name = "cloner")] Cloner, #[clap(name = "metacello")] Metacello, } impl FromStr for Loader { type Err = String; fn from_str(s: &str) -> std::result::Result<Self, String> { <Loader as ArgEnum>::from_str(s, true) } } impl ToString for Loader { fn to_string(&self) -> String { (Loader::VARIANTS[*self as usize]).to_owned() } } #[derive(Debug, Clone)] pub enum BuildVersion { LatestRelease, BleedingEdge, Version(Version), } impl BuildVersion { pub fn abstract_name(&self) -> &str { match self { BuildVersion::LatestRelease => "latest-release", BuildVersion::BleedingEdge => "bleeding-edge", BuildVersion::Version(_) => "vX.Y.Z", } } } impl FromStr for BuildVersion { type Err = InstallerError; fn from_str(s: &str) -> Result<Self> { let version = s.to_string().to_lowercase(); let version_str = version.as_str(); match version_str { "latest-release" => Ok(BuildVersion::LatestRelease), "bleeding-edge" => Ok(BuildVersion::BleedingEdge), _ => Ok(BuildVersion::Version(Version::parse(version_str)?)), } } } impl ToString for BuildVersion { fn to_string(&self) -> String { match self { BuildVersion::Version(version) => version.to_string(), _ => self.abstract_name().to_string(), } } } pub struct Builder; #[derive(Serialize)] pub struct LoaderVersionInfo { gtoolkit_version: String, releaser_version: String, } impl Builder { pub fn new() -> Self { Self {} } pub async fn resolve_loader_version_info( &self, build_options: &BuildOptions, ) -> Result<LoaderVersionInfo> { let gtoolkit_version_string = match 
&build_options.version { BuildVersion::LatestRelease => { format!( "v{}", Application::latest_gtoolkit_image_version() .await? .to_string() ) } BuildVersion::BleedingEdge => "main".to_string(), BuildVersion::Version(version) => { format!("v{}", version.to_string()) } }; let releaser_version_string = match &build_options.version { BuildVersion::BleedingEdge => "main".to_string(), _ => { let releaser_version_file_url_string = format!( "https://raw.githubusercontent.com/feenkcom/gtoolkit/{}/gtoolkit-releaser.version", &gtoolkit_version_string ); let releaser_version_file_url = Url::parse(&releaser_version_file_url_string)?; let releaser_version_file_response = reqwest::get(releaser_version_file_url.clone()).await?; if releaser_version_file_response.status() != StatusCode::OK { return InstallerError::FailedToDownloadReleaserVersion( releaser_version_file_url.clone(), releaser_version_file_response.status(), ) .into(); } let releaser_version_file_content = releaser_version_file_response.text().await?; let releaser_version = Version::parse(releaser_version_file_content)?; format!("v{}", releaser_version.to_string()) } }; Ok(LoaderVersionInfo { gtoolkit_version: gtoolkit_version_string, releaser_version: releaser_version_string, }) }
}
pub async fn build( &self, application: &mut Application, build_options: &BuildOptions, ) -> Result<()> { let started = Instant::now(); let image_seed = build_options.image_seed(); application.set_image_seed(image_seed.clone())?; Checker::new() .check(application, build_options.should_overwrite()) .await?; application.serialize_into_file()?; println!("{}Downloading files...", DOWNLOADING); let pharo_vm = FileToDownload::new( Url::parse(application.pharo_vm_url())?, application.workspace(), "pharo-vm.zip", ); let files_to_download = FilesToDownload::new() .extend(Downloader::files_to_download(application)) .add(pharo_vm.clone()) .maybe_add(image_seed.file_to_download(application)); files_to_download.download().await?; println!("{}Extracting files...", EXTRACTING); let files_to_unzip = FilesToUnzip::new() .extend(Downloader::files_to_unzip(application)) .add(FileToUnzip::new( pharo_vm.path(), application.workspace().join("pharo-vm"), )) .maybe_add(image_seed.file_to_unzip(application)); files_to_unzip.unzip().await?; if !image_seed.is_image_file() { println!("{}Moving files...", MOVING); let seed_image = FileNamed::wildmatch(format!("*.{}", application.image_extension())) .within(image_seed.seed_image_directory(application)) .find()?; let seed_smalltalk = Smalltalk::new(application.pharo_executable(), seed_image, application); let seed_evaluator = seed_smalltalk.evaluator(); SmalltalkCommand::new("save") .arg( application .workspace() .join(application.image_name()) .display() .to_string(), ) .execute(&seed_evaluator)?; FileToMove::new( FileNamed::wildmatch("*.sources") .within(image_seed.seed_image_directory(application)) .find()?, application.workspace(), ) .move_file() .await?; } let loader_template_string = match build_options.loader { Loader::Cloner => include_str!("../st/clone-gt.st"), Loader::Metacello => include_str!("../st/load-gt.st"), }; let loader_template = mustache::compile_str(loader_template_string)?; let loader_version_info = 
self.resolve_loader_version_info(build_options).await?; let loader_script = loader_template.render_to_string(&loader_version_info)?; let loader_script_file_name = format!("load-gt-{}.st", &loader_version_info.gtoolkit_version); println!("{}Creating build scripts...", CREATING); FileToCreate::new( application.workspace().join("load-patches.st"), include_str!("../st/load-patches.st"), ) .create() .await?; FileToCreate::new( application.workspace().join("load-taskit.st"), include_str!("../st/load-taskit.st"), ) .create() .await?; FileToCreate::new( application.workspace().join(&loader_script_file_name), loader_script, ) .create() .await?; let gtoolkit = application.gtoolkit(); let pharo = application.pharo(); println!("{}Preparing the image...", BUILDING); SmalltalkScriptsToExecute::new() .add(SmalltalkScriptToExecute::new("load-patches.st")) .add(SmalltalkScriptToExecute::new("load-taskit.st")) .execute(pharo.evaluator().save(true)) .await?; println!("{}Building Glamorous Toolkit...", BUILDING); let ssh_keys = build_options.ssh_keys()?; let mut scripts_to_execute = SmalltalkScriptsToExecute::new(); if let Some((private, public)) = ssh_keys { scripts_to_execute.add( SmalltalkExpressionBuilder::new() .add("IceCredentialsProvider useCustomSsh: true") .add(format!( "IceCredentialsProvider sshCredentials publicKey: '{}'; privateKey: '{}'", private.display(), public.display() )) .build(), ); } scripts_to_execute .add(SmalltalkScriptToExecute::new(&loader_script_file_name)) .execute(gtoolkit.evaluator().save(true)) .await?; println!("{} Done in {}", SPARKLE, HumanDuration(started.elapsed())); Ok(()) }
function_block-full_function
[ { "content": "pub trait ExecutableSmalltalk {\n\n fn create_command(&self, evaluator: &SmalltalkEvaluator) -> Result<Command>;\n\n fn execute(&self, evaluator: &SmalltalkEvaluator) -> Result<()> {\n\n let mut command = self.create_command(evaluator)?;\n\n if evaluator.is_verbose() {\n\n println!(\"{:?}\", &command);\n\n }\n\n\n\n let status = command.status()?;\n\n\n\n if !status.success() {\n\n return InstallerError::CommandExecutionFailed(command).into();\n\n }\n\n Ok(())\n\n }\n\n fn execute_with_result(&self, evaluator: &SmalltalkEvaluator) -> Result<String> {\n\n let mut command = self.create_command(evaluator)?;\n\n command.stdout(Stdio::piped());\n\n\n\n if evaluator.is_verbose() {\n", "file_path": "src/smalltalk/smalltalk.rs", "rank": 1, "score": 84155.23427618307 }, { "content": "pub fn unzip_task(file_to_unzip: FileToUnzip, multibar: Arc<MultiProgress>) -> Result<()> {\n\n let file = std::fs::File::open(&file_to_unzip.archive).unwrap();\n\n let mut archive = zip::ZipArchive::new(file).unwrap();\n\n\n\n // Create the ProgressBar with the aquired size from before\n\n // and add it to the multibar\n\n let progress_bar = multibar.add(ProgressBar::new(archive.len() as u64));\n\n\n\n // Set Style to the ProgressBar\n\n progress_bar.set_style(\n\n ProgressStyle::default_bar()\n\n .template(\"[{bar:40.cyan/blue}] {percent}% - {msg}\")\n\n .progress_chars(\"#>-\"),\n\n );\n\n\n\n // Set the filename as message part of the progress bar\n\n progress_bar.set_message(\n\n file_to_unzip\n\n .archive\n\n .clone()\n", "file_path": "src/zipping/unzip.rs", "rank": 2, "score": 71692.8396785431 }, { "content": "pub fn zip_folder<F: std::io::Write + std::io::Seek>(\n\n zip: &mut ZipWriter<F>,\n\n src_dir: impl AsRef<Path>,\n\n zip_options: FileOptions,\n\n) -> Result<()> {\n\n let src_dir = src_dir.as_ref();\n\n\n\n let walkdir = WalkDir::new(src_dir);\n\n let it = walkdir.into_iter();\n\n\n\n let mut buffer = Vec::new();\n\n for entry in it {\n\n let entry = entry?;\n\n 
let path = entry.path();\n\n\n\n let name = path\n\n .strip_prefix(src_dir.parent().expect(\"Could not get a parent folder\"))\n\n .unwrap();\n\n let name = name\n\n .to_str()\n", "file_path": "src/zipping/zip.rs", "rank": 3, "score": 48897.26217228614 }, { "content": "pub fn zip_file<F: std::io::Write + std::io::Seek>(\n\n zip: &mut ZipWriter<F>,\n\n file: impl AsRef<Path>,\n\n mut zip_options: FileOptions,\n\n) -> Result<()> {\n\n let file = file.as_ref();\n\n let name = file\n\n .file_name()\n\n .expect(\"Could not get file name\")\n\n .to_str()\n\n .expect(\"Could not convert file name to Unicode\");\n\n\n\n // Get and Set permissions\n\n #[cfg(unix)]\n\n {\n\n use std::os::unix::fs::PermissionsExt;\n\n\n\n let unix_mode: u32 = std::fs::metadata(file)?.permissions().mode();\n\n zip_options = zip_options.unix_permissions(unix_mode);\n\n }\n", "file_path": "src/zipping/zip.rs", "rank": 4, "score": 48897.26217228614 }, { "content": "#[derive(Serialize)]\n\nstruct ReleaseInfo {\n\n version: String,\n\n os: String,\n\n arch: String,\n\n}\n\n\n\npub struct Release;\n\n\n\nimpl Release {\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n\n\n fn process_template_path(application: &Application, path: impl AsRef<Path>) -> PathBuf {\n\n let new_version = application.image_version();\n\n\n\n let platform = match application.platform() {\n\n PlatformOS::MacOSX8664 => \"MacOS\",\n\n PlatformOS::MacOSAarch64 => \"MacOS\",\n\n PlatformOS::WindowsX8664 => \"Windows\",\n", "file_path": "src/tools/release.rs", "rank": 5, "score": 40189.76761441238 }, { "content": "pub trait GToolkit {\n\n fn get_gtoolkit_version(&self) -> Result<Version>;\n\n fn print_new_commits(&self) -> Result<()>;\n\n fn perform_setup_for_release(&self, bump: VersionBump) -> Result<()>;\n\n fn perform_setup_for_local_build(&self) -> Result<()>;\n\n fn perform_iceberg_clean_up(&self) -> Result<()>;\n\n fn run_examples(&self, packages: &Vec<String>, test_options: &TestOptions) -> Result<()>;\n\n fn 
run_release_examples(&self, test_options: &TestOptions) -> Result<()>;\n\n fn run_release_slides(&self, test_options: &TestOptions) -> Result<()>;\n\n fn run_architectural_report(&self) -> Result<()>;\n\n}\n\n\n\nimpl<'application> GToolkit for Smalltalk<'application> {\n\n fn get_gtoolkit_version(&self) -> Result<Version> {\n\n let version_string =\n\n SmalltalkCommand::new(\"getgtoolkitversion\").execute_with_result(&self.evaluator())?;\n\n Version::parse(version_string).map_err(|error| error.into())\n\n }\n\n\n\n fn print_new_commits(&self) -> Result<()> {\n", "file_path": "src/gtoolkit.rs", "rank": 6, "score": 36432.09365041114 }, { "content": " println!(\"{:?}\", &command);\n\n }\n\n\n\n let output = command.output()?;\n\n\n\n if !output.status.success() {\n\n return InstallerError::CommandExecutionFailed(command).into();\n\n }\n\n Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())\n\n }\n\n\n\n fn name(&self) -> String;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Smalltalk<'application> {\n\n executable: PathBuf,\n\n image: PathBuf,\n\n application: &'application Application,\n\n}\n", "file_path": "src/smalltalk/smalltalk.rs", "rank": 7, "score": 35478.47265678761 }, { "content": "\n\nimpl<'application> Smalltalk<'application> {\n\n pub fn new(\n\n executable: impl Into<PathBuf>,\n\n image: impl Into<PathBuf>,\n\n application: &'application Application,\n\n ) -> Self {\n\n Self {\n\n executable: executable.into(),\n\n image: image.into(),\n\n application,\n\n }\n\n }\n\n\n\n pub fn executable(&self) -> &Path {\n\n self.executable.as_path()\n\n }\n\n\n\n pub fn image(&self) -> &Path {\n\n self.image.as_path()\n", "file_path": "src/smalltalk/smalltalk.rs", "rank": 8, "score": 35478.399927922335 }, { "content": "use crate::{Application, InstallerError, Result, SmalltalkEvaluator};\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::{Command, Stdio};\n\n\n", "file_path": "src/smalltalk/smalltalk.rs", "rank": 9, "score": 35477.770164821915 }, 
{ "content": " }\n\n\n\n pub fn workspace(&self) -> &Path {\n\n self.application.workspace()\n\n }\n\n\n\n pub fn evaluator(&self) -> SmalltalkEvaluator {\n\n let mut evaluator = SmalltalkEvaluator::new(self);\n\n evaluator.verbose(self.verbose());\n\n evaluator\n\n }\n\n\n\n pub fn verbose(&self) -> bool {\n\n self.application.is_verbose()\n\n }\n\n\n\n pub fn application(&self) -> &Application {\n\n self.application\n\n }\n\n}\n", "file_path": "src/smalltalk/smalltalk.rs", "rank": 10, "score": 35475.63688951135 }, { "content": "use crate::Result;\n\nuse std::path::PathBuf;\n\n\n\npub struct FileToCreate {\n\n content: String,\n\n destination: PathBuf,\n\n}\n\n\n\nimpl FileToCreate {\n\n pub fn new(path: impl Into<PathBuf>, content: impl Into<String>) -> Self {\n\n Self {\n\n content: content.into(),\n\n destination: path.into(),\n\n }\n\n }\n\n\n\n pub async fn create(&self) -> Result<()> {\n\n tokio::fs::write(&self.destination, &self.content).await?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/create.rs", "rank": 11, "score": 30289.02600290141 }, { "content": "use feenk_releaser::Version;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::ops::Deref;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct AppVersion(Version);\n\n\n\nimpl From<Version> for AppVersion {\n\n fn from(version: Version) -> Self {\n\n AppVersion(version)\n\n }\n\n}\n\n\n\nimpl Deref for AppVersion {\n\n type Target = Version;\n\n fn deref(&self) -> &Version {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 12, "score": 30235.98759221912 }, { "content": "impl Display for AppVersion {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct ImageVersion(Version);\n\n\n\nimpl From<Version> for ImageVersion {\n\n fn from(version: Version) -> Self {\n\n ImageVersion(version)\n\n }\n\n}\n\n\n\nimpl Deref for ImageVersion {\n\n type Target = Version;\n\n 
fn deref(&self) -> &Version {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Display for ImageVersion {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n", "file_path": "src/version.rs", "rank": 13, "score": 30235.161750453914 }, { "content": " #[error(\"Workspace already exists: {0}\")]\n\n WorkspaceAlreadyExists(PathBuf),\n\n #[error(\"Failed to find the latest release of the Glamorous Toolkit VM\")]\n\n GlamorousToolkitAppIsNotYetReleased,\n\n #[error(\"Command {0:?} failed. See install.log or install-errors.log for more info\")]\n\n CommandExecutionFailed(Command),\n\n #[error(\"Both private {0:?} and public key {1:?} must be set, or none\")]\n\n SshKeysConfigurationError(Option<PathBuf>, Option<PathBuf>),\n\n #[error(\"Specified private key {0} does not exist\")]\n\n PrivateKeyDoesNotExist(PathBuf),\n\n #[error(\"Specified public key {0} does not exist\")]\n\n PublicKeyDoesNotExist(PathBuf),\n\n #[error(\"Failed to download {0}, status code {1}\")]\n\n DownloadError(Url, StatusCode),\n\n #[error(\"Failed to read the file name of {0}\")]\n\n FailedToReadFileName(PathBuf),\n\n #[error(\"Failed to read the file extension of {0}\")]\n\n FailedToReadFileExtension(PathBuf),\n\n}\n\n\n\nimpl<T> From<InstallerError> for std::result::Result<T, InstallerError> {\n\n fn from(error: InstallerError) -> Self {\n\n Err(error)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 14, "score": 30194.878291379973 }, { "content": " #[error(\"Failed to serialize as yaml\")]\n\n SerializationAsYamlError(#[from] serde_yaml::Error),\n\n #[error(\"Version parse error\")]\n\n ReleaserError(#[from] feenk_releaser::ReleaserError),\n\n #[error(\"Walkdir error\")]\n\n WalkdirError(#[from] walkdir::Error),\n\n #[error(\"Failed to parse URL\")]\n\n UrlParseError(#[from] url::ParseError),\n\n #[error(\"Task join error\")]\n\n JoinError(#[from] JoinError),\n\n #[error(\"Mustache template error\")]\n\n MustacheErrorr(#[from] mustache::Error),\n\n #[error(\"Failed 
to detect the latest released version of the gtoolkit-vm from its GitHub repository\")]\n\n FailedToDetectGlamorousAppVersion,\n\n #[error(\"Failed to download releaser version from {0}, with status code {1}\")]\n\n FailedToDownloadReleaserVersion(Url, StatusCode),\n\n #[error(\"Failed to detect the version of the gtoolkit\")]\n\n FailedToDetectGlamorousImageVersion,\n\n #[error(\"Failed to parse the loader {0}\")]\n\n LoaderParseError(String),\n", "file_path": "src/error.rs", "rank": 15, "score": 30193.292043530368 }, { "content": "use reqwest::{StatusCode, Url};\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse thiserror::Error;\n\nuse tokio::task::JoinError;\n\n\n\npub type Result<T> = core::result::Result<T, InstallerError>;\n\n\n\n#[derive(Error, Debug)]\n\npub enum InstallerError {\n\n #[error(\"Input/Output error\")]\n\n IoError(#[from] std::io::Error),\n\n #[error(\"File matcher error\")]\n\n FileMatcherError(#[from] file_matcher::FileMatcherError),\n\n #[error(\"Zip error\")]\n\n ZipError(#[from] zip::result::ZipError),\n\n #[error(\"Failed to perform a request\")]\n\n ReqwestError(#[from] reqwest::Error),\n\n #[error(\"Failed to canonicalize a path {0}\")]\n\n CanonicalizeError(PathBuf, #[source] to_absolute::Error),\n", "file_path": "src/error.rs", "rank": 16, "score": 30193.115725166947 }, { "content": "use futures::{stream, StreamExt};\n\nuse indicatif::{MultiProgress, ProgressBar, ProgressStyle};\n\nuse reqwest::{header, Client, Url};\n\nuse std::path::PathBuf;\n\nuse std::sync::Arc;\n\nuse tokio::io::AsyncWriteExt;\n\n\n\nuse crate::{InstallerError, Result};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct FileToDownload {\n\n url: String,\n\n directory: PathBuf,\n\n file_name: String,\n\n}\n\n\n\nimpl FileToDownload {\n\n pub fn new(\n\n url: impl Into<String>,\n\n directory: impl Into<PathBuf>,\n", "file_path": "src/download.rs", "rank": 17, "score": 29818.51238078947 }, { "content": " file_name: impl Into<String>,\n\n ) -> Self {\n\n 
Self {\n\n directory: directory.into(),\n\n url: url.into(),\n\n file_name: file_name.into(),\n\n }\n\n }\n\n\n\n pub fn path(&self) -> PathBuf {\n\n self.directory.join(&self.file_name)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct FilesToDownload {\n\n files: Vec<FileToDownload>,\n\n}\n\n\n\nimpl FilesToDownload {\n", "file_path": "src/download.rs", "rank": 18, "score": 29809.760786592782 }, { "content": "\n\n // Change the message on the overall progress indicator.\n\n main_pb.finish_with_message(\"done\");\n\n\n\n // Wait for the progress bars to finish rendering.\n\n // The first ? unwraps the outer join() in which we are waiting for the\n\n // future spawned by tokio::task::spawn_blocking to finishe.\n\n // The second ? unwraps the inner multibar.join().\n\n Ok(multibar.await??)\n\n }\n\n}\n\n\n\npub async fn download_task(\n\n file_to_download: FileToDownload,\n\n multibar: Arc<MultiProgress>,\n\n) -> Result<()> {\n\n // Parse URL into Url type\n\n let url = Url::parse(file_to_download.url.as_str())?;\n\n\n\n // Create a reqwest Client\n", "file_path": "src/download.rs", "rank": 19, "score": 29804.895659644248 }, { "content": " let client = Client::new();\n\n\n\n // We need to determine the file size before we download, so we can create a ProgressBar\n\n // A Header request for the CONTENT_LENGTH header gets us the file size\n\n let download_size = {\n\n let resp = client.head(url.as_str()).send().await?;\n\n if resp.status().is_success() {\n\n resp.headers() // Gives us the HeaderMap\n\n .get(header::CONTENT_LENGTH) // Gives us an Option containing the HeaderValue\n\n .and_then(|ct_len| ct_len.to_str().ok()) // Unwraps the Option as &str\n\n .and_then(|ct_len| ct_len.parse().ok()) // Parses the Option as u64\n\n .unwrap_or(0) // Fallback to 0\n\n } else {\n\n return InstallerError::DownloadError(url, resp.status()).into();\n\n }\n\n };\n\n\n\n // Here we build the actual Request with a RequestBuilder from the Client\n\n let request = 
client.get(url.as_str());\n\n\n", "file_path": "src/download.rs", "rank": 20, "score": 29804.883633467496 }, { "content": " pub fn new() -> Self {\n\n Self { files: vec![] }\n\n }\n\n\n\n pub fn add(self, file_to_download: FileToDownload) -> Self {\n\n let mut files = self.files.clone();\n\n files.push(file_to_download);\n\n Self { files }\n\n }\n\n\n\n pub fn maybe_add(self, file_to_download: Option<FileToDownload>) -> Self {\n\n if let Some(file_to_download) = file_to_download {\n\n self.add(file_to_download)\n\n } else {\n\n self\n\n }\n\n }\n\n\n\n pub fn extend(self, files_to_download: Self) -> Self {\n\n let mut files = self.files.clone();\n", "file_path": "src/download.rs", "rank": 21, "score": 29802.40068924272 }, { "content": " tokio::task::spawn(download_task(file_to_download.clone(), multibar)).await;\n\n\n\n // Increase main ProgressBar by 1\n\n main_pb.inc(1);\n\n }\n\n });\n\n\n\n // Set up a future to manage rendering of the multiple progress bars.\n\n let multibar = {\n\n // Create a clone of the multibar, which we will move into the task.\n\n let multibar = multibar.clone();\n\n\n\n // multibar.join() is *not* async and will block until all the progress\n\n // bars are done, therefore we must spawn it on a separate scheduler\n\n // on which blocking behavior is allowed.\n\n tokio::task::spawn_blocking(move || multibar.join())\n\n };\n\n\n\n // Wait for the tasks to finish.\n\n tasks.await;\n", "file_path": "src/download.rs", "rank": 22, "score": 29800.86699031351 }, { "content": " files.extend(files_to_download.files);\n\n Self { files }\n\n }\n\n\n\n pub async fn download(self) -> Result<()> {\n\n // Set up a new multi-progress bar.\n\n // The bar is stored in an `Arc` to facilitate sharing between threads.\n\n let multibar = std::sync::Arc::new(indicatif::MultiProgress::new());\n\n // Add an overall progress indicator to the multibar.\n\n // It has as many steps as the download_links Vector and will increment on completion of each task.\n\n let 
main_pb = std::sync::Arc::new(\n\n multibar\n\n .clone()\n\n .add(indicatif::ProgressBar::new(self.files.len() as u64)),\n\n );\n\n\n\n main_pb.set_style(\n\n indicatif::ProgressStyle::default_bar().template(\"{msg} {bar:10} {pos}/{len}\"),\n\n );\n\n main_pb.set_message(\"total \");\n", "file_path": "src/download.rs", "rank": 23, "score": 29799.377336998325 }, { "content": " // Create the ProgressBar with the aquired size from before\n\n // and add it to the multibar\n\n let progress_bar = multibar.add(ProgressBar::new(download_size));\n\n\n\n // Set Style to the ProgressBar\n\n progress_bar.set_style(\n\n ProgressStyle::default_bar()\n\n .template(\"[{bar:40.cyan/blue}] {bytes}/{total_bytes} - {msg}\")\n\n .progress_chars(\"#>-\"),\n\n );\n\n\n\n // Set the filename as message part of the progress bar\n\n progress_bar.set_message(file_to_download.file_name.clone());\n\n\n\n // Create the output file with tokio's async fs lib\n\n let mut outfile =\n\n tokio::fs::File::create(file_to_download.directory.join(&file_to_download.file_name))\n\n .await?;\n\n\n\n // Do the actual request to download the file\n", "file_path": "src/download.rs", "rank": 24, "score": 29799.00082763903 }, { "content": "\n\n // Make the main progress bar render immediately rather than waiting for the\n\n // first task to finish.\n\n main_pb.tick();\n\n\n\n // Convert download_links Vector into stream\n\n // This is basically a async compatible iterator\n\n let stream = stream::iter(&self.files);\n\n\n\n // Set up a future to iterate over tasks and run up to 2 at a time.\n\n let tasks = stream\n\n .enumerate()\n\n .for_each_concurrent(Some(2), |(_i, file_to_download)| {\n\n // Clone multibar and main_pb. 
We will move the clones into each task.\n\n let multibar = multibar.clone();\n\n let main_pb = main_pb.clone();\n\n async move {\n\n // Spawn a new tokio task for the current download link\n\n // We need to hand over the multibar, so the ProgressBar for the task can be added\n\n let _task =\n", "file_path": "src/download.rs", "rank": 25, "score": 29798.185142610513 }, { "content": " let mut download = request.send().await?;\n\n\n\n // Do an asynchronous, buffered copy of the download to the output file.\n\n //\n\n // We use the part from the reqwest-tokio example here on purpose\n\n // This way, we are able to increase the ProgressBar with every downloaded chunk\n\n while let Some(chunk) = download.chunk().await? {\n\n progress_bar.inc(chunk.len() as u64); // Increase ProgressBar by chunk size\n\n outfile.write(&chunk).await?; // Write chunk to output file\n\n }\n\n\n\n // Finish the progress bar to prevent glitches\n\n progress_bar.finish();\n\n\n\n // Must flush tokio::io::BufWriter manually.\n\n // It will *not* flush itself automatically when dropped.\n\n outfile.flush().await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/download.rs", "rank": 26, "score": 29793.030001127638 }, { "content": "pub const GTOOLKIT_REPOSITORY_OWNER: &str = \"feenkcom\";\n\npub const GTOOLKIT_REPOSITORY_NAME: &str = \"gtoolkit\";\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Application {\n\n verbose: bool,\n\n workspace: PathBuf,\n\n app_version: AppVersion,\n\n image_version: ImageVersion,\n\n image_name: String,\n\n image_extension: String,\n\n image_seed: ImageSeed,\n\n}\n\n\n\nimpl Application {\n\n pub fn new(\n\n workspace: impl AsRef<Path>,\n\n app_version: AppVersion,\n\n image_version: ImageVersion,\n\n image_seed: ImageSeed,\n", "file_path": "src/application.rs", "rank": 27, "score": 29731.468465088645 }, { "content": " }\n\n\n\n pub fn deserialize_from_file(&mut self) -> Result<()> {\n\n let application: Self =\n\n 
serde_yaml::from_str(std::fs::read_to_string(self.serialization_file())?.as_str())\n\n .map_err(|error| Into::<InstallerError>::into(error))?;\n\n\n\n self.image_extension = application.image_extension;\n\n self.image_name = application.image_name;\n\n self.image_seed = application.image_seed;\n\n self.app_version = application.app_version;\n\n self.image_version = application.image_version;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn platform(&self) -> PlatformOS {\n\n let os = std::env::consts::OS;\n\n let arch = std::env::consts::ARCH;\n\n\n", "file_path": "src/application.rs", "rank": 28, "score": 29729.243574524793 }, { "content": " PlatformOS::LinuxX8664 => \"bin/GlamorousToolkit-cli\",\n\n })\n\n }\n\n\n\n pub async fn latest_gtoolkit_image_version() -> Result<ImageVersion> {\n\n let latest_version: Option<Version> =\n\n GitHub::new(GTOOLKIT_REPOSITORY_OWNER, GTOOLKIT_REPOSITORY_NAME, None)\n\n .latest_release_version()\n\n .await?;\n\n\n\n if let Some(latest_version) = latest_version {\n\n return Ok(latest_version.into());\n\n };\n\n\n\n InstallerError::FailedToDetectGlamorousImageVersion.into()\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\n#[repr(u32)]\n\npub enum PlatformOS {\n\n MacOSX8664,\n\n MacOSAarch64,\n\n WindowsX8664,\n\n LinuxX8664,\n\n}\n", "file_path": "src/application.rs", "rank": 29, "score": 29727.207353363367 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse crate::{AppVersion, ImageSeed, ImageVersion, InstallerError, Result, Smalltalk};\n\nuse feenk_releaser::{GitHub, Version};\n\nuse file_matcher::{FolderNamed, OneEntry, OneEntryNamed};\n\nuse std::fs::File;\n\nuse std::io::Write;\n\n\n\npub const DEFAULT_IMAGE_NAME: &str = \"GlamorousToolkit\";\n\npub const DEFAULT_IMAGE_EXTENSION: &str = \"image\";\n\n\n\npub const DEFAULT_PHARO_VM_MAC: &str = \"https://dl.feenk.com/pharo/pharo64-mac-headless-stable.zip\";\n\npub const DEFAULT_PHARO_VM_LINUX: &str =\n\n 
\"https://dl.feenk.com/pharo/pharo64-linux-headless-stable.zip\";\n\npub const DEFAULT_PHARO_VM_WINDOWS: &str =\n\n \"https://dl.feenk.com/pharo/pharo64-win-headless-stable.zip\";\n\n\n\npub const SERIALIZATION_FILE: &str = \"gtoolkit.yaml\";\n\n\n", "file_path": "src/application.rs", "rank": 30, "score": 29723.813704902135 }, { "content": " pub fn gtoolkit(&self) -> Smalltalk {\n\n Smalltalk::new(self.gtoolkit_app_cli(), self.image(), self)\n\n }\n\n\n\n pub fn pharo(&self) -> Smalltalk {\n\n Smalltalk::new(self.pharo_executable(), self.image(), self)\n\n }\n\n\n\n pub fn serialization_file_name(&self) -> &str {\n\n SERIALIZATION_FILE\n\n }\n\n\n\n pub fn serialization_file(&self) -> PathBuf {\n\n self.workspace().join(self.serialization_file_name())\n\n }\n\n\n\n pub fn serialize_into_file(&self) -> Result<()> {\n\n let mut file = File::create(self.serialization_file())?;\n\n file.write(serde_yaml::to_string(self)?.as_bytes())?;\n\n Ok(())\n", "file_path": "src/application.rs", "rank": 31, "score": 29723.699811443577 }, { "content": " ) -> Result<Self> {\n\n let workspace = workspace.as_ref();\n\n let workspace = if workspace.is_relative() {\n\n std::env::current_dir()?.join(workspace)\n\n } else {\n\n workspace.to_path_buf()\n\n };\n\n\n\n Ok(Self {\n\n verbose: false,\n\n workspace,\n\n app_version,\n\n image_version,\n\n image_name: DEFAULT_IMAGE_NAME.to_string(),\n\n image_extension: DEFAULT_IMAGE_EXTENSION.to_string(),\n\n image_seed,\n\n })\n\n }\n\n\n\n pub fn is_verbose(&self) -> bool {\n", "file_path": "src/application.rs", "rank": 32, "score": 29721.151896616168 }, { "content": " pub fn image_extension(&self) -> &str {\n\n self.image_extension.as_str()\n\n }\n\n\n\n pub fn image_seed(&self) -> &ImageSeed {\n\n &self.image_seed\n\n }\n\n\n\n pub fn set_image_seed(&mut self, seed: ImageSeed) -> Result<()> {\n\n match &seed {\n\n ImageSeed::Image(image_file) => {\n\n let seed_image_directory = seed.seed_image_directory(self);\n\n\n\n let workspace =\n\n 
to_absolute::canonicalize(&seed_image_directory).map_err(|error| {\n\n InstallerError::CanonicalizeError(seed_image_directory, error)\n\n })?;\n\n\n\n self.set_workspace(workspace);\n\n\n", "file_path": "src/application.rs", "rank": 33, "score": 29721.011568609116 }, { "content": " self.verbose\n\n }\n\n\n\n pub fn set_verbose(&mut self, verbose: bool) {\n\n self.verbose = verbose;\n\n }\n\n\n\n pub fn workspace(&self) -> &Path {\n\n self.workspace.as_path()\n\n }\n\n\n\n pub fn set_workspace(&mut self, workspace: impl Into<PathBuf>) {\n\n self.workspace = workspace.into()\n\n }\n\n\n\n /// Returns a name of the image (without .image extension)\n\n pub fn image_name(&self) -> &str {\n\n self.image_name.as_str()\n\n }\n\n\n", "file_path": "src/application.rs", "rank": 34, "score": 29719.38961363071 }, { "content": " }\n\n\n\n pub fn gtoolkit_app(&self) -> &str {\n\n match self.platform() {\n\n PlatformOS::MacOSX8664 => \"GlamorousToolkit.app/Contents/MacOS/GlamorousToolkit\",\n\n PlatformOS::MacOSAarch64 => \"GlamorousToolkit.app/Contents/MacOS/GlamorousToolkit\",\n\n PlatformOS::WindowsX8664 => \"bin/GlamorousToolkit.exe\",\n\n PlatformOS::LinuxX8664 => \"bin/GlamorousToolkit\",\n\n }\n\n }\n\n\n\n pub fn gtoolkit_app_url(&self) -> String {\n\n let version = self.app_version().to_string();\n\n match self.platform() {\n\n PlatformOS::MacOSX8664 => {\n\n format!(\"https://github.com/feenkcom/gtoolkit-vm/releases/download/v{}/GlamorousToolkit-x86_64-apple-darwin.app.zip\", &version)\n\n }\n\n PlatformOS::MacOSAarch64 => {\n\n format!(\"https://github.com/feenkcom/gtoolkit-vm/releases/download/v{}/GlamorousToolkit-aarch64-apple-darwin.app.zip\", &version)\n\n }\n", "file_path": "src/application.rs", "rank": 35, "score": 29718.362434960178 }, { "content": " }\n\n\n\n /// Returns a path to the image with a glamorous application\n\n pub fn image(&self) -> PathBuf {\n\n self.workspace()\n\n .join(format!(\"{}.{}\", self.image_name(), self.image_extension()))\n\n }\n\n\n\n 
pub fn image_version(&self) -> &ImageVersion {\n\n &self.image_version\n\n }\n\n\n\n pub fn set_image_version(&mut self, version: ImageVersion) {\n\n self.image_version = version;\n\n }\n\n\n\n pub fn app_version(&self) -> &AppVersion {\n\n &self.app_version\n\n }\n\n\n", "file_path": "src/application.rs", "rank": 36, "score": 29717.66846436005 }, { "content": " let file_name = image_file\n\n .file_stem()\n\n .and_then(|name| name.to_str())\n\n .and_then(|name| Some(name.to_string()));\n\n\n\n let file_extension = image_file\n\n .extension()\n\n .and_then(|name| name.to_str())\n\n .and_then(|name| Some(name.to_string()));\n\n\n\n self.image_name =\n\n file_name.ok_or_else(|| InstallerError::FailedToReadFileName(image_file.clone()))?;\n\n self.image_extension = file_extension\n\n .ok_or_else(|| InstallerError::FailedToReadFileExtension(image_file.clone()))?;\n\n }\n\n _ => {}\n\n }\n\n\n\n self.image_seed = seed;\n\n Ok(())\n", "file_path": "src/application.rs", "rank": 37, "score": 29716.70631727657 }, { "content": " vec![FolderNamed::exact(\"bin\")]\n\n }\n\n PlatformOS::LinuxX8664 => {\n\n vec![FolderNamed::exact(\"bin\"), FolderNamed::exact(\"lib\")]\n\n }\n\n };\n\n\n\n folders\n\n .into_iter()\n\n .map(|each| each.within(self.workspace()))\n\n .collect::<Vec<OneEntry>>()\n\n }\n\n\n\n pub fn pharo_vm_url(&self) -> &str {\n\n match self.platform() {\n\n PlatformOS::MacOSX8664 => DEFAULT_PHARO_VM_MAC,\n\n PlatformOS::MacOSAarch64 => DEFAULT_PHARO_VM_MAC,\n\n PlatformOS::WindowsX8664 => DEFAULT_PHARO_VM_WINDOWS,\n\n PlatformOS::LinuxX8664 => DEFAULT_PHARO_VM_LINUX,\n\n }\n", "file_path": "src/application.rs", "rank": 38, "score": 29712.77473054088 }, { "content": " PlatformOS::WindowsX8664 => {\n\n format!(\"https://github.com/feenkcom/gtoolkit-vm/releases/download/v{}/GlamorousToolkit-x86_64-pc-windows-msvc.zip\", &version)\n\n }\n\n PlatformOS::LinuxX8664 => {\n\n 
format!(\"https://github.com/feenkcom/gtoolkit-vm/releases/download/v{}/GlamorousToolkit-x86_64-unknown-linux-gnu.zip\", &version)\n\n }\n\n }\n\n }\n\n\n\n pub fn gtoolkit_app_entries(&self) -> Vec<Box<dyn OneEntryNamed>> {\n\n match self.platform() {\n\n PlatformOS::MacOSX8664 | PlatformOS::MacOSAarch64 => {\n\n vec![FolderNamed::wildmatch(\"*.app\").boxed()]\n\n }\n\n PlatformOS::WindowsX8664 => {\n\n vec![FolderNamed::exact(\"bin\").boxed()]\n\n }\n\n PlatformOS::LinuxX8664 => {\n\n vec![\n\n FolderNamed::exact(\"bin\").boxed(),\n", "file_path": "src/application.rs", "rank": 39, "score": 29710.804042104544 }, { "content": " FolderNamed::exact(\"lib\").boxed(),\n\n ]\n\n }\n\n }\n\n }\n\n\n\n pub fn pharo_executable(&self) -> PathBuf {\n\n PathBuf::from(match self.platform() {\n\n PlatformOS::MacOSX8664 => \"pharo-vm/Pharo.app/Contents/MacOS/Pharo\",\n\n PlatformOS::MacOSAarch64 => \"pharo-vm/Pharo.app/Contents/MacOS/Pharo\",\n\n PlatformOS::WindowsX8664 => \"pharo-vm/PharoConsole.exe\",\n\n PlatformOS::LinuxX8664 => \"pharo-vm/pharo\",\n\n })\n\n }\n\n\n\n pub fn gtoolkit_app_cli(&self) -> PathBuf {\n\n PathBuf::from(match self.platform() {\n\n PlatformOS::MacOSX8664 => \"GlamorousToolkit.app/Contents/MacOS/GlamorousToolkit-cli\",\n\n PlatformOS::MacOSAarch64 => \"GlamorousToolkit.app/Contents/MacOS/GlamorousToolkit-cli\",\n\n PlatformOS::WindowsX8664 => \"bin/GlamorousToolkit-cli.exe\",\n", "file_path": "src/application.rs", "rank": 40, "score": 29709.629513808803 }, { "content": " match (os, arch) {\n\n (\"macos\", \"aarch64\") => PlatformOS::MacOSAarch64,\n\n (\"macos\", \"x86_64\") => PlatformOS::MacOSX8664,\n\n (\"linux\", \"x86_64\") => PlatformOS::LinuxX8664,\n\n (\"windows\", \"x86_64\") => PlatformOS::WindowsX8664,\n\n (os, arch) => {\n\n panic!(\"Unsupported {}-{}\", os, arch);\n\n }\n\n }\n\n }\n\n\n\n pub fn gtoolkit_app_folders(&self) -> Vec<OneEntry> {\n\n let folders = match self.platform() {\n\n PlatformOS::MacOSX8664 => {\n\n 
vec![FolderNamed::exact(\"GlamorousToolkit.app\")]\n\n }\n\n PlatformOS::MacOSAarch64 => {\n\n vec![FolderNamed::exact(\"GlamorousToolkit.app\")]\n\n }\n\n PlatformOS::WindowsX8664 => {\n", "file_path": "src/application.rs", "rank": 41, "score": 29709.569262330355 }, { "content": "use crate::{Application, InstallerError, Result, CHECKING};\n\n\n\npub struct Checker;\n\n\n\nimpl Checker {\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n\n\n pub async fn check(&self, application: &Application, should_overwrite: bool) -> Result<()> {\n\n println!(\"{}Checking the system...\", CHECKING);\n\n\n\n if application.image_seed().is_image_file() {\n\n return Ok(());\n\n }\n\n\n\n if should_overwrite && application.workspace().exists() {\n\n tokio::fs::remove_dir_all(application.workspace()).await?;\n\n }\n\n\n", "file_path": "src/tools/checker.rs", "rank": 42, "score": 28636.4382500954 }, { "content": " if application.workspace().exists() {\n\n return InstallerError::WorkspaceAlreadyExists(application.workspace().to_path_buf())\n\n .into();\n\n }\n\n\n\n tokio::fs::create_dir_all(application.workspace()).await?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/tools/checker.rs", "rank": 43, "score": 28610.90830840813 }, { "content": "use crate::Result;\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct FileToMove {\n\n file: PathBuf,\n\n destination: PathBuf,\n\n}\n\n\n\nimpl FileToMove {\n\n pub fn new(file: impl Into<PathBuf>, destination: impl Into<PathBuf>) -> Self {\n\n Self {\n\n file: file.into(),\n\n destination: destination.into(),\n\n }\n\n }\n\n\n\n pub async fn move_file(&self) -> Result<()> {\n\n let file_name = self.file.file_name().unwrap().to_str().unwrap();\n\n\n", "file_path": "src/moving/file.rs", "rank": 44, "score": 28482.95351440639 }, { "content": "use crate::Result;\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct FolderToMove {\n\n folder: PathBuf,\n\n destination: PathBuf,\n\n}\n\n\n\nimpl FolderToMove {\n\n pub 
fn new(folder: impl Into<PathBuf>, destination: impl Into<PathBuf>) -> Self {\n\n Self {\n\n folder: folder.into(),\n\n destination: destination.into(),\n\n }\n\n }\n\n\n\n pub async fn move_folder(&self) -> Result<()> {\n\n let mut stack = Vec::new();\n\n stack.push(self.folder.clone());\n", "file_path": "src/moving/folder.rs", "rank": 45, "score": 28482.315451607337 }, { "content": " let path = entry.path();\n\n if path.is_dir() {\n\n stack.push(path);\n\n } else {\n\n match path.file_name() {\n\n Some(filename) => {\n\n let dest_path = dest.join(filename);\n\n tokio::fs::copy(&path, &dest_path).await?;\n\n }\n\n None => {}\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/moving/folder.rs", "rank": 46, "score": 28462.51078212944 }, { "content": "\n\n let output_root = self.destination.clone();\n\n let input_root = self.folder.components().count();\n\n\n\n while let Some(working_path) = stack.pop() {\n\n // Generate a relative path\n\n let src: PathBuf = working_path.components().skip(input_root).collect();\n\n\n\n // Create a destination if missing\n\n let dest = if src.components().count() == 0 {\n\n output_root.clone()\n\n } else {\n\n output_root.join(&src)\n\n };\n\n if tokio::fs::metadata(&dest).await.is_err() {\n\n tokio::fs::create_dir_all(&dest).await?;\n\n }\n\n\n\n let mut entries = tokio::fs::read_dir(working_path).await?;\n\n while let Some(entry) = entries.next_entry().await? 
{\n", "file_path": "src/moving/folder.rs", "rank": 47, "score": 28462.479185215314 }, { "content": "mod file;\n\nmod folder;\n\n\n\npub use file::FileToMove;\n\npub use folder::FolderToMove;\n", "file_path": "src/moving/mod.rs", "rank": 48, "score": 28462.328114846205 }, { "content": " if self.destination.is_dir() {\n\n tokio::fs::copy(&self.file, &self.destination.join(file_name)).await?;\n\n } else {\n\n tokio::fs::copy(&self.file, &self.destination).await?;\n\n }\n\n tokio::fs::remove_file(&self.file).await?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/moving/file.rs", "rank": 49, "score": 28458.917676467936 }, { "content": "use crate::download::{FileToDownload, FilesToDownload};\n\nuse crate::{Application, FileToUnzip, FilesToUnzip, Result, DOWNLOADING, EXTRACTING};\n\n\n\npub struct Downloader;\n\n\n\nimpl Downloader {\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n\n\n pub fn gtoolkit_vm_to_download(application: &Application) -> FileToDownload {\n\n FileToDownload::new(\n\n application.gtoolkit_app_url(),\n\n application.workspace(),\n\n format!(\n\n \"GlamorousToolkitApp-v{}.zip\",\n\n application.app_version().to_string()\n\n ),\n\n )\n\n }\n", "file_path": "src/tools/downloader.rs", "rank": 50, "score": 28227.42422837045 }, { "content": "\n\n pub fn files_to_download(application: &Application) -> FilesToDownload {\n\n FilesToDownload::new().add(Self::gtoolkit_vm_to_download(application))\n\n }\n\n\n\n pub fn files_to_unzip(application: &Application) -> FilesToUnzip {\n\n let gtoolkit_vm = Self::gtoolkit_vm_to_download(application);\n\n FilesToUnzip::new().add(FileToUnzip::new(\n\n gtoolkit_vm.path(),\n\n application.workspace(),\n\n ))\n\n }\n\n\n\n pub async fn download_glamorous_toolkit_vm(&self, application: &Application) -> Result<()> {\n\n println!(\n\n \"{}Downloading GlamorousToolkit App (v{})...\",\n\n DOWNLOADING,\n\n application.app_version().to_string()\n\n );\n\n\n", "file_path": "src/tools/downloader.rs", "rank": 51, "score": 
28216.30406210332 }, { "content": " Self::files_to_download(application).download().await?;\n\n\n\n println!(\n\n \"{}Extracting GlamorousToolkit App (v{})...\",\n\n EXTRACTING,\n\n application.app_version().to_string()\n\n );\n\n\n\n Self::files_to_unzip(application).unzip().await?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/tools/downloader.rs", "rank": 52, "score": 28214.38189884186 }, { "content": "use crate::{ExecutableSmalltalk, Result, SmalltalkEvaluator};\n\nuse std::process::Command;\n\n\n\npub struct SmalltalkExpression {\n\n expression: String,\n\n}\n\n\n\nimpl SmalltalkExpression {\n\n pub fn new(expression: impl Into<String>) -> Self {\n\n Self {\n\n expression: expression.into(),\n\n }\n\n }\n\n\n\n pub fn expression(&self) -> &str {\n\n self.expression.as_str()\n\n }\n\n}\n\n\n\nimpl ExecutableSmalltalk for SmalltalkExpression {\n", "file_path": "src/smalltalk/expression.rs", "rank": 74, "score": 27748.56063808198 }, { "content": "use crate::Smalltalk;\n\nuse std::fs::OpenOptions;\n\nuse std::path::Path;\n\nuse std::process::{Command, Stdio};\n\n\n\n#[derive(Debug)]\n\npub struct SmalltalkEvaluator<'smalltalk, 'options> {\n\n smalltalk: &'smalltalk Smalltalk<'options>,\n\n interactive: bool,\n\n should_quit: bool,\n\n should_save: bool,\n\n verbose: bool,\n\n}\n\n\n\nimpl<'smalltalk, 'options> SmalltalkEvaluator<'smalltalk, 'options> {\n\n pub fn new(smalltalk: &'smalltalk Smalltalk<'options>) -> Self {\n\n Self {\n\n smalltalk,\n\n interactive: false,\n\n should_quit: true,\n", "file_path": "src/smalltalk/evaluator.rs", "rank": 75, "score": 27745.37815853456 }, { "content": "use std::ffi::{OsString};\n\nuse crate::{ExecutableSmalltalk, Result, SmalltalkEvaluator};\n\nuse std::process::Command;\n\n\n\npub struct SmalltalkCommand {\n\n command: String,\n\n arguments: Vec<OsString>,\n\n}\n\n\n\nimpl SmalltalkCommand {\n\n pub fn new(command: impl Into<String>) -> Self {\n\n Self {\n\n command: command.into(),\n\n arguments: vec![],\n\n }\n\n 
}\n\n\n\n pub fn arg(self, arg: impl Into<OsString>) -> Self {\n\n let arg = arg.into();\n\n if arg.is_empty() {\n", "file_path": "src/smalltalk/command.rs", "rank": 76, "score": 27744.928388933313 }, { "content": "\n\npub struct SmalltalkExpressionBuilder {\n\n expressions: Vec<String>,\n\n}\n\n\n\nimpl SmalltalkExpressionBuilder {\n\n pub fn new() -> Self {\n\n Self {\n\n expressions: vec![],\n\n }\n\n }\n\n\n\n pub fn add(&mut self, expression: impl Into<String>) -> &mut Self {\n\n self.expressions.push(expression.into());\n\n self\n\n }\n\n\n\n pub fn build(&mut self) -> SmalltalkExpression {\n\n SmalltalkExpression::new(self.expressions.join(\".\"))\n\n }\n\n}\n", "file_path": "src/smalltalk/expression.rs", "rank": 77, "score": 27743.79025516066 }, { "content": "use crate::{ExecutableSmalltalk, Result, SmalltalkEvaluator};\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\n\n\npub struct SmalltalkScriptToExecute {\n\n script: PathBuf,\n\n}\n\n\n\nimpl SmalltalkScriptToExecute {\n\n pub fn new(script: impl Into<PathBuf>) -> Self {\n\n Self {\n\n script: script.into(),\n\n }\n\n }\n\n}\n\n\n\nimpl ExecutableSmalltalk for SmalltalkScriptToExecute {\n\n fn create_command(&self, evaluator: &SmalltalkEvaluator) -> Result<Command> {\n\n let mut command = evaluator.command();\n\n command\n", "file_path": "src/smalltalk/script.rs", "rank": 78, "score": 27743.046955673308 }, { "content": "use crate::{ExecutableSmalltalk, Result, SmalltalkEvaluator};\n\nuse indicatif::{ProgressBar, ProgressStyle};\n\n\n\npub struct SmalltalkScriptsToExecute {\n\n scripts: Vec<Box<dyn ExecutableSmalltalk>>,\n\n}\n\n\n\nimpl SmalltalkScriptsToExecute {\n\n pub fn new() -> Self {\n\n Self { scripts: vec![] }\n\n }\n\n\n\n pub fn add(&mut self, script: impl Into<Box<dyn ExecutableSmalltalk>>) -> &mut Self {\n\n self.scripts.push(script.into());\n\n self\n\n }\n\n\n\n pub async fn execute(&self, evaluator: &SmalltalkEvaluator<'_, '_>) -> Result<()> {\n\n let mut index = 0 as 
usize;\n\n let total = self.scripts.len();\n", "file_path": "src/smalltalk/execution.rs", "rank": 79, "score": 27742.60476424087 }, { "content": "impl ExecutableSmalltalk for SmalltalkCommand {\n\n fn create_command(&self, evaluator: &SmalltalkEvaluator) -> Result<Command> {\n\n let mut command = evaluator.command();\n\n command.arg(&self.command);\n\n command.args(&self.arguments);\n\n\n\n Ok(command)\n\n }\n\n\n\n fn name(&self) -> String {\n\n self.command.clone()\n\n }\n\n}\n\n\n\nimpl From<SmalltalkCommand> for Box<(dyn ExecutableSmalltalk + 'static)> {\n\n fn from(command: SmalltalkCommand) -> Self {\n\n Box::new(command)\n\n }\n\n}\n", "file_path": "src/smalltalk/command.rs", "rank": 80, "score": 27738.593574139006 }, { "content": " fn create_command(&self, evaluator: &SmalltalkEvaluator) -> Result<Command> {\n\n let expression = if evaluator.should_save() {\n\n SmalltalkExpressionBuilder::new()\n\n .add(&self.expression)\n\n .add(\"Smalltalk snapshot: true andQuit: false\")\n\n .build()\n\n .expression()\n\n .to_owned()\n\n } else {\n\n self.expression.clone()\n\n };\n\n\n\n let mut command = evaluator.command();\n\n command\n\n .arg(\"eval\")\n\n .arg(if evaluator.should_quit() {\n\n \"\"\n\n } else {\n\n \"--no-quit\"\n\n })\n", "file_path": "src/smalltalk/expression.rs", "rank": 81, "score": 27738.31962973109 }, { "content": " .arg(if evaluator.wants_interactive() {\n\n \"--interactive\"\n\n } else {\n\n \"\"\n\n })\n\n .arg(&expression);\n\n\n\n Ok(command)\n\n }\n\n\n\n fn name(&self) -> String {\n\n self.expression.clone()\n\n }\n\n}\n\n\n\nimpl From<SmalltalkExpression> for Box<(dyn ExecutableSmalltalk + 'static)> {\n\n fn from(expression: SmalltalkExpression) -> Self {\n\n Box::new(expression)\n\n }\n\n}\n", "file_path": "src/smalltalk/expression.rs", "rank": 82, "score": 27734.556805494667 }, { "content": "\n\n fn name(&self) -> String {\n\n self.script.display().to_string()\n\n }\n\n}\n\n\n\nimpl From<SmalltalkScriptToExecute> for Box<(dyn 
ExecutableSmalltalk + 'static)> {\n\n fn from(script: SmalltalkScriptToExecute) -> Self {\n\n Box::new(script)\n\n }\n\n}\n", "file_path": "src/smalltalk/script.rs", "rank": 83, "score": 27733.422665102542 }, { "content": " .write(true)\n\n .create(true)\n\n .open(self.workspace().join(\"install.log\"))\n\n .unwrap();\n\n\n\n Stdio::from(stdout)\n\n }\n\n\n\n pub fn stderr(&self) -> Stdio {\n\n if self.is_verbose() {\n\n return Stdio::inherit();\n\n }\n\n\n\n let stderr = OpenOptions::new()\n\n .append(true)\n\n .write(true)\n\n .create(true)\n\n .open(self.workspace().join(\"install-errors.log\"))\n\n .unwrap();\n\n\n", "file_path": "src/smalltalk/evaluator.rs", "rank": 84, "score": 27732.362464412043 }, { "content": "mod command;\n\nmod evaluator;\n\nmod execution;\n\nmod expression;\n\nmod script;\n\nmod smalltalk;\n\n\n\npub use command::SmalltalkCommand;\n\npub use evaluator::SmalltalkEvaluator;\n\npub use execution::SmalltalkScriptsToExecute;\n\npub use expression::{SmalltalkExpression, SmalltalkExpressionBuilder};\n\npub use script::SmalltalkScriptToExecute;\n\npub use smalltalk::{ExecutableSmalltalk, Smalltalk};\n", "file_path": "src/smalltalk/mod.rs", "rank": 85, "score": 27731.922826681388 }, { "content": " pub fn verbose(&mut self, verbose: bool) -> &mut Self {\n\n self.verbose = verbose;\n\n self\n\n }\n\n\n\n pub fn workspace(&self) -> &Path {\n\n self.smalltalk.workspace()\n\n }\n\n\n\n pub fn executable(&self) -> &Path {\n\n self.smalltalk.executable()\n\n }\n\n\n\n pub fn image(&self) -> &Path {\n\n self.smalltalk.image()\n\n }\n\n\n\n pub fn should_save(&self) -> bool {\n\n self.should_save\n\n }\n", "file_path": "src/smalltalk/evaluator.rs", "rank": 86, "score": 27731.386747030527 }, { "content": "\n\n pub fn should_quit(&self) -> bool {\n\n self.should_quit\n\n }\n\n\n\n pub fn wants_interactive(&self) -> bool {\n\n self.interactive\n\n }\n\n\n\n pub fn is_verbose(&self) -> bool {\n\n self.verbose\n\n }\n\n\n\n pub fn stdout(&self) -> Stdio {\n\n 
if self.is_verbose() {\n\n return Stdio::inherit();\n\n }\n\n\n\n let stdout = OpenOptions::new()\n\n .append(true)\n", "file_path": "src/smalltalk/evaluator.rs", "rank": 87, "score": 27730.675035415763 }, { "content": " return self;\n\n };\n\n\n\n let mut args = self.arguments;\n\n args.push(arg);\n\n Self {\n\n command: self.command,\n\n arguments: args,\n\n }\n\n }\n\n\n\n pub fn args(self, args: &Vec<impl AsRef<str>>) -> Self {\n\n let mut command = self;\n\n for arg in args {\n\n command = command.arg(arg.as_ref().to_owned());\n\n }\n\n command\n\n }\n\n}\n\n\n", "file_path": "src/smalltalk/command.rs", "rank": 88, "score": 27728.63851838709 }, { "content": " should_save: false,\n\n verbose: false,\n\n }\n\n }\n\n\n\n pub fn interactive(&mut self, interactive: bool) -> &mut Self {\n\n self.interactive = interactive;\n\n self\n\n }\n\n\n\n pub fn quit(&mut self, should_quit: bool) -> &mut Self {\n\n self.should_quit = should_quit;\n\n self\n\n }\n\n\n\n pub fn save(&mut self, should_save: bool) -> &mut Self {\n\n self.should_save = should_save;\n\n self\n\n }\n\n\n", "file_path": "src/smalltalk/evaluator.rs", "rank": 89, "score": 27728.58017503483 }, { "content": "\n\n for script in &self.scripts {\n\n index += 1;\n\n let pb = if evaluator.is_verbose() {\n\n println!(\"[{}/{}] Executing {:?}\", index, total, script.name());\n\n None\n\n } else {\n\n let pb = ProgressBar::new_spinner();\n\n\n\n pb.enable_steady_tick(120);\n\n pb.set_style(\n\n ProgressStyle::default_spinner()\n\n .tick_strings(&[\n\n \"🌑 \", \"🌒 \", \"🌓 \", \"🌔 \", \"🌕 \", \"🌖 \", \"🌗 \", \"🌘 \", \"✅ \",\n\n ])\n\n .template(\"{prefix:.bold.dim} {spinner:.blue} {wide_msg}\"),\n\n );\n\n pb.set_message(format!(\"Executing {:?}\", script.name()));\n\n pb.set_prefix(format!(\"[{}/{}]\", index, total));\n\n\n", "file_path": "src/smalltalk/execution.rs", "rank": 90, "score": 27724.149870798185 }, { "content": " Stdio::from(stderr)\n\n }\n\n\n\n pub fn command(&self) -> Command {\n\n let 
relative_executable = self.workspace().join(self.executable());\n\n let executable = to_absolute::canonicalize(&relative_executable).expect(&format!(\n\n \"Failed to canonicalize {}\",\n\n &relative_executable.display()\n\n ));\n\n\n\n let mut command = Command::new(executable);\n\n command\n\n .current_dir(self.workspace())\n\n .stdout(self.stdout())\n\n .stderr(self.stderr())\n\n .arg(self.image());\n\n command\n\n }\n\n}\n", "file_path": "src/smalltalk/evaluator.rs", "rank": 91, "score": 27722.138728140668 }, { "content": " Some(pb)\n\n };\n\n\n\n script.execute(evaluator)?;\n\n\n\n if let Some(ref pb) = pb {\n\n pb.finish_with_message(format!(\"Finished {:?}\", script.name()));\n\n } else {\n\n println!(\"Finished {:?}\", script.name());\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/smalltalk/execution.rs", "rank": 92, "score": 27717.72869877234 }, { "content": " .arg(\"st\")\n\n .arg(if evaluator.should_quit() {\n\n \"--quit\"\n\n } else {\n\n \"--no-quit\"\n\n })\n\n .arg(if evaluator.should_save() {\n\n \"--save\"\n\n } else {\n\n \"\"\n\n })\n\n .arg(if evaluator.wants_interactive() {\n\n \"--interactive\"\n\n } else {\n\n \"\"\n\n })\n\n .arg(self.script.as_path());\n\n\n\n Ok(command)\n\n }\n", "file_path": "src/smalltalk/script.rs", "rank": 93, "score": 27717.72869877234 }, { "content": " pub fn bump(&mut self, bump: VersionBump) {\n\n self.bump = bump;\n\n }\n\n}\n\n\n\n#[derive(ArgEnum, Copy, Clone, Debug)]\n\n#[repr(u32)]\n\npub enum SetupTarget {\n\n /// Setup GlamorousToolkit for the local build.\n\n #[clap(name = \"local-build\")]\n\n LocalBuild,\n\n /// Setup GlamorousToolkit for release\n\n #[clap(name = \"release\")]\n\n Release,\n\n}\n\n\n\nimpl FromStr for SetupTarget {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> std::result::Result<Self, String> {\n", "file_path": "src/tools/setup.rs", "rank": 94, "score": 40.24154324577295 }, { "content": "use crate::gtoolkit::GToolkit;\n\nuse crate::{Application, Result, 
StartOptions, Starter, BUILDING, CREATING};\n\nuse clap::{AppSettings, ArgEnum, Clap};\n\nuse feenk_releaser::VersionBump;\n\nuse std::str::FromStr;\n\n\n\npub struct Setup;\n\n\n\n#[derive(Clap, Debug, Clone)]\n\n#[clap(setting = AppSettings::ColorAlways)]\n\n#[clap(setting = AppSettings::ColoredHelp)]\n\npub struct SetupOptions {\n\n /// Do not open a default GtWorld\n\n #[clap(long)]\n\n pub no_gt_world: bool,\n\n /// Specify a setup target\n\n #[clap(long, default_value = \"local-build\", possible_values = SetupTarget::VARIANTS, case_insensitive = true)]\n\n pub target: SetupTarget,\n\n /// When building an image for a release, specify which component version to bump\n\n #[clap(long, default_value = VersionBump::Patch.to_str(), possible_values = VersionBump::variants(), case_insensitive = true)]\n", "file_path": "src/tools/setup.rs", "rank": 95, "score": 37.46957869538941 }, { "content": "use crate::FileToUnzip;\n\nuse crate::{Application, FileToDownload};\n\nuse std::path::PathBuf;\n\nuse url::Url;\n\n\n\n/// Represents a seed from which to build am image.\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub enum ImageSeed {\n\n Url(Url),\n\n Zip(PathBuf),\n\n Image(PathBuf),\n\n}\n\n\n\nimpl ImageSeed {\n\n pub fn file_to_download(&self, application: &Application) -> Option<FileToDownload> {\n\n match self {\n\n Self::Url(url) => Some(FileToDownload::new(\n\n url.to_string(),\n\n application.workspace(),\n\n \"seed-image.zip\",\n", "file_path": "src/seed.rs", "rank": 96, "score": 36.03012270969262 }, { "content": "use crate::{Application, ExecutableSmalltalk, ImageSeed, Result, SmalltalkCommand};\n\nuse clap::{AppSettings, Clap};\n\n\n\npub struct Renamer;\n\n\n\n#[derive(Clap, Debug, Clone)]\n\n#[clap(setting = AppSettings::ColorAlways)]\n\n#[clap(setting = AppSettings::ColoredHelp)]\n\npub struct RenameOptions {\n\n /// A new name of the image without the extension\n\n pub name: String,\n\n}\n\n\n\nimpl Renamer {\n\n pub fn new() -> Self {\n\n Self 
{}\n\n }\n\n\n\n pub async fn rename(\n\n &self,\n", "file_path": "src/tools/renamer.rs", "rank": 97, "score": 36.0095739917891 }, { "content": "pub use seed::*;\n\npub use smalltalk::*;\n\npub use tools::*;\n\npub use version::*;\n\npub use zipping::*;\n\n\n\nuse crate::options::SubCommand;\n\nuse clap::Clap;\n\nuse options::AppOptions;\n\nuse url::Url;\n\nuse user_error::{UserFacingError, UFE};\n\n\n\nasync fn run() -> Result<()> {\n\n let options: AppOptions = AppOptions::parse();\n\n\n\n let gtoolkit_vm_version = options.fetch_vm_version().await?;\n\n let gtoolkit_image_version = Application::latest_gtoolkit_image_version().await?;\n\n let image_seed = ImageSeed::Url(Url::parse(DEFAULT_PHARO_IMAGE)?);\n\n\n\n let mut application = Application::new(\n", "file_path": "src/main.rs", "rank": 98, "score": 34.2742980130689 }, { "content": " <SetupTarget as ArgEnum>::from_str(s, true)\n\n }\n\n}\n\n\n\nimpl ToString for SetupTarget {\n\n fn to_string(&self) -> String {\n\n (SetupTarget::VARIANTS[*self as usize]).to_owned()\n\n }\n\n}\n\n\n\nimpl Setup {\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n\n\n pub async fn setup(\n\n &self,\n\n application: &mut Application,\n\n setup_options: &SetupOptions,\n\n ) -> Result<()> {\n", "file_path": "src/tools/setup.rs", "rank": 99, "score": 31.46664665479338 } ]
Rust
src/lib.rs
gcarq/seek_bufread
1ff6dff6e449cff969e94bb77aacb89de20cc4ac
use std::fmt; use std::io::{self, BufRead, Read, Seek, SeekFrom}; const DEFAULT_BUF_SIZE: usize = 8 * 1024; pub struct BufReader<R> { inner: R, buf: Box<[u8]>, buf_pos: usize, cap: usize, absolute_pos: u64, } impl<R: Read + Seek> BufReader<R> { pub fn new(inner: R) -> BufReader<R> { BufReader::with_capacity(DEFAULT_BUF_SIZE, inner) } pub fn with_capacity(cap: usize, inner: R) -> BufReader<R> { BufReader { inner: inner, buf: vec![0; cap].into_boxed_slice(), buf_pos: 0, cap: 0, absolute_pos: 0, } } pub fn position(&self) -> u64 { self.absolute_pos } pub fn capacity(&self) -> usize { self.cap } pub fn available(&self) -> usize { self.cap.checked_sub(self.buf_pos).unwrap_or(0) } pub fn into_inner(mut self) -> io::Result<R> { try!(self.inner.seek(SeekFrom::Start(self.absolute_pos))); Ok(self.inner) } fn sync_and_flush(&mut self, pos: SeekFrom) -> io::Result<u64> { self.buf_pos = self.cap; self.absolute_pos = try!(self.inner.seek(pos)); Ok(self.absolute_pos) } fn seek_backward(&mut self, n: i64) -> io::Result<u64> { let n_abs = n.abs() as usize; if self.buf_pos.checked_sub(n_abs).is_some() { self.absolute_pos -= n_abs as u64; self.buf_pos -= n_abs; Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos - n_abs as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } fn seek_forward(&mut self, n: usize) -> io::Result<u64> { if self.available().checked_sub(n).is_some() { self.consume(n); Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos + n as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } } impl<R: Read> Read for BufReader<R> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let n_exp = buf.len(); let mut n_total = 0; while n_total < n_exp { let n_read = try!(try!(self.fill_buf()).read(&mut buf[n_total..])); if n_read == 0 { break; } self.consume(n_read); n_total += n_read; } Ok(n_total) } } impl<R: Read> BufRead for BufReader<R> { fn fill_buf(&mut self) -> io::Result<&[u8]> { if self.cap == self.buf_pos { self.cap = 
try!(self.inner.read(&mut self.buf)); self.buf_pos = 0; } Ok(&self.buf[self.buf_pos..self.cap]) } fn consume(&mut self, amt: usize) { self.buf_pos += amt; self.absolute_pos += amt as u64; } } impl<R: Read + Seek> Seek for BufReader<R> { fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { match pos { SeekFrom::Current(n) => { match n >= 0 { true => self.seek_forward(n as usize), false => self.seek_backward(n) } } SeekFrom::Start(n) => { match n.checked_sub(self.absolute_pos) { Some(n_bytes) => self.seek_forward(n_bytes as usize), None => self.sync_and_flush(pos) } } _ => self.sync_and_flush(pos) } } } impl<R> fmt::Debug for BufReader<R> where R: fmt::Debug + Read + Seek { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_struct("BufReader") .field("reader", &self.inner) .field("available", &self.available()) .field("capacity", &self.cap) .field("position", &self.absolute_pos) .finish() } } #[cfg(test)] mod tests { use super::*; use std::io::{self, Cursor, Read, Seek, SeekFrom}; #[test] fn default_behaviour() { let mut reader = BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn default_behaviour_std() { let mut reader = io::BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn small_capacity() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } 
#[test] fn small_capacity_std() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = io::BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } #[test] fn seek_start() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_start_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_current_positive() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = 
BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_positive_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_negative() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_current_negative_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_end() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(2, inner); reader.seek(SeekFrom::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_end_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(2, inner); reader.seek(SeekFrom::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn into_inner() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(4, inner); reader.seek(SeekFrom::Current(5)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 8, 9, 10, 11, 12]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 2]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12]); let mut inner = reader.into_inner().unwrap(); let mut buf = [0; 8]; inner.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); } }
use std::fmt; use std::io::{self, BufRead, Read, Seek, SeekFrom}; const DEFAULT_BUF_SIZE: usize = 8 * 1024; pub struct BufReader<R> { inner: R, buf: Box<[u8]>, buf_pos: usize, cap: usize, absolute_pos: u64, } impl<R: Read + Seek> BufReader<R> { pub fn new(inner: R) -> BufReader<R> { BufReader::with_capacity(DEFAULT_BUF_SIZE, inner) } pub fn with_capacity(cap: usize, inner: R) -> BufReader<R> { BufReader { inner: inner, buf: vec![0; cap].into_boxed_slice(), buf_pos: 0, cap: 0, absolute_pos: 0, } } pub fn position(&self) -> u64 { self.absolute_pos } pub fn capacity(&self) -> usize { self.cap } pub fn available(&self) -> usize { self.cap.checked_sub(self.buf_pos).unwrap_or(0) } pub fn into_inner(mut self) -> io::Result<R> { try!(self.inner.seek(SeekFrom::Start(self.absolute_pos))); Ok(self.inner) } fn sync_and_flush(&mut self, pos: SeekFrom) -> io::Result<u64> { self.buf_pos = self.cap; self.absolute_pos = try!(self.inner.seek(pos)); Ok(self.absolute_pos) } fn seek_backward(&mut self, n: i64) -> io::Result<u64> { let n_abs = n.abs() as usize; if self.buf_pos.checked_sub(n_abs).is_some() { self.absolute_pos -= n_abs as u64; self.buf_pos -= n_abs; Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos - n_abs as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } fn seek_forward(&mut self, n: usize) -> io::Result<u64> { if self.available().checked_sub(n).is_some() { self.consume(n); Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos + n as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } } impl<R: Read> Read for BufReader<R> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let n_exp = buf.len(); let mut n_total = 0; while n_total < n_exp { let n_read = try!(try!(self.fill_buf()).read(&mut buf[n_total..])); if n_read == 0 { break; } self.consume(n_read); n_total += n_read; } Ok(n_total) } } impl<R: Read> BufRead for BufReader<R> { fn fill_buf(&mut self) -> io::Result<&[u8]> { if self.cap == self.buf_pos { self.cap = 
try!(self.inner.read(&mut self.buf)); self.buf_pos = 0; } Ok(&self.buf[self.buf_pos..self.cap]) } fn consume(&mut self, amt: usize) { self.buf_pos += amt; self.absolute_pos += amt as u64; } } impl<R: Read + Seek> Seek for BufReader<R> { fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { match pos { SeekFrom::Current(n) => { match n >= 0 { true => self.seek_forward(n as usize), false => self.seek_backward(n) } } SeekFrom::Start(n) => { match n.checked_sub(self.absolute_pos) { Some(n_bytes) => self.seek_forward(n_bytes as usize), None => self.sync_and_flush(pos) } } _ => self.sync_and_flush(pos) } } } impl<R> fmt::Debug for BufReader<R> where R: fmt::Debug + Read + Seek { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_struct("BufReader") .field("reader", &self.inner) .field("available", &self.available()) .field("capacity", &self.cap) .field("position", &self.absolute_pos) .finish() } } #[cfg(test)] mod tests { use super::*; use std::io::{self, Cursor, Read, Seek, SeekFrom}; #[test] fn default_behaviour() { let mut reader = BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn default_behaviour_std() { let mut reader = io::BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn small_capacity() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } 
#[test] fn small_capacity_std() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = io::BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } #[test] fn seek_start() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_start_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_current_positive() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = 
BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_positive_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_negative() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_current_negative_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_end() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(2, inner); reader.seek(Seek
8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_end_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(2, inner); reader.seek(SeekFrom::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn into_inner() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(4, inner); reader.seek(SeekFrom::Current(5)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 8, 9, 10, 11, 12]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 2]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12]); let mut inner = reader.into_inner().unwrap(); let mut buf = [0; 8]; inner.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); } }
From::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0;
function_block-random_span
[ { "content": "#[bench]\n\nfn read_10mb_default_from_cursor(b: &mut Bencher) {\n\n b.iter(|| {\n\n let inner = Cursor::new(vec![1; 10000000]);\n\n let mut reader = BufReader::new(inner);\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 0, "score": 57713.2775277368 }, { "content": "#[bench]\n\nfn read_10mb_default_from_cursor_std(b: &mut Bencher) {\n\n b.iter(|| {\n\n let inner = Cursor::new(vec![1; 10000000]);\n\n let mut reader = io::BufReader::new(inner);\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 1, "score": 55395.98277204172 }, { "content": "#[bench]\n\nfn read_seek_10mb_default_from_file(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = BufReader::new(File::open(\"foo.txt\").unwrap());\n\n let mut buf: Vec<u8> = Vec::with_capacity(100000);\n\n for i in 0..100 {\n\n reader.seek(SeekFrom::Current(i * 100)).unwrap();\n\n reader.read(&mut buf).unwrap();\n\n }\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 2, "score": 55013.896932566735 }, { "content": "#[bench]\n\nfn read_seek_10mb_halfbuf_from_file(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = BufReader::with_capacity(5000000, File::open(\"foo.txt\").unwrap());\n\n let mut buf: Vec<u8> = Vec::with_capacity(100000);\n\n for i in 0..100 {\n\n reader.seek(SeekFrom::Current(i * 100)).unwrap();\n\n reader.read(&mut buf).unwrap();\n\n }\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 3, "score": 55013.896932566735 }, { "content": 
"#[bench]\n\nfn read_seek_10mb_default_from_file_std(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = io::BufReader::new(File::open(\"foo.txt\").unwrap());\n\n let mut buf: Vec<u8> = Vec::with_capacity(100000);\n\n for i in 0..100 {\n\n reader.seek(SeekFrom::Current(i * 100)).unwrap();\n\n reader.read(&mut buf).unwrap();\n\n }\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n", "file_path": "benches/std_compare.rs", "rank": 4, "score": 52913.01008012969 }, { "content": "#[bench]\n\nfn read_seek_10mb_halfbuf_from_file_std(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = io::BufReader::with_capacity(5000000, File::open(\"foo.txt\").unwrap());\n\n let mut buf: Vec<u8> = Vec::with_capacity(100000);\n\n for i in 0..100 {\n\n reader.seek(SeekFrom::Current(i * 100)).unwrap();\n\n reader.read(&mut buf).unwrap();\n\n }\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 5, "score": 52913.01008012969 }, { "content": "#[bench]\n\nfn read_10mb_fullbuf_from_file(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = BufReader::with_capacity(10000000, File::open(\"foo.txt\").unwrap());\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 6, "score": 44055.043626308725 }, { "content": "#[bench]\n\nfn read_10mb_default_from_file(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = BufReader::new(File::open(\"foo.txt\").unwrap());\n\n\n\n let mut buf: 
Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 7, "score": 44055.043626308725 }, { "content": "#[bench]\n\nfn read_10mb_halfbuf_from_file(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = BufReader::with_capacity(5000000, File::open(\"foo.txt\").unwrap());\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 8, "score": 44055.043626308725 }, { "content": "#[bench]\n\nfn read_10mb_fullbuf_from_file_std(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = io::BufReader::with_capacity(10000000, File::open(\"foo.txt\").unwrap());\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 9, "score": 42353.495762443454 }, { "content": "#[bench]\n\nfn read_10mb_halfbuf_from_file_std(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = io::BufReader::with_capacity(5000000, File::open(\"foo.txt\").unwrap());\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 10, "score": 42353.495762443454 }, { "content": "#[bench]\n\nfn read_10mb_default_from_file_std(b: &mut Bencher) {\n\n let mut f = File::create(\"foo.txt\").unwrap();\n\n f.write_all(&vec![0; 
10000000]).unwrap();\n\n b.iter(|| {\n\n let mut reader = io::BufReader::new(File::open(\"foo.txt\").unwrap());\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).unwrap();\n\n });\n\n fs::remove_file(\"foo.txt\").unwrap();\n\n}\n\n\n", "file_path": "benches/std_compare.rs", "rank": 11, "score": 42353.495762443454 }, { "content": "# seek_bufread\n\n\n\n[![Build Status](https://travis-ci.org/gcarq/seek_bufread.svg?branch=master)](https://travis-ci.org/gcarq/seek_bufread) [![Coverage Status](https://coveralls.io/repos/github/gcarq/seek_bufread/badge.svg?branch=master)](https://coveralls.io/github/gcarq/seek_bufread?branch=master) [![Crates.io](https://img.shields.io/crates/v/seek_bufread.svg)](https://crates.io/crates/seek_bufread/)\n\n\n\n**24.04.2019 NOTE:** This library is no longer needed since BufReader provides [seek_relative()](https://doc.rust-lang.org/std/io/struct.BufReader.html#method.seek_relative) with the same functionality. (See [rust#31100](https://github.com/rust-lang/rust/issues/31100)).\n\n\n\n\n\nA drop-in replacement for `std::io::BufReader` with seeking support.\n\n\n\n[Full Documentation](https://gcarq.github.io/seek_bufread)\n\n\n\n## Quick Example\n\n\n\n```rust\n\nuse std::io::{self, Cursor, Read, Seek, SeekFrom};\n\nuse seek_bufread::BufReader;\n\n\n\nlet inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);\n\nlet mut reader = BufReader::new(inner);\n\n\n\nreader.seek(SeekFrom::Current(4)).unwrap();\n\nlet mut buf = [0; 8];\n\n\n\n// read bytes from internal buffer\n\nreader.read(&mut buf).unwrap();\n\nassert_eq!(buf, [4, 5, 6, 7, 8, 9, 10, 11]);\n\n```\n\n\n\n## Usage\n\n\n\nPut this in your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\nseek_bufread = \"~1.2\"\n\n```\n\n\n\nAnd this in your crate root:\n\n\n\n```rust\n\nextern crate seek_bufread;\n\n```\n\n\n", "file_path": "README.md", "rank": 12, "score": 22652.88689242752 }, { "content": "## 
Benchmarks\n\n\n\nTests with the suffix `_std` are using the standard `std::io::BufRead`\n\nimplementation. The Overall performance without seek operations is\n\nquite similar between both. With seek operations ``seek_bufread::BufRead``\n\nis significantly faster.\n\n\n\n```\n\ntest tests::read_10mb_default_from_cursor ... bench: 6,044,915 ns/iter (+/- 275,518)\n\ntest tests::read_10mb_default_from_cursor_std ... bench: 6,038,466 ns/iter (+/- 227,145)\n\ntest tests::read_10mb_default_from_file ... bench: 4,213,179 ns/iter (+/- 116,043)\n\ntest tests::read_10mb_default_from_file_std ... bench: 4,224,658 ns/iter (+/- 132,629)\n\ntest tests::read_10mb_fullbuf_from_file ... bench: 7,280,470 ns/iter (+/- 209,827)\n\ntest tests::read_10mb_fullbuf_from_file_std ... bench: 7,448,666 ns/iter (+/- 2,720,199)\n\ntest tests::read_10mb_halfbuf_from_file ... bench: 5,962,017 ns/iter (+/- 415,952)\n\ntest tests::read_10mb_halfbuf_from_file_std ... bench: 5,904,902 ns/iter (+/- 240,471)\n\ntest tests::read_seek_10mb_default_from_file ... bench: 6,621 ns/iter (+/- 145)\n\ntest tests::read_seek_10mb_default_from_file_std ... bench: 59,651 ns/iter (+/- 1,993)\n\ntest tests::read_seek_10mb_halfbuf_from_file ... bench: 1,168,511 ns/iter (+/- 63,956)\n\ntest tests::read_seek_10mb_halfbuf_from_file_std ... bench: 62,872,335 ns/iter (+/- 5,344,766)\n\n```\n\n\n\n## License\n\n\n\nApache-2.0\n", "file_path": "README.md", "rank": 13, "score": 22649.085219652752 }, { "content": "#![feature(test)]\n\n\n\nextern crate seek_bufread;\n\nextern crate test;\n\n\n\nuse seek_bufread::BufReader;\n\n\n\nuse test::Bencher;\n\nuse std::fs::{self, File};\n\nuse std::io::{self, Cursor, Read, Write, Seek, SeekFrom};\n\n\n\n#[bench]\n", "file_path": "benches/std_compare.rs", "rank": 23, "score": 13.00281863986943 } ]
Rust
src/types/rowkey.rs
TimeExceed/alicloud-tablestore-rust-sdk
d8a7b24c10803e4641d6f0d30e0b9d4ddda100c4
use bytes::Bytes; use crate::{Error, ErrorCode}; use std::convert::TryFrom; use super::*; #[cfg(test)] use quickcheck::{Arbitrary, Gen, empty_shrinker}; #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKey(pub Vec<RowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKey(pub Vec<ExtendedRowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKeyColumn { pub name: Name, pub value: RowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKeyColumn { pub name: Name, pub value: ExtendedRowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub enum RowKeyValue { Int(i64), Str(String), Blob(Bytes), } #[derive(Debug, Clone, Eq, PartialEq)] pub enum ExtendedRowKeyValue { Int(i64), Str(String), Blob(Bytes), InfMin, InfMax, AutoIncr, } impl RowKey { pub fn new(keys: Vec<RowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&RowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=RowKeyColumn> { self.0.into_iter() } } impl ExtendedRowKey { pub fn new(keys: Vec<ExtendedRowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&ExtendedRowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=ExtendedRowKeyColumn> { self.0.into_iter() } } impl From<RowKey> for ExtendedRowKey { fn from(rk: RowKey) -> Self { let ext_rk = rk.into_iter() .map(|x| { ExtendedRowKeyColumn::from(x) }) .collect(); ExtendedRowKey::new(ext_rk) } } impl TryFrom<ExtendedRowKey> for RowKey { type Error = Error; fn try_from(ext_rk: ExtendedRowKey) -> Result<RowKey, Error> { let mut xs = vec![]; let r = ext_rk.into_iter() .try_for_each(|x| { match RowKeyColumn::try_from(x) { Ok(x) => { xs.push(x); Ok(()) } Err(e) => { Err(e) } } }); match r { Ok(_) => Ok(RowKey::new(xs)), Err(e) => Err(e), } } } impl From<RowKeyColumn> for ExtendedRowKeyColumn { fn from(x: RowKeyColumn) -> Self { ExtendedRowKeyColumn{ name: x.name, value: ExtendedRowKeyValue::from(x.value), } } } impl 
TryFrom<ExtendedRowKeyColumn> for RowKeyColumn { type Error = Error; fn try_from(x: ExtendedRowKeyColumn) -> Result<Self, Error> { Ok(RowKeyColumn{ name: x.name, value: RowKeyValue::try_from(x.value)?, }) } } impl From<RowKeyValue> for ExtendedRowKeyValue { fn from(x: RowKeyValue) -> Self { match x { RowKeyValue::Int(x) => ExtendedRowKeyValue::Int(x), RowKeyValue::Str(x) => ExtendedRowKeyValue::Str(x), RowKeyValue::Blob(x) => ExtendedRowKeyValue::Blob(x), } } } impl TryFrom<ExtendedRowKeyValue> for RowKeyValue { type Error = Error; fn try_from(value: ExtendedRowKeyValue) -> Result<Self, Self::Error> { let msg = "Cannot convert InfMin/InfMax/AutoIncr to PrimaryKeyValue"; match value { ExtendedRowKeyValue::Int(x) => Ok(RowKeyValue::Int(x)), ExtendedRowKeyValue::Str(x) => Ok(RowKeyValue::Str(x)), ExtendedRowKeyValue::Blob(x) => Ok(RowKeyValue::Blob(x)), _ => Err(Error{ code: ErrorCode::ClientUnknown, message: msg.to_string(), }) } } } #[cfg(test)] impl Arbitrary for RowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { RowKeyColumn{ name: Name::arbitrary(g), value: RowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { RowKeyColumn{ name: x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { RowKeyColumn{ name: name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { ExtendedRowKeyColumn{ name: Name::arbitrary(g), value: ExtendedRowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: 
name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for RowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let res = ExtendedRowKeyValue::arbitrary(g); match RowKeyValue::try_from(res) { Ok(x) => { return x; } Err(_) => {} } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let res = ExtendedRowKeyValue::from(self.clone()) .shrink() .map(|x| { RowKeyValue::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { match g.next_u32() % 6 { 0 => ExtendedRowKeyValue::Int(i64::arbitrary(g)), 1 => ExtendedRowKeyValue::Str(String::arbitrary(g)), 2 => ExtendedRowKeyValue::Blob(Bytes::from(Vec::<u8>::arbitrary(g))), 3 => ExtendedRowKeyValue::InfMin, 4 => ExtendedRowKeyValue::InfMax, 5 => ExtendedRowKeyValue::AutoIncr, _ => unimplemented!() } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { match self { ExtendedRowKeyValue::Int(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Int(x) }); Box::new(res) } ExtendedRowKeyValue::Str(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Str(x) }); Box::new(res) } ExtendedRowKeyValue::Blob(x) => { let res = x.to_vec().shrink() .map(|x| { ExtendedRowKeyValue::Blob(Bytes::from(x)) }); Box::new(res) } _ => empty_shrinker(), } } } #[cfg(test)] impl Arbitrary for RowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let ext = ExtendedRowKey::arbitrary(g); if let Ok(res) = RowKey::try_from(ext) { return res; } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let ext = ExtendedRowKey::from(self.clone()); let res = ext.shrink() .map(|x| { RowKey::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { let mut keys = vec![]; loop { let go_on = bool::arbitrary(g); if !go_on { break; } let col = ExtendedRowKeyColumn::arbitrary(g); keys.push(col); } 
ExtendedRowKey::new(keys) } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let higher = self.0.len(); let mut lower = higher / 2; let xs_fn = move || { if lower >= higher { return None; } let mid = (lower + higher) / 2; lower = mid + 1; return Some(mid); }; let for_cut_tail = self.clone(); let xs = std::iter::from_fn(xs_fn) .map(move |x| { let orig: &[ExtendedRowKeyColumn] = for_cut_tail.0.as_slice(); let mut res: Vec<ExtendedRowKeyColumn> = vec![]; res.extend_from_slice(&orig[0..x]); ExtendedRowKey::new(res) }); let mut res: Box<dyn Iterator<Item = Self>> = Box::new(xs); for i in 0..self.0.len() { let me = self.0.clone(); let ys = self.0[i].shrink() .map(move |x| { let mut me_too = me.clone(); me_too[i] = x.clone(); ExtendedRowKey::new(me_too) }); res = Box::new(res.chain(ys)) } res } }
use bytes::Bytes; use crate::{Error, ErrorCode}; use std::convert::TryFrom; use super::*; #[cfg(test)
y> for ExtendedRowKey { fn from(rk: RowKey) -> Self { let ext_rk = rk.into_iter() .map(|x| { ExtendedRowKeyColumn::from(x) }) .collect(); ExtendedRowKey::new(ext_rk) } } impl TryFrom<ExtendedRowKey> for RowKey { type Error = Error; fn try_from(ext_rk: ExtendedRowKey) -> Result<RowKey, Error> { let mut xs = vec![]; let r = ext_rk.into_iter() .try_for_each(|x| { match RowKeyColumn::try_from(x) { Ok(x) => { xs.push(x); Ok(()) } Err(e) => { Err(e) } } }); match r { Ok(_) => Ok(RowKey::new(xs)), Err(e) => Err(e), } } } impl From<RowKeyColumn> for ExtendedRowKeyColumn { fn from(x: RowKeyColumn) -> Self { ExtendedRowKeyColumn{ name: x.name, value: ExtendedRowKeyValue::from(x.value), } } } impl TryFrom<ExtendedRowKeyColumn> for RowKeyColumn { type Error = Error; fn try_from(x: ExtendedRowKeyColumn) -> Result<Self, Error> { Ok(RowKeyColumn{ name: x.name, value: RowKeyValue::try_from(x.value)?, }) } } impl From<RowKeyValue> for ExtendedRowKeyValue { fn from(x: RowKeyValue) -> Self { match x { RowKeyValue::Int(x) => ExtendedRowKeyValue::Int(x), RowKeyValue::Str(x) => ExtendedRowKeyValue::Str(x), RowKeyValue::Blob(x) => ExtendedRowKeyValue::Blob(x), } } } impl TryFrom<ExtendedRowKeyValue> for RowKeyValue { type Error = Error; fn try_from(value: ExtendedRowKeyValue) -> Result<Self, Self::Error> { let msg = "Cannot convert InfMin/InfMax/AutoIncr to PrimaryKeyValue"; match value { ExtendedRowKeyValue::Int(x) => Ok(RowKeyValue::Int(x)), ExtendedRowKeyValue::Str(x) => Ok(RowKeyValue::Str(x)), ExtendedRowKeyValue::Blob(x) => Ok(RowKeyValue::Blob(x)), _ => Err(Error{ code: ErrorCode::ClientUnknown, message: msg.to_string(), }) } } } #[cfg(test)] impl Arbitrary for RowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { RowKeyColumn{ name: Name::arbitrary(g), value: RowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { RowKeyColumn{ name: 
x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { RowKeyColumn{ name: name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { ExtendedRowKeyColumn{ name: Name::arbitrary(g), value: ExtendedRowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for RowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let res = ExtendedRowKeyValue::arbitrary(g); match RowKeyValue::try_from(res) { Ok(x) => { return x; } Err(_) => {} } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let res = ExtendedRowKeyValue::from(self.clone()) .shrink() .map(|x| { RowKeyValue::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { match g.next_u32() % 6 { 0 => ExtendedRowKeyValue::Int(i64::arbitrary(g)), 1 => ExtendedRowKeyValue::Str(String::arbitrary(g)), 2 => ExtendedRowKeyValue::Blob(Bytes::from(Vec::<u8>::arbitrary(g))), 3 => ExtendedRowKeyValue::InfMin, 4 => ExtendedRowKeyValue::InfMax, 5 => ExtendedRowKeyValue::AutoIncr, _ => unimplemented!() } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { match self { ExtendedRowKeyValue::Int(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Int(x) }); Box::new(res) } ExtendedRowKeyValue::Str(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Str(x) }); Box::new(res) } ExtendedRowKeyValue::Blob(x) => { let res = x.to_vec().shrink() .map(|x| { 
ExtendedRowKeyValue::Blob(Bytes::from(x)) }); Box::new(res) } _ => empty_shrinker(), } } } #[cfg(test)] impl Arbitrary for RowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let ext = ExtendedRowKey::arbitrary(g); if let Ok(res) = RowKey::try_from(ext) { return res; } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let ext = ExtendedRowKey::from(self.clone()); let res = ext.shrink() .map(|x| { RowKey::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { let mut keys = vec![]; loop { let go_on = bool::arbitrary(g); if !go_on { break; } let col = ExtendedRowKeyColumn::arbitrary(g); keys.push(col); } ExtendedRowKey::new(keys) } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let higher = self.0.len(); let mut lower = higher / 2; let xs_fn = move || { if lower >= higher { return None; } let mid = (lower + higher) / 2; lower = mid + 1; return Some(mid); }; let for_cut_tail = self.clone(); let xs = std::iter::from_fn(xs_fn) .map(move |x| { let orig: &[ExtendedRowKeyColumn] = for_cut_tail.0.as_slice(); let mut res: Vec<ExtendedRowKeyColumn> = vec![]; res.extend_from_slice(&orig[0..x]); ExtendedRowKey::new(res) }); let mut res: Box<dyn Iterator<Item = Self>> = Box::new(xs); for i in 0..self.0.len() { let me = self.0.clone(); let ys = self.0[i].shrink() .map(move |x| { let mut me_too = me.clone(); me_too[i] = x.clone(); ExtendedRowKey::new(me_too) }); res = Box::new(res.chain(ys)) } res } }
] use quickcheck::{Arbitrary, Gen, empty_shrinker}; #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKey(pub Vec<RowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKey(pub Vec<ExtendedRowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKeyColumn { pub name: Name, pub value: RowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKeyColumn { pub name: Name, pub value: ExtendedRowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub enum RowKeyValue { Int(i64), Str(String), Blob(Bytes), } #[derive(Debug, Clone, Eq, PartialEq)] pub enum ExtendedRowKeyValue { Int(i64), Str(String), Blob(Bytes), InfMin, InfMax, AutoIncr, } impl RowKey { pub fn new(keys: Vec<RowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&RowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=RowKeyColumn> { self.0.into_iter() } } impl ExtendedRowKey { pub fn new(keys: Vec<ExtendedRowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&ExtendedRowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=ExtendedRowKeyColumn> { self.0.into_iter() } } impl From<RowKe
random
[ { "content": "fn peek_tag(inp: &mut dyn Buf) -> Result<super::Tag, Error> {\n\n if !inp.has_remaining() {\n\n return issue_error();\n\n }\n\n let xs = inp.bytes();\n\n assert!(!xs.is_empty());\n\n Ok(super::Tag::try_from(xs[0])?)\n\n}\n\n\n", "file_path": "src/plainbuffer/serde.rs", "rank": 0, "score": 34952.810237154394 }, { "content": "fn peek_and_expect(inp: &mut dyn Buf, exp: super::Tag) -> bool {\n\n if let Ok(tag) = peek_tag(inp) {\n\n if tag == exp {\n\n return true;\n\n }\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/plainbuffer/serde.rs", "rank": 1, "score": 34952.810237154394 }, { "content": "use bytes::Bytes;\n\nuse crate::Error;\n\nuse crate::protocol as pb;\n\nuse std::convert::TryFrom;\n\nuse super::*;\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub struct ListTableRequest {}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ListTableResponse {\n\n pub base: super::BaseResponse,\n\n pub tables: Vec<Name>,\n\n}\n\n\n\nimpl From<ListTableRequest> for pb::ListTableRequest {\n\n fn from(_: ListTableRequest) -> pb::ListTableRequest {\n\n pb::ListTableRequest{}\n\n }\n\n}\n", "file_path": "src/types/list_table.rs", "rank": 2, "score": 7.220136271344534 }, { "content": "use bytes::Bytes;\n\nuse crate::Error;\n\nuse crate::protocol as pb;\n\nuse std::convert::TryFrom;\n\nuse super::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct DeleteTableRequest {\n\n pub name: Name,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct DeleteTableResponse {\n\n pub base: super::BaseResponse,\n\n}\n\n\n\nimpl From<DeleteTableRequest> for pb::DeleteTableRequest {\n\n fn from(x: DeleteTableRequest) -> pb::DeleteTableRequest {\n\n pb::DeleteTableRequest{\n\n table_name: x.name.into(),\n", "file_path": "src/types/delete_table.rs", "rank": 3, "score": 7.18573271579551 }, { "content": " let write = {\n\n let write = u16::arbitrary(g);\n\n let write = write as i32;\n\n SettableValue::Value(write + 1)\n\n };\n\n Self{\n\n read,\n\n write,\n\n }\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod ut {\n\n use super::*;\n\n use tokio::stream::StreamExt;\n\n use tokio::sync::*;\n\n\n\n #[quickcheck]\n\n fn tableoptions_serde_is_identity_0(oracle: TableOptions) -> bool {\n", "file_path": "src/types/table_options.rs", "rank": 4, "score": 6.657463134608813 }, { "content": "use bytes::Bytes;\n\nuse crate::Error;\n\nuse crate::protocol as pb;\n\nuse std::convert::TryFrom;\n\nuse super::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct CreateTableRequest {\n\n pub table_meta: TableMeta,\n\n pub options: TableOptions,\n\n}\n\n\n\nimpl CreateTableRequest {\n\n pub fn new(table_meta: TableMeta) -> Self {\n\n Self{\n\n table_meta,\n\n options: TableOptions::default_for_create(),\n\n }\n\n }\n\n}\n", "file_path": "src/types/create_table.rs", "rank": 5, "score": 6.546578260223846 }, { "content": "use bytes::Bytes;\n\nuse crate::Error;\n\nuse crate::protocol as pb;\n\nuse crate::plainbuffer::PbufSerde;\n\nuse std::convert::{TryFrom};\n\nuse super::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PutRowRequest {\n\n pub table_name: Name,\n\n pub row: Row,\n\n pub condition: Condition,\n\n pub in_return: InReturn,\n\n}\n\n\n\nimpl PutRowRequest {\n\n pub fn new<T: ToString>(table_name: T, row: Row) -> Result<Self, Error> {\n\n Ok(Self{\n\n table_name: Name::new(table_name),\n\n row,\n", "file_path": "src/types/put_row.rs", "rank": 6, "score": 6.238379275465103 }, { "content": "use crate::protocol;\n\nuse std::convert::From;\n\nuse super::*;\n\n\n\n#[cfg(test)]\n\nuse quickcheck::{Arbitrary, Gen};\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct TableMeta {\n\n pub name: Name,\n\n pub schema: Vec<PkeyColumnSchema>,\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct PkeyColumnSchema {\n\n pub name: Name,\n\n pub type_: PkeyValueType,\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n", "file_path": "src/types/table_meta.rs", "rank": 7, "score": 6.156085952776907 }, { "content": "use bytes::Bytes;\n\nuse 
super::*;\n\n\n\n#[cfg(test)]\n\nuse quickcheck::{Arbitrary, Gen, empty_shrinker};\n\n\n\n#[derive(Debug, Clone)]\n\npub enum AttrValue {\n\n Str(String),\n\n Int(i64),\n\n Blob(Bytes),\n\n Bool(bool),\n\n Float(f64),\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct Attribute {\n\n pub name: Name,\n\n pub value: AttrValue,\n\n pub timestamp: AttrTimestamp,\n", "file_path": "src/types/attr.rs", "rank": 8, "score": 6.088960150567722 }, { "content": " Self{\n\n auto_increment: bool::arbitrary(g),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod ut {\n\n use crate::protocol;\n\n use super::*;\n\n\n\n #[quickcheck]\n\n fn tablemeta_serde_is_identity_0(oracle: TableMeta) -> bool {\n\n println!(\"oracle: {:?}\", oracle);\n\n let middle = protocol::TableMeta::from(oracle.clone());\n\n println!(\"middle: {:?}\", middle);\n\n let trial = TableMeta::from(middle);\n\n println!(\"trial {:?}\", trial);\n\n oracle == trial\n\n }\n\n}\n", "file_path": "src/types/table_meta.rs", "rank": 10, "score": 5.791086987324883 }, { "content": "use super::*;\n\n\n\n#[cfg(test)]\n\nuse quickcheck::{Arbitrary, Gen};\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct Row {\n\n pub row_key: RowKey,\n\n pub attrs: Vec<Attribute>,\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Arbitrary for Row {\n\n fn arbitrary<G: Gen>(g: &mut G) -> Self {\n\n Row{\n\n row_key: RowKey::arbitrary(g),\n\n attrs: Vec::<Attribute>::arbitrary(g),\n\n }\n\n }\n\n\n", "file_path": "src/types/row.rs", "rank": 11, "score": 5.647178028387966 }, { "content": "use crate::protocol as pb;\n\nuse std::convert::From;\n\nuse super::common::SettableValue;\n\n\n\n#[cfg(test)]\n\nuse quickcheck::{Arbitrary, Gen};\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct TableOptions {\n\n pub capacity_unit: CapacityUnit,\n\n pub time_to_live: SettableValue<chrono::Duration>,\n\n pub max_versions: SettableValue<i32>,\n\n pub deviated_duration: SettableValue<chrono::Duration>,\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, 
PartialEq)]\n\npub struct CapacityUnit {\n\n pub read: SettableValue<i32>,\n\n pub write: SettableValue<i32>,\n\n}\n", "file_path": "src/types/table_options.rs", "rank": 12, "score": 5.624750775273845 }, { "content": "use bytes::{Bytes, BytesMut};\n\nuse crate::plainbuffer as pbuf;\n\nuse crate::types::*;\n\nuse pbuf::{Serde, SerdeWithCrc8};\n\n\n\n#[quickcheck]\n", "file_path": "src/plainbuffer/tests/primitives.rs", "rank": 13, "score": 4.795300474638191 }, { "content": "}\n\n\n\nmod common;\n\npub use self::common::*;\n\nmod action;\n\npub use self::action::*;\n\nmod name;\n\npub use self::name::*;\n\nmod table_meta;\n\npub use self::table_meta::*;\n\nmod table_options;\n\npub use self::table_options::*;\n\nmod list_table;\n\npub use self::list_table::*;\n\nmod create_table;\n\npub use self::create_table::*;\n\nmod delete_table;\n\npub use self::delete_table::*;\n\nmod rowkey;\n\npub use self::rowkey::*;\n", "file_path": "src/types/mod.rs", "rank": 14, "score": 4.782585960984373 }, { "content": "#[cfg(test)]\n\n#[macro_use(quickcheck)]\n\nextern crate quickcheck_macros;\n\n#[macro_use]\n\nextern crate log;\n\n\n\npub mod error;\n\npub use self::error::{Error, ErrorCode};\n\n\n\nmod client;\n\npub use self::client::*;\n\n\n\nmod credential;\n\npub use self::credential::*;\n\n\n\nmod endpoint;\n\npub use self::endpoint::*;\n\n\n\nmod types;\n\npub use self::types::*;\n", "file_path": "src/lib.rs", "rank": 15, "score": 4.768806451290937 }, { "content": "use bytes::{Bytes, BytesMut};\n\nuse crate::plainbuffer as pbuf;\n\nuse crate::types::*;\n\nuse pbuf::{Serde, SerdeWithCrc8, PbufSerde};\n\n\n\n#[quickcheck]\n", "file_path": "src/plainbuffer/tests/rows.rs", "rank": 16, "score": 4.74677667055391 }, { "content": "mod attr;\n\npub use self::attr::*;\n\nmod row;\n\npub use self::row::*;\n\nmod put_row;\n\npub use self::put_row::*;\n\nmod condition;\n\npub use self::condition::*;\n\nmod in_return;\n\npub use self::in_return::*;\n", "file_path": "src/types/mod.rs", 
"rank": 17, "score": 4.74420239603989 }, { "content": "use bytes::Bytes;\n\nuse chrono::prelude::*;\n\nuse crate::{Endpoint, Credential, ClientOptions, Error, ErrorCode, types};\n\nuse crypto::digest::Digest;\n\nuse crypto::mac::Mac;\n\nuse log::*;\n\nuse std::collections::BTreeMap;\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::sync::atomic::{AtomicI64, Ordering};\n\nuse std::sync::Arc;\n\nuse tokio::stream::StreamExt;\n\nuse tokio::sync::{mpsc, oneshot};\n\n\n\n#[derive(Clone)]\n\npub(crate) struct ClientImpl {\n\n endpoint: Endpoint,\n\n credential: Credential,\n\n opts: ClientOptions,\n\n http_clients: hyper::Client<hyper::client::HttpConnector<hyper::client::connect::dns::GaiResolver>, hyper::Body>,\n\n}\n", "file_path": "src/client_impl.rs", "rank": 18, "score": 4.714397094780116 }, { "content": "use bytes::Bytes;\n\nuse crate::Error;\n\nuse quick_protobuf::{MessageRead, BytesReader, message::MessageWrite};\n\n\n", "file_path": "src/types/mod.rs", "rank": 19, "score": 4.595912389302634 }, { "content": "use bytes::Bytes;\n\nuse crate::Error;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\nmod crc;\n\npub(in crate::plainbuffer) use crc::*;\n\nmod consts;\n\npub(in crate::plainbuffer) use consts::*;\n\nmod serde;\n\npub(in crate::plainbuffer) use serde::*;\n\n\n\npub(crate) trait PbufSerde: Sized {\n\n fn to_pbuf(&self) -> Vec<u8>;\n\n fn from_pbuf(buf: Bytes) -> Result<Self, Error>;\n\n}\n\n\n\nimpl PbufSerde for crate::types::Row {\n\n fn to_pbuf(&self) -> Vec<u8> {\n", "file_path": "src/plainbuffer/mod.rs", "rank": 20, "score": 4.308463942974676 }, { "content": " }\n\n let _ = u32::deserialize(inp)?;\n\n let vt = super::VariantType::deserialize_crc8(inp, checksum)?;\n\n match vt {\n\n super::VariantType::Integer => {\n\n let x = i64::deserialize_crc8(inp, checksum)?;\n\n Ok(AttrValue::Int(x))\n\n }\n\n super::VariantType::String => {\n\n let s = String::deserialize_crc8(inp, checksum)?;\n\n Ok(AttrValue::Str(s))\n\n }\n\n super::VariantType::Blob => {\n\n let 
b = Bytes::deserialize_crc8(inp, checksum)?;\n\n Ok(AttrValue::Blob(b))\n\n }\n\n super::VariantType::Boolean => {\n\n let x = u8::deserialize(inp)?;\n\n super::crc8_u8(checksum, x);\n\n Ok(AttrValue::Bool(x > 0))\n", "file_path": "src/plainbuffer/serde.rs", "rank": 21, "score": 4.2590974591667585 }, { "content": " let x = u8::deserialize(inp)?;\n\n let res = super::Tag::try_from(x)?;\n\n Ok(res)\n\n }\n\n}\n\n\n\nimpl Serde for super::VariantType {\n\n fn serialize(&self, out: &mut dyn BufMut) {\n\n (*self as u8).serialize(out)\n\n }\n\n\n\n fn deserialize(inp: &mut dyn Buf) -> Result<super::VariantType, Error> {\n\n if !inp.has_remaining() {\n\n return issue_error();\n\n }\n\n let x = u8::deserialize(inp)?;\n\n let res = super::VariantType::try_from(x)?;\n\n Ok(res)\n\n }\n\n}\n", "file_path": "src/plainbuffer/serde.rs", "rank": 22, "score": 4.211290410683272 }, { "content": " x.serialize(out);\n\n super::crc8_u8(checksum, x);\n\n }\n\n AttrValue::Float(x) => {\n\n let xs = x.to_le_bytes();\n\n ((xs.len() + 1) as u32).serialize(out);\n\n super::VariantType::Double.serialize_crc8(out, checksum);\n\n out.put_slice(xs.as_ref());\n\n super::crc8_blob(checksum, &xs);\n\n }\n\n }\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> {\n\n let expect_cellvalue = super::Tag::deserialize(inp)?;\n\n if expect_cellvalue != super::Tag::CellValue {\n\n return issue_error();\n", "file_path": "src/plainbuffer/serde.rs", "rank": 23, "score": 4.209253719404294 }, { "content": "use tablestore as ots;\n\n\n", "file_path": "examples/create_list_delete_table.rs", "rank": 24, "score": 4.114161382983902 }, { "content": "use tablestore as ots;\n\n\n", "file_path": "tests/utils.rs", "rank": 25, "score": 4.114161382983902 }, { "content": "use tablestore as ots;\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 4.114161382983902 }, { "content": " }\n\n\n\n fn write_message<W: WriterBackend>(&self, w: &mut Writer<W>) -> 
Result<()> {\n\n w.write_with_tag(10, |w| w.write_message(&self.consumed))?;\n\n for s in &self.schema { w.write_with_tag(18, |w| w.write_message(s))?; }\n\n for s in &self.split_points { w.write_with_tag(26, |w| w.write_bytes(&**s))?; }\n\n for s in &self.locations { w.write_with_tag(34, |w| w.write_message(s))?; }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[allow(non_snake_case)]\n\npub mod mod_ComputeSplitPointsBySizeResponse {\n\n\n\nuse super::*;\n\n\n\n#[derive(Debug, Default, PartialEq, Clone)]\n\npub struct SplitLocation {\n\n pub location: String,\n\n pub repeat: i64,\n", "file_path": "src/protocol.rs", "rank": 27, "score": 4.104058148886984 }, { "content": " msecs.serialize_crc8(out, &mut cell_chksum);\n\n }\n\n super::Tag::CellChecksum.serialize(out);\n\n cell_chksum.serialize(out);\n\n super::crc8_u8(checksum, cell_chksum);\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> {\n\n if super::Tag::deserialize(inp)? != super::Tag::Cell {\n\n return issue_error();\n\n }\n\n let mut cell_chksum = 0u8;\n\n let name = Name::deserialize_crc8(inp, &mut cell_chksum)?;\n\n let value = AttrValue::deserialize_crc8(inp, &mut cell_chksum)?;\n\n let tm = if peek_and_expect(inp, super::Tag::CellTimestamp) {\n\n super::Tag::deserialize(inp)?;\n\n let msecs = i64::deserialize_crc8(inp, &mut cell_chksum)?;\n", "file_path": "src/plainbuffer/serde.rs", "rank": 28, "score": 4.073240056761936 }, { "content": "\n\nimpl SerdeWithCrc8 for super::VariantType {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n self.serialize(out);\n\n super::crc8_u8(checksum, *self as u8);\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<super::VariantType, Error> {\n\n let res = super::VariantType::deserialize(inp)?;\n\n super::crc8_u8(checksum, res as u8);\n\n Ok(res)\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for RowKeyColumn {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, 
checksum: &mut u8) {\n\n let mut cell_chksum = 0u8;\n", "file_path": "src/plainbuffer/serde.rs", "rank": 29, "score": 4.048591410180015 }, { "content": " if !peek_and_expect(inp, super::Tag::Cell) {\n\n break;\n\n }\n\n let attr = Attribute::deserialize_crc8(inp, &mut checksum)?;\n\n attrs.push(attr);\n\n }\n\n }\n\n super::crc8_u8(&mut checksum, 0u8); // placeholder for missing row-delete marker\n\n if peek_and_expect(inp, super::Tag::RowChecksum) {\n\n let _ = super::Tag::deserialize(inp)?;\n\n let exp = u8::deserialize(inp)?;\n\n if checksum != exp {\n\n return issue_error();\n\n }\n\n }\n\n Ok(Row{\n\n row_key,\n\n attrs,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/plainbuffer/serde.rs", "rank": 30, "score": 4.04696329030997 }, { "content": "\n\nmod protocol;\n\nmod client_impl;\n\n\n\nmod client_options;\n\npub use self::client_options::*;\n\n\n\npub(crate) mod plainbuffer;\n\n\n\nmod retry;\n\npub use self::retry::*;\n", "file_path": "src/lib.rs", "rank": 31, "score": 4.02463783152427 }, { "content": "use crate::{Endpoint, Credential, ClientOptions, Error, types};\n\nuse crate::client_impl;\n\nuse log::*;\n\nuse tokio::sync::{mpsc, oneshot};\n\n\n\n#[derive(Clone)]\n\npub struct Client {\n\n cmd_sender: mpsc::Sender<client_impl::Cmd>,\n\n}\n\n\n\nimpl Client {\n\n pub fn new(\n\n endpoint: Endpoint,\n\n credential: Credential,\n\n opts: ClientOptions,\n\n ) -> Result<Client, Error> {\n\n let tx = client_impl::ClientImpl::new(endpoint, credential, opts);\n\n let res = Client{\n\n cmd_sender: tx,\n\n };\n", "file_path": "src/client.rs", "rank": 32, "score": 3.9813213457096723 }, { "content": " ExtendedRowKeyValue::AutoIncr => {\n\n 1u32.serialize(out);\n\n super::VariantType::AutoIncrement.serialize_crc8(out, checksum);\n\n }\n\n }\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<ExtendedRowKeyValue, Error> {\n\n let expect_cellvalue = super::Tag::deserialize(inp)?;\n\n if expect_cellvalue != super::Tag::CellValue 
{\n\n return issue_error();\n\n }\n\n let exp_payload_len = usize::try_from(u32::deserialize(inp)?).unwrap();\n\n let vt = super::VariantType::deserialize_crc8(inp, checksum)?;\n\n let mut real_payload_len = 1usize;\n\n let res = match vt {\n\n super::VariantType::Integer => {\n", "file_path": "src/plainbuffer/serde.rs", "rank": 33, "score": 3.9795483542156003 }, { "content": "use crate::plainbuffer as pbuf;\n\n\n\n#[test]\n", "file_path": "src/plainbuffer/tests/crc.rs", "rank": 34, "score": 3.974721119983056 }, { "content": " let mut checksum = 0u8;\n\n self.row_key.serialize_crc8(out, &mut checksum);\n\n if !self.attrs.is_empty() {\n\n super::Tag::RowData.serialize(out);\n\n for x in self.attrs.iter() {\n\n x.serialize_crc8(out, &mut checksum);\n\n }\n\n }\n\n super::crc8_u8(&mut checksum, 0); // placeholder for missing row-delete marker\n\n super::Tag::RowChecksum.serialize(out);\n\n checksum.serialize(out);\n\n }\n\n\n\n fn deserialize(inp: &mut dyn Buf) -> Result<Self, Error> {\n\n let mut checksum = 0u8;\n\n let row_key = RowKey::deserialize_crc8(inp, &mut checksum)?;\n\n let mut attrs = vec![];\n\n if peek_and_expect(inp, super::Tag::RowData) {\n\n let _ = super::Tag::deserialize(inp)?;\n\n loop {\n", "file_path": "src/plainbuffer/serde.rs", "rank": 35, "score": 3.966021366312506 }, { "content": " let name = name.as_bytes();\n\n name.serialize(out);\n\n super::crc8_blob(checksum, name);\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> {\n\n if super::Tag::deserialize(inp)? 
!= super::Tag::CellName {\n\n return issue_error();\n\n }\n\n let res = String::deserialize(inp)?;\n\n super::crc8_blob(checksum, res.as_bytes());\n\n Ok(res.into())\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for RowKeyValue {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n", "file_path": "src/plainbuffer/serde.rs", "rank": 36, "score": 3.927758326033362 }, { "content": "use std::cmp::min;\n\n\n\nuse rand::Rng;\n\n\n\nuse crate::Action;\n\nuse crate::{Error, ErrorCode};\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum RetryCategory {\n\n Retriable,\n\n Unretriable,\n\n Depends,\n\n}\n\n\n\nimpl RetryCategory {\n\n pub fn calc(err: &Error) -> RetryCategory {\n\n match err.code {\n\n ErrorCode::ClientUnknown => RetryCategory::Unretriable,\n\n ErrorCode::CouldntResolveHost => RetryCategory::Retriable,\n\n ErrorCode::CouldntConnect => RetryCategory::Retriable,\n", "file_path": "src/retry.rs", "rank": 37, "score": 3.898600271457263 }, { "content": "use bytes::{Bytes, Buf, BufMut};\n\nuse crate::{Error, ErrorCode};\n\nuse crate::types::*;\n\nuse std::convert::TryFrom;\n\n\n\npub(crate) trait Serde {\n\n fn serialize(&self, out: &mut dyn BufMut);\n\n fn deserialize(inp: &mut dyn Buf) -> Result<Self, Error> where Self: Sized;\n\n}\n\n\n\npub(crate) trait SerdeWithCrc8 {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8);\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> where Self: Sized;\n\n}\n\n\n", "file_path": "src/plainbuffer/serde.rs", "rank": 38, "score": 3.8664663917012847 }, { "content": "\n\n fn try_from(v: Vec<u8>) -> Result<Self, Self::Error> {\n\n super::new_response::<Self, pb::DeleteTableResponse>(&v)\n\n }\n\n}\n\n\n\nimpl super::Request for DeleteTableRequest {\n\n fn action(&self) -> Action {\n\n Action::DeleteTable\n\n }\n\n\n\n fn path(&self) -> String {\n\n self.action().to_string()\n\n }\n\n}\n\n\n\nimpl super::Response for DeleteTableResponse {\n\n 
fn base_mut_ref(&mut self) -> &mut BaseResponse {\n\n &mut self.base\n\n }\n\n}\n", "file_path": "src/types/delete_table.rs", "rank": 39, "score": 3.865601299066885 }, { "content": "impl SerdeWithCrc8 for ExtendedRowKeyColumn {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n let mut cell_chksum = 0u8;\n\n super::Tag::Cell.serialize(out);\n\n self.name.serialize_crc8(out, &mut cell_chksum);\n\n self.value.serialize_crc8(out, &mut cell_chksum);\n\n super::Tag::CellChecksum.serialize(out);\n\n cell_chksum.serialize(out);\n\n super::crc8_u8(checksum, cell_chksum);\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> {\n\n if super::Tag::deserialize(inp)? != super::Tag::Cell {\n\n return issue_error();\n\n }\n\n let mut real_chksum = 0u8;\n\n let name = Name::deserialize_crc8(inp, &mut real_chksum)?;\n", "file_path": "src/plainbuffer/serde.rs", "rank": 40, "score": 3.8482938316103787 }, { "content": " }\n\n super::VariantType::Double => {\n\n let x = u64::deserialize(inp)?;\n\n super::crc8_u64(checksum, x);\n\n Ok(AttrValue::Float(f64::from_le_bytes(x.to_le_bytes())))\n\n }\n\n _ => unimplemented!()\n\n }\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for Attribute {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n super::Tag::Cell.serialize(out);\n\n let mut cell_chksum = 0u8;\n\n self.name.serialize_crc8(out, &mut cell_chksum);\n\n self.value.serialize_crc8(out, &mut cell_chksum);\n\n if let AttrTimestamp::ClientAttach(tm) = &self.timestamp {\n\n super::Tag::CellTimestamp.serialize(out);\n\n let msecs = tm.to_millis();\n", "file_path": "src/plainbuffer/serde.rs", "rank": 41, "score": 3.784436997268865 }, { "content": "use crate::protocol;\n\nuse quick_protobuf::{MessageRead, BytesReader};\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Error {\n\n pub code: ErrorCode,\n\n pub message: String,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub 
enum ErrorCode {\n\n ClientUnknown,\n\n CouldntResolveHost,\n\n CouldntConnect,\n\n OperationTimeout,\n\n SslHandshakeFail,\n\n WriteRequestFail,\n\n CorruptedResponse,\n\n NoAvailableConnection,\n", "file_path": "src/error.rs", "rank": 42, "score": 3.7209701471944383 }, { "content": " }\n\n}\n\n\n\nimpl super::Request for CreateTableRequest {\n\n fn action(&self) -> Action {\n\n Action::CreateTable\n\n }\n\n\n\n fn path(&self) -> String {\n\n self.action().to_string()\n\n }\n\n}\n\n\n\nimpl super::Response for CreateTableResponse {\n\n fn base_mut_ref(&mut self) -> &mut BaseResponse {\n\n &mut self.base\n\n }\n\n}\n", "file_path": "src/types/create_table.rs", "rank": 43, "score": 3.717164588929572 }, { "content": "\n\nimpl TryFrom<Vec<u8>> for ListTableResponse {\n\n type Error = Error;\n\n\n\n fn try_from(v: Vec<u8>) -> Result<Self, Error> {\n\n super::new_response::<Self, pb::ListTableResponse>(&v)\n\n }\n\n}\n\n\n\nimpl super::Request for ListTableRequest {\n\n fn action(&self) -> Action {\n\n Action::ListTable\n\n }\n\n\n\n fn path(&self) -> String {\n\n self.action().to_string()\n\n }\n\n}\n\n\n\nimpl super::Response for ListTableResponse {\n\n fn base_mut_ref(&mut self) -> &mut BaseResponse {\n\n &mut self.base\n\n }\n\n}\n", "file_path": "src/types/list_table.rs", "rank": 44, "score": 3.702065744415987 }, { "content": " x.serialize_crc8(out, checksum);\n\n }\n\n AttrValue::Int(x) => {\n\n ((std::mem::size_of::<i64>() + 1) as u32).serialize(out);\n\n super::VariantType::Integer.serialize_crc8(out, checksum);\n\n x.serialize_crc8(out, checksum);\n\n }\n\n AttrValue::Blob(x) => {\n\n ((x.len() + 1) as u32).serialize(out);\n\n super::VariantType::Blob.serialize_crc8(out, checksum);\n\n x.serialize_crc8(out, checksum);\n\n }\n\n AttrValue::Bool(x) => {\n\n 2u32.serialize(out);\n\n super::VariantType::Boolean.serialize_crc8(out, checksum);\n\n let x = if *x {\n\n 1u8\n\n } else {\n\n 0u8\n\n };\n", "file_path": "src/plainbuffer/serde.rs", "rank": 45, "score": 
3.702065744415987 }, { "content": "\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> {\n\n if super::Tag::deserialize(inp)? != super::Tag::RowKey {\n\n return issue_error();\n\n }\n\n let mut res = vec![];\n\n while peek_and_expect(inp, super::Tag::Cell) {\n\n let x = ExtendedRowKeyColumn::deserialize_crc8(inp, checksum)?;\n\n println!(\"{:?}\", x);\n\n res.push(x);\n\n }\n\n Ok(ExtendedRowKey::new(res))\n\n }\n\n}\n\n\n\nimpl Serde for Row {\n\n fn serialize(&self, out: &mut dyn BufMut) {\n", "file_path": "src/plainbuffer/serde.rs", "rank": 46, "score": 3.625379302997583 }, { "content": " x.serialize_crc8(out, checksum);\n\n }\n\n ExtendedRowKeyValue::Str(x) => {\n\n ((x.len() + std::mem::size_of::<u32>() + 1) as u32).serialize(out);\n\n super::VariantType::String.serialize_crc8(out, checksum);\n\n x.serialize_crc8(out, checksum);\n\n }\n\n ExtendedRowKeyValue::Blob(x) => {\n\n ((x.len() + std::mem::size_of::<u32>() + 1) as u32).serialize(out);\n\n super::VariantType::Blob.serialize_crc8(out, checksum);\n\n x.serialize_crc8(out, checksum);\n\n }\n\n ExtendedRowKeyValue::InfMin => {\n\n 1u32.serialize(out);\n\n super::VariantType::InfMin.serialize_crc8(out, checksum);\n\n }\n\n ExtendedRowKeyValue::InfMax => {\n\n 1u32.serialize(out);\n\n super::VariantType::InfMax.serialize_crc8(out, checksum);\n\n }\n", "file_path": "src/plainbuffer/serde.rs", "rank": 47, "score": 3.622864180386612 }, { "content": " let x = i64::deserialize_crc8(inp, checksum)?;\n\n real_payload_len += std::mem::size_of_val(&x);\n\n Ok(ExtendedRowKeyValue::Int(x))\n\n }\n\n super::VariantType::String => {\n\n let s = String::deserialize_crc8(inp, checksum)?;\n\n real_payload_len += s.len();\n\n real_payload_len += std::mem::size_of::<u32>();\n\n Ok(ExtendedRowKeyValue::Str(s))\n\n }\n\n super::VariantType::Blob => {\n\n let b = Bytes::deserialize_crc8(inp, checksum)?;\n\n real_payload_len += b.len();\n\n real_payload_len += 
std::mem::size_of::<u32>();\n\n Ok(ExtendedRowKeyValue::Blob(b))\n\n }\n\n super::VariantType::InfMin => Ok(ExtendedRowKeyValue::InfMin),\n\n super::VariantType::InfMax => Ok(ExtendedRowKeyValue::InfMax),\n\n super::VariantType::AutoIncrement => {\n\n Ok(ExtendedRowKeyValue::AutoIncr)\n", "file_path": "src/plainbuffer/serde.rs", "rank": 48, "score": 3.576949278245594 }, { "content": " }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Vec<u8>, Error> {\n\n let res = Vec::<u8>::deserialize(inp)?;\n\n match u32::try_from(res.len()) {\n\n Ok(x) => {\n\n super::crc8_u32(checksum, x);\n\n }\n\n _ => {\n\n return issue_error();\n\n }\n\n }\n\n super::crc8_blob(checksum, &res);\n\n Ok(res)\n\n }\n\n}\n\n\n", "file_path": "src/plainbuffer/serde.rs", "rank": 49, "score": 3.5476317445020413 }, { "content": " super::Tag::Cell.serialize(out);\n\n self.name.serialize_crc8(out, &mut cell_chksum);\n\n self.value.serialize_crc8(out, &mut cell_chksum);\n\n super::Tag::CellChecksum.serialize(out);\n\n cell_chksum.serialize(out);\n\n super::crc8_u8(checksum, cell_chksum);\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> {\n\n let ext = ExtendedRowKeyColumn::deserialize_crc8(inp, checksum)?;\n\n match RowKeyColumn::try_from(ext) {\n\n Ok(x) => Ok(x),\n\n Err(_) => issue_error(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/plainbuffer/serde.rs", "rank": 50, "score": 3.5338762040485623 }, { "content": " }\n\n _ => unimplemented!()\n\n };\n\n if exp_payload_len != real_payload_len {\n\n println!(\"here: {} {}\", exp_payload_len, real_payload_len);\n\n return issue_error();\n\n }\n\n res\n\n }\n\n}\n\n\n\nimpl Serde for super::Tag {\n\n fn serialize(&self, out: &mut dyn BufMut) {\n\n (*self as u8).serialize(out)\n\n }\n\n\n\n fn deserialize(inp: &mut dyn Buf) -> Result<super::Tag, Error> {\n\n if !inp.has_remaining() {\n\n return issue_error();\n\n }\n", "file_path": 
"src/plainbuffer/serde.rs", "rank": 51, "score": 3.417734731888053 }, { "content": "\n\n fn try_from(v: Vec<u8>) -> Result<Self, Error> {\n\n new_response::<Self, pb::PutRowResponse>(&v)\n\n }\n\n}\n\n\n\nimpl super::Request for PutRowRequest {\n\n fn action(&self) -> Action {\n\n Action::PutRow\n\n }\n\n\n\n fn path(&self) -> String {\n\n self.action().to_string()\n\n }\n\n}\n\n\n\nimpl super::Response for PutRowResponse {\n\n fn base_mut_ref(&mut self) -> &mut BaseResponse {\n\n &mut self.base\n\n }\n\n}\n", "file_path": "src/types/put_row.rs", "rank": 52, "score": 3.3928885181667345 }, { "content": " unimplemented!()\n\n }\n\n}\n\n\n\nimpl<'a> SerdeWithCrc8 for &'a [u8] {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n self.serialize(out);\n\n super::crc8_u32(checksum, self.len() as u32);\n\n super::crc8_blob(checksum, self);\n\n }\n\n\n\n fn deserialize_crc8(\n\n _inp: &mut dyn Buf,\n\n _checksum: &mut u8,\n\n ) -> Result<&'a [u8], Error> {\n\n unimplemented!()\n\n }\n\n}\n\n\n\nimpl Serde for Vec<u8> {\n", "file_path": "src/plainbuffer/serde.rs", "rank": 53, "score": 3.3928885181667345 }, { "content": " println!(\"name: {:?}\", name);\n\n let value = ExtendedRowKeyValue::deserialize_crc8(inp, &mut real_chksum)?;\n\n println!(\"value: {:?}\", value);\n\n deser_check_checksum(inp, real_chksum)?;\n\n println!(\"real checksum: {:x}\", real_chksum);\n\n super::crc8_u8(checksum, real_chksum);\n\n Ok(ExtendedRowKeyColumn{\n\n name,\n\n value,\n\n })\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for AttrValue {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n super::Tag::CellValue.serialize(out);\n\n match self {\n\n AttrValue::Str(x) => {\n\n ((x.len() + 1) as u32).serialize(out);\n\n super::VariantType::String.serialize_crc8(out, checksum);\n", "file_path": "src/plainbuffer/serde.rs", "rank": 54, "score": 3.3162538674406017 }, { "content": " Some(DateTime::from_millis(msecs))\n\n } else {\n\n None\n\n };\n\n 
deser_check_checksum(inp, cell_chksum)?;\n\n super::crc8_u8(checksum, cell_chksum);\n\n Ok(Attribute{\n\n name,\n\n value,\n\n timestamp: AttrTimestamp::from(tm),\n\n })\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for RowKey {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n super::Tag::RowKey.serialize(out);\n\n self.iter()\n\n .for_each(|x| {\n\n x.serialize_crc8(out, checksum);\n", "file_path": "src/plainbuffer/serde.rs", "rank": 55, "score": 3.2510804841257097 }, { "content": "\n\nuse quick_protobuf::{MessageRead, MessageWrite, BytesReader, Writer, WriterBackend, Result};\n\nuse quick_protobuf::sizeofs::*;\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum PrimaryKeyType {\n\n INTEGER = 1,\n\n STRING = 2,\n\n BINARY = 3,\n\n}\n\n\n\nimpl Default for PrimaryKeyType {\n\n fn default() -> Self {\n\n PrimaryKeyType::INTEGER\n\n }\n\n}\n\n\n\nimpl From<i32> for PrimaryKeyType {\n\n fn from(i: i32) -> Self {\n\n match i {\n", "file_path": "src/protocol.rs", "rank": 56, "score": 3.2227825078389003 }, { "content": "use bytes::Bytes;\n\nuse crate::Error;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Credential {\n\n pub id: Bytes,\n\n pub secret: Bytes,\n\n pub token: Option<Bytes>,\n\n}\n\n\n\nimpl Credential {\n\n pub fn new<P, Q>(\n\n id: P,\n\n secret: Q,\n\n ) -> Result<Credential, Error> \n\n where\n\n P: ToString,\n\n Q: ToString,\n\n {\n\n let res = Credential{\n\n id: Bytes::from(id.to_string()),\n\n secret: Bytes::from(secret.to_string()),\n\n token: None,\n\n };\n\n Ok(res)\n\n }\n\n}\n", "file_path": "src/credential.rs", "rank": 57, "score": 3.179100214810963 }, { "content": "use crate::{Error, ErrorCode};\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum VariantType {\n\n Integer = 0x0,\n\n Double = 0x1,\n\n Boolean = 0x2,\n\n String = 0x3,\n\n Null = 0x6,\n\n Blob = 0x7,\n\n InfMin = 0x9,\n\n InfMax = 0xa,\n\n AutoIncrement = 0xb,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, 
PartialEq)]\n\npub enum Tag {\n\n None = 0,\n\n RowKey = 0x1,\n", "file_path": "src/plainbuffer/consts.rs", "rank": 58, "score": 3.1577001385706494 }, { "content": "use tablestore as ots;\n\nmod utils;\n\nuse utils::*;\n\n\n\n#[tokio::test]\n\nasync fn create_delete() -> Result<(), ots::Error> {\n\n let (ep, cred) = fetch_endpoint_credential()?;\n\n let opts = ots::ClientOptions::default();\n\n let client = ots::Client::new(ep, cred, opts)?;\n\n let table_name = \"create_delete\".to_string();\n\n {\n\n let meta = ots::TableMeta{\n\n name: table_name.clone().into(),\n\n schema: vec![\n\n ots::PkeyColumnSchema{\n\n name: ots::Name::new(\"haha\"),\n\n type_: ots::PkeyValueType::Str,\n\n }\n\n ]\n\n };\n", "file_path": "tests/test_table_operations.rs", "rank": 59, "score": 2.9963414373803827 }, { "content": "use tablestore as ots;\n\nmod utils;\n\nuse utils::*;\n\n\n\n#[tokio::test]\n\nasync fn concurrency() -> Result<(), ots::Error> {\n\n let (ep, cred) = fetch_endpoint_credential()?;\n\n let mut opts = ots::ClientOptions::default();\n\n opts.concurrency = 2;\n\n let client = ots::Client::new(ep, cred, opts)?;\n\n let x0 = client.list_table();\n\n let x1 = client.list_table();\n\n let x2 = client.list_table();\n\n let (r0, r1, r2) = tokio::join!(x0, x1, x2);\n\n assert!(r0.is_ok());\n\n assert!(r1.is_ok());\n\n assert!(r2.is_err());\n\n Ok(())\n\n}\n", "file_path": "tests/test_concurrency.rs", "rank": 60, "score": 2.940003541273648 }, { "content": " ExtendedRowKeyValue::from(self.clone()).serialize_crc8(out, checksum);\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<RowKeyValue, Error> {\n\n let res = ExtendedRowKeyValue::deserialize_crc8(inp, checksum)?;\n\n let res = RowKeyValue::try_from(res)?;\n\n Ok(res)\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for ExtendedRowKeyValue {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n super::Tag::CellValue.serialize(out);\n\n match self {\n\n 
ExtendedRowKeyValue::Int(x) => {\n\n ((std::mem::size_of::<i64>() + 1) as u32).serialize(out);\n\n super::VariantType::Integer.serialize_crc8(out, checksum);\n", "file_path": "src/plainbuffer/serde.rs", "rank": 61, "score": 2.6891139471769634 }, { "content": " res.to_le_bytes().iter()\n\n .for_each(|x| {\n\n super::crc8_u8(checksum, *x);\n\n });\n\n Ok(res)\n\n }\n\n}\n\n\n\nimpl Serde for i64 {\n\n fn serialize(&self, out: &mut dyn BufMut) {\n\n (*self as u64).serialize(out)\n\n }\n\n\n\n fn deserialize(inp: &mut dyn Buf) -> Result<i64, Error> {\n\n let x = u64::deserialize(inp)?;\n\n Ok(x as i64)\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for i64 {\n", "file_path": "src/plainbuffer/serde.rs", "rank": 62, "score": 2.5640720756483786 }, { "content": "#[cfg(test)] use quickcheck::{Arbitrary, Gen};\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct Name(String);\n\n\n\nimpl Name {\n\n pub fn new<T: ToString>(name: T) -> Self {\n\n Self(name.to_string())\n\n }\n\n}\n\n\n\nimpl From<String> for Name {\n\n fn from(name: String) -> Name {\n\n Name(name)\n\n }\n\n}\n\n\n\nimpl From<Name> for String {\n\n fn from(x: Name) -> String {\n\n x.0\n", "file_path": "src/types/name.rs", "rank": 63, "score": 2.4949238684785726 }, { "content": "use crate::Error;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Endpoint {\n\n pub address: String,\n\n pub instance: String,\n\n}\n\n\n\nimpl Endpoint {\n\n pub fn new<P, Q>(\n\n address: P,\n\n instance: Q,\n\n ) -> Result<Endpoint, Error> \n\n where\n\n P: ToString,\n\n Q: ToString,\n\n {\n\n let res = Endpoint{\n\n address: address.to_string(),\n\n instance: instance.to_string(),\n\n };\n\n Ok(res)\n\n }\n\n}\n", "file_path": "src/endpoint.rs", "rank": 64, "score": 2.4686641826711253 }, { "content": " return issue_error();\n\n }\n\n Ok(inp.get_u64_le())\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for u64 {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n self.serialize(out);\n\n self.to_le_bytes().iter()\n\n 
.for_each(|x| {\n\n super::crc8_u8(checksum, *x);\n\n });\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<u64, Error> {\n\n let res = u64::deserialize(inp)?;\n", "file_path": "src/plainbuffer/serde.rs", "rank": 65, "score": 2.380942125056181 }, { "content": " });\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<Self, Error> {\n\n let ext = ExtendedRowKey::deserialize_crc8(inp, checksum)?;\n\n RowKey::try_from(ext)\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for ExtendedRowKey {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n super::Tag::RowKey.serialize(out);\n\n self.iter()\n\n .for_each(|x| {\n\n x.serialize_crc8(out, checksum);\n\n });\n\n }\n", "file_path": "src/plainbuffer/serde.rs", "rank": 66, "score": 2.3102278364885835 }, { "content": "\n\nimpl From<pb::ListTableResponse> for ListTableResponse {\n\n fn from(x: pb::ListTableResponse) -> ListTableResponse {\n\n ListTableResponse{\n\n base: super::BaseResponse::default(),\n\n tables: x.table_names\n\n .into_iter()\n\n .map(|x| {\n\n x.into()\n\n })\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<ListTableRequest> for Bytes {\n\n fn from(x: ListTableRequest) -> Bytes {\n\n serialize_request::<ListTableRequest, pb::ListTableRequest>(x)\n\n }\n\n}\n", "file_path": "src/types/list_table.rs", "rank": 67, "score": 2.3102278364885835 }, { "content": "use crate::protocol as pb;\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum InReturn {\n\n Nothing,\n\n RowKey,\n\n}\n\n\n\nimpl From<InReturn> for pb::ReturnType {\n\n fn from(x: InReturn) -> Self {\n\n match x {\n\n InReturn::Nothing => pb::ReturnType::RT_NONE,\n\n InReturn::RowKey => pb::ReturnType::RT_PK,\n\n }\n\n }\n\n}\n\n\n\nimpl From<pb::ReturnType> for InReturn {\n\n fn from(x: pb::ReturnType) -> Self {\n\n match x {\n", "file_path": "src/types/in_return.rs", "rank": 68, "score": 2.276941259700465 }, { "content": " }\n\n 
}\n\n}\n\n\n\nimpl From<pb::PutRowResponse> for PutRowResponse {\n\n fn from(_: pb::PutRowResponse) -> PutRowResponse {\n\n PutRowResponse{\n\n base: super::BaseResponse::default()\n\n }\n\n }\n\n}\n\n\n\nimpl From<PutRowRequest> for Bytes {\n\n fn from(x: PutRowRequest) -> Bytes {\n\n serialize_request::<PutRowRequest, pb::PutRowRequest>(x)\n\n }\n\n}\n\n\n\nimpl TryFrom<Vec<u8>> for PutRowResponse {\n\n type Error = Error;\n", "file_path": "src/types/put_row.rs", "rank": 69, "score": 2.243592839439842 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<pb::DeleteTableResponse> for DeleteTableResponse {\n\n fn from(_x: pb::DeleteTableResponse) -> DeleteTableResponse {\n\n DeleteTableResponse{\n\n base: super::BaseResponse::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<DeleteTableRequest> for Bytes {\n\n fn from(x: DeleteTableRequest) -> Bytes {\n\n serialize_request::<DeleteTableRequest, pb::DeleteTableRequest>(x)\n\n }\n\n}\n\n\n\nimpl TryFrom<Vec<u8>> for DeleteTableResponse {\n\n type Error = Error;\n", "file_path": "src/types/delete_table.rs", "rank": 70, "score": 2.243592839439842 }, { "content": "use std::string::ToString;\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum Action {\n\n CreateTable,\n\n DeleteTable,\n\n ListTable,\n\n PutRow,\n\n}\n\n\n\nimpl ToString for Action {\n\n fn to_string(&self) -> String {\n\n match self {\n\n Action::CreateTable => \"/CreateTable\".to_string(),\n\n Action::DeleteTable => \"/DeleteTable\".to_string(),\n\n Action::ListTable => \"/ListTable\".to_string(),\n\n Action::PutRow => \"/PutRow\".to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/types/action.rs", "rank": 71, "score": 2.2335749404514433 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub struct CreateTableResponse {\n\n pub base: super::BaseResponse,\n\n}\n\n\n\nimpl From<CreateTableRequest> for pb::CreateTableRequest {\n\n fn from(x: CreateTableRequest) -> pb::CreateTableRequest {\n\n let table_meta = x.table_meta.into();\n\n let (opts, cu) = 
x.options.into();\n\n pb::CreateTableRequest{\n\n table_meta,\n\n reserved_throughput: pb::ReservedThroughput{\n\n capacity_unit: cu,\n\n },\n\n table_options: Some(opts),\n\n partitions: vec![],\n\n }\n\n }\n\n}\n", "file_path": "src/types/create_table.rs", "rank": 72, "score": 2.1806940469116425 }, { "content": " condition: Condition{\n\n row_exist: RowExistenceExpectation::Ignore,\n\n },\n\n in_return: InReturn::Nothing,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PutRowResponse {\n\n pub base: super::BaseResponse,\n\n}\n\n\n\nimpl From<PutRowRequest> for pb::PutRowRequest {\n\n fn from(x: PutRowRequest) -> pb::PutRowRequest {\n\n pb::PutRowRequest{\n\n table_name: x.table_name.into(),\n\n row: x.row.to_pbuf(),\n\n condition: x.condition.into(),\n\n return_content: Some(x.in_return.into()),\n", "file_path": "src/types/put_row.rs", "rank": 73, "score": 2.1806940469116425 }, { "content": " fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n self.as_bytes().serialize_crc8(out, checksum);\n\n }\n\n\n\n fn deserialize_crc8(\n\n inp: &mut dyn Buf,\n\n checksum: &mut u8,\n\n ) -> Result<String, Error> {\n\n let raw = Vec::<u8>::deserialize_crc8(inp, checksum)?;\n\n match String::from_utf8(raw) {\n\n Ok(x) => Ok(x),\n\n Err(_) => issue_error()\n\n }\n\n }\n\n}\n\n\n\nimpl SerdeWithCrc8 for Name {\n\n fn serialize_crc8(&self, out: &mut dyn BufMut, checksum: &mut u8) {\n\n super::Tag::CellName.serialize(out);\n\n let name = <&str>::from(self);\n", "file_path": "src/plainbuffer/serde.rs", "rank": 74, "score": 2.029006335663452 }, { "content": "\n\nimpl From<pb::CreateTableResponse> for CreateTableResponse {\n\n fn from(_: pb::CreateTableResponse) -> CreateTableResponse {\n\n CreateTableResponse{\n\n base: super::BaseResponse::default()\n\n }\n\n }\n\n}\n\n\n\nimpl From<CreateTableRequest> for Bytes {\n\n fn from(x: CreateTableRequest) -> Bytes {\n\n serialize_request::<CreateTableRequest, pb::CreateTableRequest>(x)\n\n 
}\n\n}\n\n\n\nimpl TryFrom<Vec<u8>> for CreateTableResponse {\n\n type Error = Error;\n\n\n\n fn try_from(v: Vec<u8>) -> Result<Self, Error> {\n\n new_response::<Self, pb::CreateTableResponse>(&v)\n", "file_path": "src/types/create_table.rs", "rank": 75, "score": 2.0115163585050304 }, { "content": "use crate::{RetryStrategy, DeadlineRetryStrategy};\n\n\n\n#[derive(Clone)]\n\npub struct ClientOptions {\n\n pub concurrency: i64,\n\n pub retry_strategy: Box<dyn RetryStrategy + Send + Sync>,\n\n}\n\n\n\nimpl Default for ClientOptions {\n\n fn default() -> Self {\n\n Self{\n\n concurrency: 1000,\n\n retry_strategy: Box::new(DeadlineRetryStrategy::new(std::time::Duration::from_secs(300))),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for ClientOptions {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"ClientOptions\")\n\n .field(\"concurrency\", &self.concurrency)\n\n .finish()\n\n }\n\n}\n", "file_path": "src/client_options.rs", "rank": 76, "score": 1.9544005507041717 }, { "content": "use crate::protocol as pb;\n\n\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\n\npub enum RowExistenceExpectation {\n\n Ignore,\n\n ExpectExist,\n\n ExpectNotExist,\n\n}\n\n\n\nimpl From<pb::RowExistenceExpectation> for RowExistenceExpectation {\n\n fn from(x: pb::RowExistenceExpectation) -> Self {\n\n match x {\n\n pb::RowExistenceExpectation::IGNORE => RowExistenceExpectation::Ignore,\n\n pb::RowExistenceExpectation::EXPECT_EXIST => RowExistenceExpectation::ExpectExist,\n\n pb::RowExistenceExpectation::EXPECT_NOT_EXIST => RowExistenceExpectation::ExpectNotExist,\n\n }\n\n }\n\n}\n\n\n\nimpl From<RowExistenceExpectation> for pb::RowExistenceExpectation {\n", "file_path": "src/types/condition.rs", "rank": 77, "score": 1.8466858803618362 } ]
Rust
src/timer.rs
guerinoni/yobemag
ace316106477092b354ccf77b278899685cc5ca1
use std::cell::RefCell; use std::rc::Rc; use crate::{ clock::Clock, interrupt::{InterruptFlag, InterruptKind}, memory_device::ReadWrite, }; pub struct Timer { divider: u8, tima: u8, tma: u8, tac: u8, clock1: Clock, clock2: Clock, interrupt_flag: Rc<RefCell<InterruptFlag>>, } impl Timer { pub fn new(interrupt_flag: Rc<RefCell<InterruptFlag>>) -> Self { Self { divider: 0xAC, tima: 0, tma: 0, tac: 0, clock1: Clock::new(256), clock2: Clock::new(1024), interrupt_flag, } } } impl Timer { pub fn step(&mut self, cycles: u32) { self.divider = self.divider.wrapping_add(self.clock1.step(cycles)); if (self.tac & 0x04) != 0x00 { let n = self.clock2.step(cycles); for _ in 0..n { self.tima = self.tima.wrapping_add(1); if self.tima == 0x00 { self.tima = self.tma; self.interrupt_flag .borrow_mut() .request(InterruptKind::Timer); } } } } } impl ReadWrite for Timer { fn contains(&self, address: usize) -> bool { 0xFF04 == address || 0xFF05 == address || 0xFF06 == address || 0xFF07 == address } fn read_byte(&self, address: usize) -> Result<u8, std::io::Error> { match address { 0xFF04 => Ok(self.divider), 0xFF05 => Ok(self.tima), 0xFF06 => Ok(self.tma), 0xFF07 => Ok(self.tac), _ => Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "can't write byte here", )), } } fn read_word(&self, _address: usize) -> Result<u16, std::io::Error> { unimplemented!() } fn write_byte(&mut self, address: usize, value: u8) -> Result<(), std::io::Error> { match address { 0xFF04 => { self.divider = 0; self.clock1.reset_counter(); } 0xFF05 => self.tima = value, 0xFF06 => self.tma = value, 0xFF07 => { if (self.tac & 0x03) != (value & 0x03) { self.clock2.reset_counter(); let new_period = match value & 0x03 { 0x00 => 1024, 0x01 => 16, 0x02 => 64, 0x03 => 256, _ => { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, format!("period of clock can't be {}", value), )) } }; self.clock2.set_period(new_period); self.tima = self.tma; } self.tac = value; } _ => { return Err(std::io::Error::new( 
std::io::ErrorKind::InvalidData, "can't write byte here", )) } } Ok(()) } fn write_word(&mut self, _address: usize, _value: u16) -> Result<(), std::io::Error> { unimplemented!() } }
use std::cell::RefCell; use std::rc::Rc; use crate::{ clock::Clock, interrupt::{InterruptFlag, InterruptKind}, memory_device::ReadWrite, }; pub struct Timer { divider: u8, tima: u8, tma: u8, tac: u8, clock1: Clock, clock2: Clock, interrupt_flag: Rc<RefCell<InterruptFlag>>, } impl Timer { pub fn new(interrupt_flag: Rc<RefCell<InterruptFlag>>) -> Self { Self { divider: 0xAC, tima: 0, tma: 0, tac: 0, clock1: Clock::new(256), clock2: Clock::new(1024), interrupt_flag, } } } impl Timer { pub fn step(&mut self, cycles: u32) { self.divider = self.divider.wrapping_add(self.clock1.step(cycles)); if (self.tac & 0x04) != 0x00 { let n = self.clock2.step(cycles);
} impl ReadWrite for Timer { fn contains(&self, address: usize) -> bool { 0xFF04 == address || 0xFF05 == address || 0xFF06 == address || 0xFF07 == address } fn read_byte(&self, address: usize) -> Result<u8, std::io::Error> { match address { 0xFF04 => Ok(self.divider), 0xFF05 => Ok(self.tima), 0xFF06 => Ok(self.tma), 0xFF07 => Ok(self.tac), _ => Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "can't write byte here", )), } } fn read_word(&self, _address: usize) -> Result<u16, std::io::Error> { unimplemented!() } fn write_byte(&mut self, address: usize, value: u8) -> Result<(), std::io::Error> { match address { 0xFF04 => { self.divider = 0; self.clock1.reset_counter(); } 0xFF05 => self.tima = value, 0xFF06 => self.tma = value, 0xFF07 => { if (self.tac & 0x03) != (value & 0x03) { self.clock2.reset_counter(); let new_period = match value & 0x03 { 0x00 => 1024, 0x01 => 16, 0x02 => 64, 0x03 => 256, _ => { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, format!("period of clock can't be {}", value), )) } }; self.clock2.set_period(new_period); self.tima = self.tma; } self.tac = value; } _ => { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "can't write byte here", )) } } Ok(()) } fn write_word(&mut self, _address: usize, _value: u16) -> Result<(), std::io::Error> { unimplemented!() } }
for _ in 0..n { self.tima = self.tima.wrapping_add(1); if self.tima == 0x00 { self.tima = self.tma; self.interrupt_flag .borrow_mut() .request(InterruptKind::Timer); } } } }
function_block-function_prefix_line
[ { "content": "/// since gameboy check for non original games when loading cartridge.\n\npub fn valid_checksum(data: &[u8]) -> Result<(), std::io::Error> {\n\n let checksum: Wrapping<u8> = data[0x134..0x14D]\n\n .iter()\n\n .cloned()\n\n .fold(Wrapping(0), |acc, v| acc - Wrapping(v) - Wrapping(1));\n\n\n\n if checksum.0 != data[0x14D] {\n\n return Err(std::io::Error::new(\n\n std::io::ErrorKind::InvalidInput,\n\n \"checksum not valid.\",\n\n ));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cartridge_header.rs", "rank": 0, "score": 50294.564731970284 }, { "content": "pub fn convert<T: PartialEq + From<u8> + BitXor<Output = T> + Shl<Output = T> + Clone>(\n\n bits: &[u8],\n\n) -> Result<T, ConversionError> {\n\n if bits.len() > (std::mem::size_of::<T>() * 8) {\n\n return Err(ConversionError::Overflow);\n\n }\n\n if bits.iter().filter(|&&bit| bit != 0 && bit != 1).count() > 0 {\n\n return Err(ConversionError::NonBinaryInput);\n\n }\n\n\n\n Ok(bits.iter().fold(T::from(0), |result, &bit| {\n\n (result << T::from(1)) ^ T::from(bit)\n\n }))\n\n}\n\n\n\npub struct Registers {\n\n pub a: u8,\n\n pub flags: CpuFlag,\n\n pub b: u8,\n\n pub c: u8,\n", "file_path": "src/register.rs", "rank": 1, "score": 41243.48309931891 }, { "content": "fn decode_memory_bank_type(data: &[u8]) -> MemoryBankType {\n\n match data[0x147] {\n\n 0x00 | 0x08..=0x09 => MemoryBankType::NoMemoryBank,\n\n 0x01..=0x03 => MemoryBankType::MBC1,\n\n 0x05..=0x06 => MemoryBankType::MBC2,\n\n 0x0B..=0x0D => MemoryBankType::MMM01,\n\n 0x0F..=0x13 => MemoryBankType::MBC3,\n\n 0x15..=0x17 => MemoryBankType::MBC4,\n\n 0x19..=0x1E => MemoryBankType::MBC5,\n\n _ => panic!(\"unknown memory bank type\"),\n\n }\n\n}\n\n\n", "file_path": "src/cartridge_header.rs", "rank": 2, "score": 36220.46576306707 }, { "content": "/// original games have all nintengo logo bytes inside its cartridge.\n\nfn check_logo(data: &[u8]) -> Result<(), std::io::Error> {\n\n match data[0x104..0x134].iter().cmp(NINTENDO_LOGO.iter()) {\n\n 
std::cmp::Ordering::Equal => Ok(()),\n\n std::cmp::Ordering::Less | std::cmp::Ordering::Greater => Err(std::io::Error::new(\n\n std::io::ErrorKind::InvalidInput,\n\n \"logo bytes are corrupted.\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/cartridge_header.rs", "rank": 3, "score": 35445.663398167286 }, { "content": "#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]\n\nstruct Palette {\n\n index_0: Color,\n\n index_1: Color,\n\n index_2: Color,\n\n index_3: Color,\n\n}\n\n\n\nimpl From<Palette> for u8 {\n\n fn from(p: Palette) -> Self {\n\n p.index_0 as u8 | (p.index_1 as u8) << 2 | (p.index_2 as u8) << 4 | (p.index_3 as u8) << 6\n\n }\n\n}\n\n\n\nimpl From<u8> for Palette {\n\n fn from(reg: u8) -> Self {\n\n Self {\n\n index_0: (reg & 0b11).into(),\n\n index_1: (reg >> 2 & 0b11).into(),\n\n index_2: (reg >> 4 & 0b11).into(),\n\n index_3: (reg >> 6 & 0b11).into(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gpu.rs", "rank": 4, "score": 33275.516903723874 }, { "content": "pub fn make_cartridge(filename: &str) -> Result<Box<dyn ReadWrite>, std::io::Error> {\n\n let data = fs::read(filename)?;\n\n let header = CartridgeHeader::new(&data)?;\n\n println!(\"Cartridge type {}\", header.memory_bank_type);\n\n match header.memory_bank_type {\n\n MemoryBankType::NoMemoryBank => Ok(Box::new(NoMBCartridge::new(data, header))),\n\n MemoryBankType::MBC1 => Ok(Box::new(MBC1::new(data, header))),\n\n _ => Err(std::io::Error::new(\n\n std::io::ErrorKind::InvalidData,\n\n \"no implementation for this memory bank type.\",\n\n )),\n\n }\n\n}\n", "file_path": "src/cartridge.rs", "rank": 5, "score": 28917.696081274393 }, { "content": "pub struct Clock {\n\n period: u32,\n\n counter: u32,\n\n}\n\n\n\nimpl Clock {\n\n pub fn new(period: u32) -> Self {\n\n Self { period, counter: 0 }\n\n }\n\n\n\n pub fn step(&mut self, cycles: u32) -> u8 {\n\n self.counter += cycles;\n\n let rs = self.counter / self.period;\n\n self.counter %= self.period;\n\n rs as u8\n\n }\n\n\n\n pub(crate) fn 
reset_counter(&mut self) {\n\n self.counter = 0;\n\n }\n", "file_path": "src/clock.rs", "rank": 6, "score": 28491.635288455407 }, { "content": "\n\n pub(crate) fn set_period(&mut self, new_period: u32) {\n\n self.period = new_period;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn check() {\n\n let mut clock = Clock::new(64);\n\n assert_eq!(clock.step(1), 0);\n\n assert_eq!(clock.counter, 1);\n\n\n\n assert_eq!(clock.step(2), 0);\n\n assert_eq!(clock.counter, 3);\n\n\n\n assert_eq!(clock.step(4), 0);\n", "file_path": "src/clock.rs", "rank": 7, "score": 28477.30361852123 }, { "content": " assert_eq!(clock.counter, 7);\n\n\n\n assert_eq!(clock.step(8), 0);\n\n assert_eq!(clock.counter, 15);\n\n\n\n assert_eq!(clock.step(12), 0);\n\n assert_eq!(clock.counter, 27);\n\n\n\n assert_eq!(clock.step(16), 0);\n\n assert_eq!(clock.counter, 43);\n\n }\n\n}\n", "file_path": "src/clock.rs", "rank": 8, "score": 28465.70805088127 }, { "content": "pub trait ReadWrite {\n\n fn contains(&self, address: usize) -> bool;\n\n\n\n fn read_byte(&self, address: usize) -> Result<u8, std::io::Error>;\n\n fn read_word(&self, address: usize) -> Result<u16, std::io::Error>;\n\n\n\n fn write_byte(&mut self, address: usize, value: u8) -> Result<(), std::io::Error>;\n\n fn write_word(&mut self, address: usize, value: u16) -> Result<(), std::io::Error>;\n\n}\n", "file_path": "src/memory_device.rs", "rank": 16, "score": 25932.069106695635 }, { "content": "fn main() -> Result<(), std::io::Error> {\n\n println!(\"starting yobemag...\");\n\n\n\n let args = env::args().collect::<Vec<_>>();\n\n if args.len() < 2 {\n\n return Err(std::io::Error::new(\n\n std::io::ErrorKind::InvalidInput,\n\n \"missing rom as first arg.\",\n\n ));\n\n }\n\n\n\n let rom = &args[1];\n\n println!(\"load of {}\", &rom);\n\n\n\n let mut emu = emulator::Emulator::new(rom)?;\n\n\n\n loop {\n\n emu.step();\n\n }\n\n\n\n // Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 17, "score": 
22250.043419822447 }, { "content": "use std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\nuse crate::gpu::GraphicsProcessingUnit;\n\nuse crate::hdma::{Hdma, HdmaMode};\n\nuse crate::input_output_registers::InputOutputRegisters;\n\nuse crate::internal_memory::InternalMemory;\n\nuse crate::interrupt::InterruptFlag;\n\nuse crate::memory_device::ReadWrite;\n\nuse crate::serial_data_transfer::SerialDataTransfer;\n\nuse crate::sound::Sound;\n\nuse crate::timer::Timer;\n\n\n\n#[derive(Clone, Copy, Eq, PartialEq)]\n\npub enum Speed {\n\n Normal = 0x01,\n\n Double = 0x02,\n\n}\n\n\n\nimpl From<Speed> for u32 {\n", "file_path": "src/mmu.rs", "rank": 18, "score": 20.245988415581415 }, { "content": "use crate::memory_device::ReadWrite;\n\n\n\n#[derive(Default)]\n\npub struct SerialDataTransfer {\n\n // Before a transfer, it holds the next byte that will go out: 0xFF01\n\n // During a transfer, it has a blend of the outgoing and incoming bytes.\n\n // Each cycle, the leftmost bit is shifted\n\n // out (and over the wire) and the incoming bit is shifted in from the other side.\n\n data: u8,\n\n\n\n // Bit 0 - Shift Clock (0=External Clock, 1=Internal Clock)\n\n // Bit 1 - Clock Speed (0=Normal, 1=Fast) ** CGB Mode Only **\n\n // Bit 7 - Transfer Start Flag (0=No transfer is in progress or requested, 1=Transfer in progress, or requested)\n\n control: u8,\n\n\n\n debug_msg: String,\n\n}\n\n\n\nimpl SerialDataTransfer {\n\n pub fn print_serial_debug(&mut self) {\n", "file_path": "src/serial_data_transfer.rs", "rank": 19, "score": 20.045099415828602 }, { "content": " hdma: Hdma::default(),\n\n }\n\n }\n\n\n\n pub fn step(&mut self, cycles: u32) {\n\n self.serial.print_serial_debug();\n\n let cpu_divider: u32 = self.speed.into();\n\n let vram_cycles = self.run_dma();\n\n let gpu_cycles = cycles / cpu_divider + vram_cycles;\n\n let cpu_cycles = cycles + vram_cycles * cpu_divider;\n\n self.timer.step(cpu_cycles);\n\n self.gpu.step(gpu_cycles);\n\n }\n\n\n\n // run_dma_hrampart:\n\n // ldh 
($FF00+c), a\n\n // wait:\n\n // dec b\n\n // jr nz,wait\n\n // ret\n", "file_path": "src/mmu.rs", "rank": 20, "score": 18.237160165623436 }, { "content": "use crate::cartridge_header::*;\n\nuse crate::memory_device::*;\n\nuse std::fs;\n\n\n\n#[allow(dead_code)]\n\npub struct NoMBCartridge {\n\n header: CartridgeHeader,\n\n\n\n // 0x0150-0x3FFF\n\n rom: Vec<u8>,\n\n}\n\n\n\nimpl NoMBCartridge {\n\n fn new(rom: Vec<u8>, header: CartridgeHeader) -> NoMBCartridge {\n\n NoMBCartridge { header, rom }\n\n }\n\n}\n\n\n\nimpl ReadWrite for NoMBCartridge {\n\n fn contains(&self, address: usize) -> bool {\n", "file_path": "src/cartridge.rs", "rank": 21, "score": 17.850175082945007 }, { "content": "use crate::{cartridge::make_cartridge, cpu::CentralProcessingUnit, mmu::MemoryManagmentUnit};\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\npub struct Emulator {\n\n mmu: Rc<RefCell<MemoryManagmentUnit>>,\n\n cpu: CentralProcessingUnit,\n\n}\n\n\n\nimpl Emulator {\n\n pub fn new(filename: &str) -> Result<Emulator, std::io::Error> {\n\n let device = make_cartridge(filename)?;\n\n let mmu = Rc::new(RefCell::new(MemoryManagmentUnit::new(device)));\n\n let cpu = CentralProcessingUnit::new(mmu.clone());\n\n Ok(Emulator { mmu, cpu })\n\n }\n\n\n\n pub fn step(&mut self) {\n\n if self.cpu.need_toggle_speed() {\n\n self.mmu.borrow_mut().toggle_speed();\n\n }\n\n\n\n let clock_cycles = self.cpu.step();\n\n self.mmu.borrow_mut().step(clock_cycles);\n\n }\n\n}\n", "file_path": "src/emulator.rs", "rank": 22, "score": 17.527456449400887 }, { "content": "use core::panic;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\nuse crate::{memory_device::ReadWrite, opcodes::*, prefix_opcodes::PrefixOpCode, register::*};\n\n\n\n// One cycle of the master clock is called a \"clock\", or a \"t-cycle\".\n\n// It can either equal 0.25 µs, or 0.125 µs in CGB double-speed.\n\n// The duration of a nop instruction is called a \"nop\" or \"m-cycle\", and equals four clocks.\n\npub struct 
CentralProcessingUnit {\n\n registers: Registers,\n\n mmu: Rc<RefCell<dyn ReadWrite>>,\n\n stop: bool,\n\n halt: bool,\n\n\n\n // Interrupt master enable flag is reset by DI and prohibits all interrupts.\n\n // It is set by EI and acknowledges the interrupt setting by the IE register.\n\n ime: bool,\n\n}\n\n\n", "file_path": "src/cpu.rs", "rank": 23, "score": 15.9756906805189 }, { "content": " pub fn step(&mut self, cycles: u32) -> u8 {\n\n self.h_blank = false;\n\n let _lol = cycles;\n\n 0\n\n }\n\n}\n\n\n\nimpl ReadWrite for GraphicsProcessingUnit {\n\n fn contains(&self, address: usize) -> bool {\n\n (0x8000..=0x9FFF).contains(&address)\n\n || (0xFE00..=0xFE9F).contains(&address)\n\n || 0xFF40 == address\n\n || 0xFF41 == address\n\n || 0xFF42 == address\n\n || 0xFF43 == address\n\n || 0xFF44 == address\n\n || 0xFF47 == address\n\n || 0xFF48 == address\n\n || 0xFF49 == address\n\n || 0xFF4F == address\n", "file_path": "src/gpu.rs", "rank": 24, "score": 14.766847317991335 }, { "content": " use std::rc::Rc;\n\n\n\n use crate::memory_device::ReadWrite;\n\n use crate::register::{ConditionOperand, Register, RegisterWord};\n\n\n\n use super::CentralProcessingUnit;\n\n\n\n struct MockDevice {\n\n bytes: HashMap<usize, u8>,\n\n words: HashMap<usize, u16>,\n\n }\n\n\n\n impl ReadWrite for MockDevice {\n\n fn contains(&self, address: usize) -> bool {\n\n let _ = address;\n\n true\n\n }\n\n\n\n fn read_byte(&self, address: usize) -> Result<u8, std::io::Error> {\n\n Ok(self.bytes[&address])\n", "file_path": "src/cpu.rs", "rank": 25, "score": 13.99073644033818 }, { "content": "impl CentralProcessingUnit {\n\n pub fn new(mmu: Rc<RefCell<dyn ReadWrite>>) -> CentralProcessingUnit {\n\n CentralProcessingUnit {\n\n registers: Registers::new(),\n\n mmu,\n\n stop: false,\n\n halt: false,\n\n ime: false,\n\n }\n\n }\n\n\n\n pub fn need_toggle_speed(&self) -> bool {\n\n self.registers.program_counter == 0x10\n\n }\n\n\n\n pub fn step(&mut self) -> u32 {\n\n self.exec() as u32\n\n 
}\n\n\n\n fn exec(&mut self) -> u8 {\n", "file_path": "src/cpu.rs", "rank": 26, "score": 13.555664404092676 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\npub struct CpuFlag {\n\n // Carry Flag. This bit is set if a carry occurred from the last math operation or if register A is the smaller valuewhen executing the CP instruction.\n\n pub carry: bool,\n\n // Half Carry Flag. This bit is set if a carry occurred from the lowernibble in the last math operation.\n\n pub half_carry: bool,\n\n // Negative Flag. This bit is set if a subtraction was performed in the last math instruction.\n\n pub negative: bool,\n\n // Zero Flag. This bit is set when the result of a math operations zero or two values match when using the CP instruction.\n\n pub zero: bool,\n\n}\n\n\n\nimpl CpuFlag {\n\n pub fn to_u8(&self) -> u8 {\n\n let bits = [\n\n self.zero as u8,\n\n self.negative as u8,\n\n self.half_carry as u8,\n\n self.carry as u8,\n\n 0,\n", "file_path": "src/register.rs", "rank": 27, "score": 13.05650878891316 }, { "content": " pub d: u8,\n\n pub e: u8,\n\n pub h: u8,\n\n pub l: u8,\n\n pub program_counter: u16,\n\n pub stack_pointer: u16,\n\n}\n\n\n\nimpl Registers {\n\n pub fn new() -> Registers {\n\n Registers {\n\n a: 0x11,\n\n flags: CpuFlag::from_u8(0xB0),\n\n b: 0x00,\n\n c: 0x13,\n\n d: 0x00,\n\n e: 0xD8,\n\n h: 0x01,\n\n l: 0x4D,\n\n program_counter: 0x0100,\n", "file_path": "src/register.rs", "rank": 28, "score": 12.583330534603682 }, { "content": "use core::fmt;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum OpCode {\n\n // LD r, n\n\n // Load to the 8-bit register r, the immediate data n.\n\n // Clock cycles: 8\n\n LdBNext,\n\n LdCNext,\n\n LdDNext,\n\n LdENext,\n\n LdHNext,\n\n LdLNext,\n\n LdANext,\n\n\n\n // LD r, r’\n\n // Load to the 8-bit register r, data from the 8-bit register r’.\n\n // Clock cycles: 4\n\n LdBB,\n\n LdBC,\n", "file_path": "src/opcodes.rs", "rank": 29, "score": 12.461187539076684 }, { "content": "use 
crate::memory_device::ReadWrite;\n\n\n\n/// InternalMemory holds all memory banks for internal handling of the emulating job, not GPU or\n\n/// cartridge related, just internal stuff to read and write during execution.\n\npub struct InternalMemory {\n\n // working ram bank\n\n wram: [u8; 0x8000],\n\n wram_bank: u8,\n\n // high ram (zero-page): 0xFF80-0xFFFE\n\n hram: [u8; 0x007F],\n\n // interrupt flag (request) register: 0xFF0F\n\n // Bit 0: V-Blank Interrupt Request (INT 40h) (1=Request)\n\n // Bit 1: LCD STAT Interrupt Request (INT 48h) (1=Request)\n\n // Bit 2: Timer Interrupt Request (INT 50h) (1=Request)\n\n // Bit 3: Serial Interrupt Request (INT 58h) (1=Request)\n\n // Bit 4: Joypad Interrupt Request (INT 60h) (1=Request)\n\n interrupt_flag: u8,\n\n // interrupt flag enable: 0xFFFF\n\n interrupt_enable: u8,\n\n}\n", "file_path": "src/internal_memory.rs", "rank": 30, "score": 12.444362658995274 }, { "content": "impl BackgroundPaletteIndex {\n\n /// Get the background palette index's value.\n\n pub(crate) fn value(&self) -> u8 {\n\n self.value\n\n }\n\n}\n\n\n\nimpl ReadWrite for BackgroundPaletteIndex {\n\n fn contains(&self, address: usize) -> bool {\n\n address == 0xFF68\n\n }\n\n\n\n fn read_byte(&self, _address: usize) -> Result<u8, std::io::Error> {\n\n let a = if self.auto_increment { 0x80 } else { 0x00 };\n\n Ok(a | self.value)\n\n }\n\n\n\n fn read_word(&self, address: usize) -> Result<u16, std::io::Error> {\n\n Err(std::io::Error::new(\n\n std::io::ErrorKind::AddrNotAvailable,\n", "file_path": "src/background_palette_index.rs", "rank": 31, "score": 11.992068921839113 }, { "content": " Rst18,\n\n Rst20,\n\n Rst28,\n\n Rst30,\n\n Rst38,\n\n\n\n // The CPU performs no operation during this cycle.\n\n // Clock cycles: 4\n\n Noop,\n\n\n\n // STOP\n\n // CPU operation is stopped.\n\n // Clock cycles: N/A\n\n Stop,\n\n\n\n // HALT\n\n // CPU operation is suspended until an interrupt or reset is recieved. 
While in\n\n // this halted state, NOPs are executed to maintain memory refresh logic.\n\n // Clock cycles: 4 (+4 for every following NOP)\n\n Halt,\n", "file_path": "src/opcodes.rs", "rank": 32, "score": 11.658187669787942 }, { "content": " Hdma,\n\n}\n\n\n\npub struct Hdma {\n\n // These two registers specify the address at which the transfer will read data from. Normally, this should be\n\n // either in ROM, SRAM or WRAM, thus either in range 0x0000-0x7FF0 or 0xA000-0xDFF0. [Note : this has yet to be tested on\n\n // Echo RAM, OAM, FEXX, IO and HRAM]. Trying to specify a source address in VRAM will cause garbage to be copied.\n\n // The four lower bits of this address will be ignored and treated as 0.\n\n pub source: u16,\n\n // These two registers specify the address within 0x8000-0x9FF0 to which the data will be copied. Only bits 12-4 are\n\n // respected; others are ignored. The four lower bits of this address will be ignored and treated as 0.\n\n pub destination: u16,\n\n active: bool,\n\n pub mode: HdmaMode,\n\n pub remain: u8,\n\n}\n\n\n\nimpl Default for Hdma {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "src/hdma.rs", "rank": 33, "score": 11.477803057043396 }, { "content": "impl InterruptFlag {\n\n pub fn request(&mut self, flag: InterruptKind) {\n\n self.data |= 1 << flag as u8;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn request_vblank() {\n\n let mut interrupt = InterruptFlag::default();\n\n interrupt.request(InterruptKind::VBlank);\n\n assert_eq!(interrupt.data, 1);\n\n }\n\n\n\n #[test]\n\n fn request_lcd() {\n\n let mut interrupt = InterruptFlag::default();\n", "file_path": "src/interrupt.rs", "rank": 34, "score": 11.399137536983455 }, { "content": "use crate::memory_device::ReadWrite;\n\n\n\n#[derive(Default)]\n\npub struct InputOutputRegisters {\n\n /// Mask that holds input comes from gameboy button: 0xFF00.\n\n /// Bit 0 - P10 Input Right or Button A (0=Pressed) (Read Only)\n\n /// Bit 1 - 
P11 Input Left or Button B (0=Pressed) (Read Only)\n\n /// Bit 2 - P12 Input Up or Select (0=Pressed) (Read Only)\n\n /// Bit 3 - P13 Input Down or Start (0=Pressed) (Read Only)\n\n /// Bit 4 - P14 Select Direction Keys (0=Select)\n\n /// Bit 5 - P15 Select Button Keys (0=Select)\n\n /// Bit 6 and 7 unused.\n\n /// TODO: protect write on read-only register.\n\n buttons: u8,\n\n}\n\n\n\nimpl ReadWrite for InputOutputRegisters {\n\n fn contains(&self, address: usize) -> bool {\n\n 0xFF00 == address\n\n }\n", "file_path": "src/input_output_registers.rs", "rank": 35, "score": 11.309818270629421 }, { "content": " AndA,\n\n\n\n // AND (HL)\n\n // A bitwise AND operation is performed between the byte at the memory\n\n // address specified in the virtual 16-bit register HL and the contents\n\n // of register A, and the result is stored in register A.\n\n // Clock cycles: 8\n\n AndHl,\n\n\n\n // AND n\n\n // A bitwise AND operation is performed between the byte n and the contents of register A, and the result is stored in register A.\n\n // Clock cycles: 8\n\n AndN,\n\n\n\n // DI\n\n // Interrupts are disabled by resetting the Interrupt Master Flag (IME).\n\n // Clock cycles: 4\n\n Di,\n\n\n\n // EI\n", "file_path": "src/opcodes.rs", "rank": 36, "score": 11.240269493903053 }, { "content": " 0,\n\n 0,\n\n 0,\n\n ];\n\n convert(&bits).unwrap()\n\n }\n\n\n\n pub(crate) fn from_u8(value: u8) -> CpuFlag {\n\n CpuFlag {\n\n carry: value & 0b00010000 == 16,\n\n half_carry: value & 0b00100000 == 32,\n\n negative: value & 0b01000000 == 64,\n\n zero: value & 0b10000000 == 128,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ConversionError {\n\n Overflow,\n\n NonBinaryInput,\n\n}\n\n\n", "file_path": "src/register.rs", "rank": 37, "score": 10.966745401181708 }, { "content": " pub fn af(&self) -> u16 {\n\n let ret = (self.a as u16) << 8;\n\n ret | self.flags.to_u8() as u16\n\n }\n\n\n\n pub fn set_af(&mut self, value: u16) {\n\n self.a = (value >> 8_u16) as u8;\n\n self.flags = 
CpuFlag::from_u8(value as u8);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{CpuFlag, Registers};\n\n\n\n #[test]\n\n fn verify_conversion_flag() {\n\n let flags = CpuFlag {\n\n carry: false,\n\n half_carry: false,\n", "file_path": "src/register.rs", "rank": 38, "score": 10.941638753963339 }, { "content": " // memory address which it now specifies is loaded into the high-order\n\n // byte of qq, and SP is incremented again. The register pair qq may be\n\n // any of BC, DE, HL or AF.\n\n // Clock cycles: 12\n\n PopBc,\n\n PopDe,\n\n PopHl,\n\n PopAf,\n\n\n\n // ADD A, n\n\n // Byte n is read as an integer and added to the contents of register A, and the result is stored in register A.\n\n // Clock cycles: 8\n\n AddaN,\n\n\n\n // ADD A, r:\n\n // The contents of register r are added to the contents of register A\n\n // (the Accumulator) and the result is stored in register A.\n\n // Clock cycles: 4\n\n AddaB,\n\n AddaC,\n", "file_path": "src/opcodes.rs", "rank": 39, "score": 10.49687917549198 }, { "content": "\n\n // LD A, (DE)\n\n // Load to the 8-bit A register, data from the absolute address specified by the 16-bit register DE.\n\n // Clock cycles: 8\n\n LdADe,\n\n\n\n // LD A, (nn)\n\n // Load to the 8-bit A register, data from the absolute address specified by the 16-bit operand nn.\n\n // Clock cycles: 16\n\n LdANn,\n\n\n\n // LD r, (HL)\n\n // Load to the 8-bit register r, data from the absolute address specified by the 16-bit register HL.\n\n // Duration 2 machine cycles\n\n LdHlN,\n\n\n\n // LD (BC), a\n\n // Load to the absolute address specified by the 16-bit register BC, data from the 8-bit A register.\n\n // Clock cycles: 8\n\n LdBcA,\n", "file_path": "src/opcodes.rs", "rank": 40, "score": 10.490442159573664 }, { "content": " // Clock cycles: 8\n\n XorHl,\n\n\n\n // XOR n\n\n // A bitwise XOR operation is performed between the byte n and the contents of register A,\n\n // and the result is stored in register A.\n\n // Clock cycles: 8\n\n 
XorN,\n\n\n\n // ADD HL, rr\n\n // The contents of the register pair rr are added to the contents of the\n\n // register pair HL and the result is stored in HL. Register pair rr may be any of BC, DE, HL or SP.\n\n // Clock cycles: 8\n\n AddHlBc,\n\n AddHlDe,\n\n AddHlHl,\n\n AddHlSp,\n\n\n\n // INC rr\n\n // The register pair rr is incremented by 1.\n", "file_path": "src/opcodes.rs", "rank": 41, "score": 10.361035587289322 }, { "content": "// FF0F - IF - Interrupt Flag (R/W)\n\n// Bit 0: V-Blank Interrupt Request (INT 40h) (1=Request)\n\n// Bit 1: LCD STAT Interrupt Request (INT 48h) (1=Request)\n\n// Bit 2: Timer Interrupt Request (INT 50h) (1=Request)\n\n// Bit 3: Serial Interrupt Request (INT 58h) (1=Request)\n\n// Bit 4: Joypad Interrupt Request (INT 60h) (1=Request)\n\n#[allow(unused)]\n\npub enum InterruptKind {\n\n VBlank = 0,\n\n LCDStat = 1,\n\n Timer = 2,\n\n Serial = 3,\n\n Joypad = 4,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct InterruptFlag {\n\n pub data: u8,\n\n}\n\n\n", "file_path": "src/interrupt.rs", "rank": 42, "score": 10.18528221140187 }, { "content": " // CALL f, nn\n\n // Only if the condition f is true is the current program counter (return\n\n // address) pushed to the stack, high-order byte first, and the 16-bit word\n\n // nn loaded into the program counter. Execution will them continue from\n\n // the program counter. 
Condition f may be any of nz, z, nc or c.\n\n // Clock cycles: 24 if condition is met, otherwise 12\n\n CallNzNn,\n\n CallZNn,\n\n CallNcNn,\n\n CallCNn,\n\n\n\n // CP n\n\n // The byte n is compared with (subtracted from) the register A, setting\n\n // the appropriate flags but not storing the result.\n\n // Clock cycles: 8\n\n CpN,\n\n\n\n // RRA\n\n // The contents of register A are rotated right by 1 bit position through the carry flag.\n\n // Clock cycles: 4\n", "file_path": "src/opcodes.rs", "rank": 43, "score": 9.975681719162777 }, { "content": "use crate::memory_device::ReadWrite;\n\n\n\n// BCPS/BGPI - CGB Mode Only - Background Palette Index\n\n// This register is used to address a byte in the CGBs Background Palette Memory.\n\n// Each two byte in that memory define a color value.\n\n// The first 8 bytes define Color 0-3 of Palette 0 (BGP0), and so on for BGP1-7.\n\n// Bit 0-5 Index (00-3F)\n\n// Bit 7 Auto Increment (0=Disabled, 1=Increment after Writing)\n\n// Data can be read/written to/from the specified index address through Register 0xFF69.\n\n// When the Auto Increment bit is set then the index is automatically incremented after each <write> to 0xFF69.\n\n// Auto Increment has no effect when <reading> from 0xFF69,\n\n// so the index must be manually incremented in that case.\n\n// Writing to 0xFF69 during rendering still causes auto-increment to occur.\n\n// Unlike the following, this register can be accessed outside V-Blank and H-Blank.\n\n#[derive(Default)]\n\npub(crate) struct BackgroundPaletteIndex {\n\n value: u8,\n\n auto_increment: bool,\n\n}\n\n\n", "file_path": "src/background_palette_index.rs", "rank": 44, "score": 9.845365428061385 }, { "content": " fn default() -> Self {\n\n Self::Black\n\n }\n\n}\n\n\n\nconst SCREEN_W: usize = 160;\n\nconst SCREEN_H: usize = 144;\n\n\n\n#[allow(dead_code)]\n\npub struct GraphicsProcessingUnit {\n\n // video ram: 0x8000-0x9FFF\n\n vram: [u8; 0x4000],\n\n bank: u8,\n\n\n\n // Digital image with mode 
RGB.\n\n data: [u8; SCREEN_W * SCREEN_H * 3],\n\n\n\n // The H-Blank DMA transfers 10h bytes of data during each H-Blank, ie. at LY=0-143,\n\n // no data is transferred during V-Blank (LY=144-153), but the transfer will then continue at LY=00.\n\n // The execution of the program is halted during the separate transfers, but the program execution continues\n", "file_path": "src/gpu.rs", "rank": 45, "score": 9.786459084164594 }, { "content": " }\n\n\n\n pub fn set_active(&mut self, status: bool) {\n\n self.active = status;\n\n }\n\n}\n\n\n\nimpl ReadWrite for Hdma {\n\n fn contains(&self, address: usize) -> bool {\n\n address == 0xFF51\n\n || address == 0xFF52\n\n || address == 0xFF53\n\n || address == 0xFF54\n\n || address == 0xFF55\n\n }\n\n\n\n fn read_byte(&self, address: usize) -> Result<u8, std::io::Error> {\n\n match address {\n\n 0xFF51 => Ok((self.source >> 8) as u8),\n\n 0xFF52 => Ok(self.source as u8),\n", "file_path": "src/hdma.rs", "rank": 46, "score": 9.753156310491491 }, { "content": " // A bitwise OR operation is performed between the byte n and the\n\n // contents of register A, and the result is stored in register A.\n\n // Clock cycles: 8\n\n OrN,\n\n\n\n // CP r\n\n // The contents of register R are compared with (subtracted from) the\n\n // register A, setting the appropriate flags but not storing the result.\n\n // Register r may be any of B, C, D, E, H, L or A.\n\n // Clock cycles: 4\n\n CpB,\n\n CpC,\n\n CpD,\n\n CpE,\n\n CpH,\n\n CpL,\n\n CpA,\n\n\n\n // CP (HL)\n\n // The byte at the memory address specified in the register HL is compared\n", "file_path": "src/opcodes.rs", "rank": 47, "score": 9.672846723562186 }, { "content": " SubN,\n\n\n\n // SBC A, r\n\n // The contents of the register r along with the value of the carry\n\n // flag are both subtracted from the register A, and the result is\n\n // stored in register A. 
Register r may be any of B, C, D, E, H, L or A.\n\n // Clock cycles: 4\n\n SbcAB,\n\n SbcAC,\n\n SbcAD,\n\n SbcAE,\n\n SbcAH,\n\n SbcAL,\n\n SbcAA,\n\n\n\n // SBC A, (HL)\n\n // The byte at the memory address specified in the virtual 16-bit\n\n // register HL and the value of the carry flag are both subtracted from\n\n // the register A, and the result is stored in register A.\n\n // Clock cycles: 8\n", "file_path": "src/opcodes.rs", "rank": 48, "score": 9.570177919540674 }, { "content": "pub enum PrefixOpCode {\n\n /// RLC r\n\n /// The contents of the register r are rotated left by 1 bit position, after the sign bit (7)\n\n /// is copied into the carry flag.\n\n /// Clock cycles: 8\n\n RlcB,\n\n RlcC,\n\n RlcD,\n\n RlcE,\n\n RlcH,\n\n RlcL,\n\n RlcA,\n\n\n\n /// SRL r\n\n /// The contents of the register r are shifted right by 1 bit position, after bit 0\n\n /// is copied into the carry flag. Register r may be any of B, C, D, E, H, L or A.\n\n /// Clock cycles: 8\n\n SrlB,\n\n SrlC,\n\n SrlD,\n", "file_path": "src/prefix_opcodes.rs", "rank": 49, "score": 9.560381297997628 }, { "content": " sound_output: u8,\n\n\n\n /// Channel volume control 0xFF24\n\n /// Bit 7 - Output Vin to SO2 terminal (1=Enable)\n\n /// Bit 6-4 - SO2 output level (volume) (0-7)\n\n /// Bit 3 - Output Vin to SO1 terminal (1=Enable)\n\n /// Bit 2-0 - SO1 output level (volume) (0-7)\n\n channel_control: u8,\n\n}\n\n\n\nimpl Sound {\n\n pub fn new() -> Sound {\n\n Sound {\n\n on: 0,\n\n sound_output: 0,\n\n channel_control: 0,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/sound.rs", "rank": 50, "score": 9.504257794308234 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::fs;\n\n\n\n use crate::cartridge_header::*;\n\n\n\n #[test]\n\n fn verify_len() {\n\n let data = fs::read_to_string(\"./testdata/tetris\")\n\n .expect(\"file not found!\")\n\n .split(',')\n\n .map(|n| n.parse().unwrap())\n\n .collect::<Vec<u8>>();\n\n assert_eq!(32768, data.len());\n\n let header = 
CartridgeHeader::new(&data);\n\n assert_eq!(header.is_err(), false);\n\n }\n\n\n", "file_path": "src/cartridge_header.rs", "rank": 51, "score": 9.47795768404513 }, { "content": " use crate::gpu::{Color, Palette};\n\n\n\n #[test]\n\n fn palette_from_u8() {\n\n let value = 0b00_01_10_11;\n\n assert_eq!(\n\n Palette::from(value),\n\n Palette {\n\n index_0: Color::Black,\n\n index_1: Color::DarkGray,\n\n index_2: Color::LightGray,\n\n index_3: Color::White,\n\n }\n\n );\n\n }\n\n\n\n #[test]\n\n fn u8_from_palette() {\n\n let palette = Palette {\n\n index_0: Color::Black,\n\n index_1: Color::DarkGray,\n\n index_2: Color::LightGray,\n\n index_3: Color::White,\n\n };\n\n assert_eq!(Into::<u8>::into(palette), 0b00_01_10_11);\n\n }\n\n}\n", "file_path": "src/gpu.rs", "rank": 52, "score": 9.34601882564992 }, { "content": " fn from(item: Speed) -> u32 {\n\n match item {\n\n Speed::Normal => 1,\n\n Speed::Double => 2,\n\n }\n\n }\n\n}\n\n\n\n// Holds all memory space addressable for emulation.\n\npub struct MemoryManagmentUnit {\n\n cartridge: Box<dyn ReadWrite>,\n\n gpu: GraphicsProcessingUnit,\n\n internal: InternalMemory,\n\n serial: SerialDataTransfer,\n\n timer: Timer,\n\n sound: Sound,\n\n\n\n // Bit 7: Current Speed (0=Normal, 1=Double) (Read Only)\n\n // Bit 0: Prepare Speed Switch (0=No, 1=Prepare) (Read/Write)\n\n speed: Speed,\n", "file_path": "src/mmu.rs", "rank": 53, "score": 9.162483151433394 }, { "content": " // Clock cycles: 4\n\n Rlca,\n\n\n\n // RLA\n\n // The contents of register A are rotated left by 1 bit position through the\n\n // carry flag.\n\n // Clock cycles: 4\n\n Rla,\n\n\n\n // RRCA\n\n // The contents of register A are rotated right by 1 bit position, after bit 0\n\n // is copied into the carry flag.\n\n // Clock cycles: 4\n\n Rrca,\n\n\n\n // RET\n\n // The 16-bit word on top of the stack is popped off, low-order byte first,\n\n // and loaded into the program counter, from where execution continues.\n\n // Clock cycles: 16\n\n Ret,\n", 
"file_path": "src/opcodes.rs", "rank": 54, "score": 8.805967397413827 }, { "content": " fn run_dma_hrampart(&mut self) {\n\n let mmu_src = self.hdma.source;\n\n for i in 0..0x10 {\n\n let b: u8 = self.read_byte((mmu_src + i) as usize).unwrap();\n\n self.gpu\n\n .write_byte((self.hdma.destination + i) as usize, b)\n\n .unwrap();\n\n }\n\n\n\n self.hdma.source += 0x10;\n\n self.hdma.destination += 0x10;\n\n self.hdma.update_remain_after_hrampart();\n\n }\n\n\n\n // Writing to this register launches a DMA transfer from ROM or RAM to OAM memory (sprite attribute table).\n\n // The written value specifies the transfer source address divided by 100h, ie. source & destination are:\n\n // Source: XX00-XX9F ;XX in range from 00-F1h\n\n // Destination: 0xFE00-0xFE9F\n\n // The transfer takes 160 machine cycles: 152 microseconds in normal speed or 76 microseconds in CGB Double Speed Mode.\n\n // On DMG, during this time, the CPU can access only HRAM (memory at FF80-FFFE); on CGB, the bus used by the source area cannot be used (this isn't understood well at the moment, it's recommended to assume same behavior as DMG). 
For this reason, the programmer must copy a short procedure into HRAM,\n", "file_path": "src/mmu.rs", "rank": 55, "score": 8.631512686448723 }, { "content": " AddaD,\n\n AddaE,\n\n AddaH,\n\n AddaL,\n\n AddaA,\n\n\n\n // ADD A,(HL)\n\n // Clock cycles: 8\n\n AddAHl,\n\n\n\n // ADD SP, s\n\n // The byte s is read as a signed integer and added to the register pair SP.\n\n // Clock cycles: 16\n\n AddSp,\n\n\n\n // ADC r\n\n // Clock cycles: 4\n\n AdcB,\n\n AdcC,\n\n AdcD,\n", "file_path": "src/opcodes.rs", "rank": 56, "score": 8.541780365602499 }, { "content": "use crate::memory_device::ReadWrite;\n\n\n\npub struct Sound {\n\n /// On/Off sound 0xFF26.\n\n /// Bit 7 - All sound on/off (0: stop all sound circuits) (Read/Write)\n\n /// Bit 3 - Sound 4 ON flag (Read Only)\n\n /// Bit 2 - Sound 3 ON flag (Read Only)\n\n /// Bit 1 - Sound 2 ON flag (Read Only)\n\n /// Bit 0 - Sound 1 ON flag (Read Only)\n\n on: u8, // TODO: create dedicated struct for better reading code.\n\n\n\n /// Each channel can be panned hard left, center, or hard right 0xFF25.\n\n /// Bit 7 - Output sound 4 to SO2 terminal\n\n /// Bit 6 - Output sound 3 to SO2 terminal\n\n /// Bit 5 - Output sound 2 to SO2 terminal\n\n /// Bit 4 - Output sound 1 to SO2 terminal\n\n /// Bit 3 - Output sound 4 to SO1 terminal\n\n /// Bit 2 - Output sound 3 to SO1 terminal\n\n /// Bit 1 - Output sound 2 to SO1 terminal\n\n /// Bit 0 - Output sound 1 to SO1 terminal\n", "file_path": "src/sound.rs", "rank": 57, "score": 8.5043030989589 }, { "content": " // Clock cycles: 4\n\n Cpl,\n\n\n\n // JP nn\n\n // The 16-bit word nn is loaded into the program counter, from where execution continues.\n\n // Clock cycles: 16\n\n JpNN,\n\n\n\n // JP HL\n\n // The contents of the register pair HL are loaded into the program\n\n // counter, from where execution continues.\n\n // Clock cycles: 4\n\n JpHl,\n\n\n\n // JR f, PC+dd\n\n // The 8-bit signed integer dd is added to the program counter and the result is stored in the program 
counter only if the condition f is true.\n\n // Execution will then continue from the program counter.\n\n // Condition f may be any of nz, z, nc or c.\n\n // Clock cycles: 12 if condition is met, otherwise 8\n\n JrNzPcDd,\n", "file_path": "src/opcodes.rs", "rank": 58, "score": 8.333885583184887 }, { "content": " // Clock cycles: 12\n\n IncHl,\n\n\n\n // DEC rr\n\n // The register pair rr is decremented by 1. Register pair rr may be any of BC, DE, HL or SP.\n\n // Clock cycles: 8\n\n DecBc,\n\n DecDe,\n\n DecHl,\n\n DecSp,\n\n\n\n // DEC r\n\n // The register r is decremented by 1.\n\n // Clock cycles: 4\n\n DecB,\n\n DecC,\n\n DecD,\n\n DecE,\n\n DecH,\n\n DecL,\n", "file_path": "src/opcodes.rs", "rank": 59, "score": 8.317285783744405 }, { "content": "use std::env;\n\n\n\nmod background_palette_index;\n\nmod cartridge;\n\nmod cartridge_header;\n\nmod clock;\n\nmod cpu;\n\nmod emulator;\n\nmod gpu;\n\nmod hdma;\n\nmod input_output_registers;\n\nmod internal_memory;\n\nmod interrupt;\n\nmod memory_device;\n\nmod mmu;\n\nmod opcodes;\n\nmod prefix_opcodes;\n\nmod register;\n\nmod serial_data_transfer;\n\nmod sound;\n\nmod timer;\n\n\n", "file_path": "src/main.rs", "rank": 60, "score": 8.272822795946837 }, { "content": " pub fn new(data: &[u8]) -> Result<Self, std::io::Error> {\n\n check_logo(data)?;\n\n valid_checksum(data)?;\n\n\n\n Ok(CartridgeHeader {\n\n // title: String::from_utf8(t).unwrap(),\n\n memory_bank_type: decode_memory_bank_type(data),\n\n ram_size: data[0x149].into(),\n\n // gameboy_color_support: data[0x149].into(),\n\n })\n\n }\n\n\n\n pub fn ram_in_bytes(&self) -> usize {\n\n match self.ram_size {\n\n RamSize::None => 0,\n\n RamSize::OneBankOf2Kb => 2 * 1024,\n\n RamSize::OneBankOf8Kb => 8 * 1024,\n\n RamSize::FourBankOf8Kb => 4 * (8 * 1024),\n\n }\n\n }\n", "file_path": "src/cartridge_header.rs", "rank": 61, "score": 8.23312365562757 }, { "content": " RrA,\n\n\n\n // CCF\n\n // The carry flag is inverted.\n\n // Clock cycles: 4\n\n Ccf,\n\n\n\n // 
SCF\n\n // The carry flag is set.\n\n // Clock cycles: 4\n\n Scf,\n\n\n\n // LD HL, SP+s\n\n // The byte s is read as a signed integer and added to the register pair SP.\n\n // The result is then loaded into the register pair HL.\n\n // Clock cycles: 12\n\n LdHlSps,\n\n\n\n // RLCA\n\n // The contents of register A are rotated left by 1 bit position, after the sign bit (7) is copied into the carry flag.\n", "file_path": "src/opcodes.rs", "rank": 62, "score": 8.224645924038711 }, { "content": " // Load to the absolute address specified by the 16-bit operand nn, data from the 16-bit SP register.\n\n // Clock cycles: 20\n\n LdNnSP,\n\n\n\n // LD A, (HL+)\n\n // Load to the 8-bit A register, data from the absolute address specified by the 16-bit register HL. The value of\n\n // HL is incremented after the memory read.\n\n // Clock cycles: 8\n\n LdiAHl,\n\n\n\n // LD (HL+), A\n\n // Load to the absolute address specified by the 16-bit register HL, data from the 8-bit A register. The value of\n\n // HL is incremented after the memory write.\n\n // Clock cycles: 8\n\n LdiHlA,\n\n\n\n // LD SP, HL\n\n // Load to the 16-bit SP register, data from the 16-bit HL register.\n\n // Clock cycles: 4\n\n LdSpHl,\n", "file_path": "src/opcodes.rs", "rank": 63, "score": 8.149922755697071 }, { "content": "}\n\n\n\n#[allow(dead_code)]\n\npub struct MBC1 {\n\n header: CartridgeHeader,\n\n\n\n // 0x0150-0x3FFF\n\n rom: Vec<u8>,\n\n ram: Vec<u8>,\n\n\n\n // 0x0000-0x1FFF: RAM Enable (write only lower 4 bits)\n\n // - 00: Disable RAM (default)\n\n // - 0A: Enable RAM\n\n ram_enable: bool,\n\n\n\n // 0x6000-0x7FFF: ROM/RAM Mode Select (write only)\n\n // Selects whether the above register should be used as the upper 2 bits\n\n // of the ROM Bank Number or as the RAM Bank Number.\n\n // - 00 = ROM Banking Mode (up to 8KB RAM, 2MB ROM) (default)\n\n // - 01 = RAM Banking Mode (up to 32KB RAM, 512KB ROM)\n", "file_path": "src/cartridge.rs", "rank": 64, "score": 8.089502646857449 }, { "content": 
" toggle_speed_request: bool,\n\n\n\n // I/O registers, like joypad.\n\n io_reg: InputOutputRegisters,\n\n\n\n hdma: Hdma,\n\n}\n\n\n\nimpl MemoryManagmentUnit {\n\n pub fn new(cartridge: Box<dyn ReadWrite>) -> MemoryManagmentUnit {\n\n MemoryManagmentUnit {\n\n cartridge,\n\n gpu: GraphicsProcessingUnit::new(),\n\n internal: InternalMemory::new(),\n\n serial: SerialDataTransfer::default(),\n\n timer: Timer::new(Rc::new(RefCell::new(InterruptFlag::default()))),\n\n sound: Sound::new(),\n\n speed: Speed::Normal,\n\n toggle_speed_request: false,\n\n io_reg: InputOutputRegisters::default(),\n", "file_path": "src/mmu.rs", "rank": 65, "score": 8.080885763127855 }, { "content": " SwapB,\n\n SwapC,\n\n SwapD,\n\n SwapE,\n\n SwapH,\n\n SwapL,\n\n SwapA,\n\n}\n\n\n\nimpl From<u8> for PrefixOpCode {\n\n fn from(orig: u8) -> Self {\n\n match orig {\n\n 0x00 => PrefixOpCode::RlcB,\n\n 0x01 => PrefixOpCode::RlcC,\n\n 0x02 => PrefixOpCode::RlcD,\n\n 0x03 => PrefixOpCode::RlcE,\n\n 0x04 => PrefixOpCode::RlcH,\n\n 0x05 => PrefixOpCode::RlcL,\n\n 0x07 => PrefixOpCode::RlcA,\n\n 0x18 => PrefixOpCode::RrB,\n", "file_path": "src/prefix_opcodes.rs", "rank": 66, "score": 8.06033141245995 }, { "content": " // Register r may be any of B, C, D, E, H, L, or A.\n\n // Clock cycles: 4\n\n SubB,\n\n SubC,\n\n SubD,\n\n SubE,\n\n SubH,\n\n SubL,\n\n SubA,\n\n\n\n // SUB (HL)\n\n // The byte at the memory address specified in the virtual 16-bit\n\n // register HL is subtracted from the register A and the result is\n\n // stored in register A.\n\n // Clock cycles: 8\n\n SubHl,\n\n\n\n // SUB n\n\n // Byte n is read as an integer and subtracted from the contents of register A, and the result is stored in register A.\n\n // Clock cycles: 8\n", "file_path": "src/opcodes.rs", "rank": 67, "score": 8.045422360456836 }, { "content": "\n\n // LD (DE), a\n\n // Load to the absolute address specified by the 16-bit register DE, data from the 8-bit A register.\n\n // Clock cycles: 8\n\n LdDeA,\n\n\n\n // LD 
(nn), A\n\n // Load to the absolute address specified by the 16-bit operand nn, data from the 8-bit A register.\n\n // Clock cycles: 16\n\n LdNnA,\n\n\n\n // LD rr, nn\n\n // Load to the 16-bit register rr, the immediate 16-bit data nn.\n\n // Clock cycles: 12\n\n LdBcNn,\n\n LdDeNn,\n\n LdHlNn,\n\n LdSpNn,\n\n\n\n // LD (HL-), A\n", "file_path": "src/opcodes.rs", "rank": 68, "score": 7.901929659193082 }, { "content": "use std::{fmt, num::Wrapping};\n\n\n\npub struct CartridgeHeader {\n\n // title: String,\n\n pub memory_bank_type: MemoryBankType,\n\n ram_size: RamSize,\n\n // gameboy_color_support: GameBoyColorFlag,\n\n}\n\n\n\n/// Specifies which Memory Bank Controller (if any) is used in\n\n/// the cartridge, and if further external hardware exists\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum MemoryBankType {\n\n NoMemoryBank,\n\n MBC1,\n\n MBC2,\n\n MMM01,\n\n MBC3,\n\n MBC4,\n\n MBC5,\n", "file_path": "src/cartridge_header.rs", "rank": 69, "score": 7.769961145213779 }, { "content": " // Clock cycles: 8\n\n IncBC,\n\n IncDE,\n\n IncHL,\n\n IncSP,\n\n\n\n // INC r\n\n // The register r is incremented by 1.\n\n // Clock cycles: 4\n\n IncB,\n\n IncC,\n\n IncD,\n\n IncE,\n\n IncH,\n\n IncL,\n\n IncA,\n\n\n\n // INC (HL)\n\n // The byte at the memory address specified in the register HL is incremented\n\n // by 1.\n", "file_path": "src/opcodes.rs", "rank": 70, "score": 7.7212251822854165 }, { "content": "use crate::memory_device::ReadWrite;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum HdmaMode {\n\n // When using this transfer method, all data is transferred at once. The execution of the program is halted until\n\n // the transfer has completed. Note that the General Purpose DMA blindly attempts to copy the data, even if the\n\n // CD controller is currently accessing VRAM. So General Purpose DMA should be used only if the Display is disabled,\n\n // or during V-Blank, or (for rather short blocks) during H-Blank. 
The execution of the program continues when the\n\n // transfer has been completed, and FF55 then contains a value of FFh.\n\n Gdma,\n\n // The H-Blank DMA transfers 10h bytes of data during each H-Blank, ie. at LY=0-143, no data is transferred during\n\n // V-Blank (LY=144-153), but the transfer will then continue at LY=00. The execution of the program is halted\n\n // during the separate transfers, but the program execution continues during the 'spaces' between each data block.\n\n // Note that the program should not change the Destination VRAM bank (FF4F), or the Source ROM/RAM bank (in case\n\n // data is transferred from bankable memory) until the transfer has completed! (The transfer should be paused as\n\n // described below while the banks are switched) Reading from Register FF55 returns the remaining length (divided\n\n // by 10h, minus 1), a value of 0FFh indicates that the transfer has completed. It is also possible to terminate\n\n // an active H-Blank transfer by writing zero to Bit 7 of FF55. In that case reading from FF55 will return how many\n\n // $10 \"blocks\" remained (minus 1) in the lower 7 bits, but Bit 7 will be read as \"1\". Stopping the transfer\n\n // doesn't set HDMA1-4 to $FF.\n", "file_path": "src/hdma.rs", "rank": 71, "score": 7.692739831973871 }, { "content": " // Interrupts are enabled by setting the Interrupt Master Flag (IME).\n\n // Clock cycles: 4\n\n Ei,\n\n\n\n // CALL nn\n\n // The current program counter (return address) is pushed to the stack, high-order byte first.\n\n // The 16-bit word nn is then loaded into the program counter, from where execution continues.\n\n // Clock cycles: 24\n\n CallNn,\n\n\n\n // RST n\n\n // The current program counter is pushed onto the stack, high-order byte\n\n // first.\n\n // The value of the operand n is then loaded into the program counter, from\n\n // where execution continues. 
Operand n may be any of 0x00, 0x08, 0x10, 0x18,\n\n // 0x20, 0x28, 0x30 or 0x38.\n\n // Clock cycles: 16\n\n Rst00,\n\n Rst08,\n\n Rst10,\n", "file_path": "src/opcodes.rs", "rank": 72, "score": 7.682692386783107 }, { "content": "\n\n // OR r\n\n // A bitwise OR operation is performed between the contents of the register r and the contents of the register A, and the result is stored in register A.\n\n // Clock cycles: 4\n\n OrB,\n\n OrC,\n\n OrD,\n\n OrE,\n\n OrH,\n\n OrL,\n\n OrA,\n\n\n\n // OR (HL)\n\n // A bitwise OR operation is performed between the byte at the memory\n\n // address specified in the virtual 16-bit register HL and the contents\n\n // of register A, and the result is stored in register A.\n\n // Clock cycles: 8\n\n OrHl,\n\n\n\n // OR n\n", "file_path": "src/opcodes.rs", "rank": 73, "score": 7.681066743953476 }, { "content": "\n\n // CB\n\n // Interpret the next byte as a prefix instruction (PrefixOpCode) rather than a normal instruction (OpCode)\n\n CB,\n\n}\n\n\n\nimpl From<u8> for OpCode {\n\n fn from(orig: u8) -> Self {\n\n match orig {\n\n 0x00 => OpCode::Noop,\n\n 0x01 => OpCode::LdBcNn,\n\n 0x02 => OpCode::LdBcA,\n\n 0x03 => OpCode::IncBC,\n\n 0x04 => OpCode::IncB,\n\n 0x05 => OpCode::DecB,\n\n 0x06 => OpCode::LdBNext,\n\n 0x07 => OpCode::Rlca,\n\n 0x08 => OpCode::LdNnSP,\n\n 0x09 => OpCode::AddHlBc,\n\n 0x0A => OpCode::LdABc,\n", "file_path": "src/opcodes.rs", "rank": 74, "score": 7.592386414953319 }, { "content": " source: 0,\n\n destination: 0x8000,\n\n active: false,\n\n mode: HdmaMode::Gdma,\n\n remain: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl Hdma {\n\n pub fn is_active(&self) -> bool {\n\n self.active\n\n }\n\n\n\n pub fn update_remain_after_hrampart(&mut self) {\n\n if self.remain == 0 {\n\n self.remain = 0x7F;\n\n } else {\n\n self.remain -= 1;\n\n }\n", "file_path": "src/hdma.rs", "rank": 75, "score": 7.560960602081037 }, { "content": " stack_pointer: 0xFFFE,\n\n }\n\n }\n\n\n\n pub fn get_register(&self, reg: &Register) -> u8 {\n\n match 
reg {\n\n Register::B => self.b,\n\n Register::C => self.c,\n\n Register::D => self.d,\n\n Register::E => self.e,\n\n Register::H => self.h,\n\n Register::L => self.l,\n\n Register::A => self.a,\n\n }\n\n }\n\n\n\n pub fn set_register(&mut self, reg: &Register, value: u8) {\n\n match reg {\n\n Register::B => self.b = value,\n\n Register::C => self.c = value,\n", "file_path": "src/register.rs", "rank": 76, "score": 7.5564591758240836 }, { "content": " RegisterWord::BC => self.set_bc(value),\n\n RegisterWord::DE => self.set_de(value),\n\n RegisterWord::HL => self.set_hl(value),\n\n RegisterWord::AF => self.set_af(value),\n\n RegisterWord::SP => self.stack_pointer = value,\n\n }\n\n }\n\n\n\n fn set(reg1: &mut u8, reg2: &mut u8, value: u16) {\n\n *reg1 = (value >> 8_u16) as u8;\n\n *reg2 = value as u8;\n\n }\n\n\n\n pub fn bc(&self) -> u16 {\n\n let ret = (self.b as u16) << 8;\n\n ret | self.c as u16\n\n }\n\n\n\n pub fn set_bc(&mut self, value: u16) {\n\n Registers::set(&mut self.b, &mut self.c, value);\n", "file_path": "src/register.rs", "rank": 77, "score": 7.504329734506648 }, { "content": " SbcAHl,\n\n\n\n // SBC A, n\n\n // Byte n is read as an integer and along with the value of the carry\n\n // flag, it is subtracted from register A, and the result is stored in\n\n // register A.\n\n // Clock cycles: 8\n\n SbcAn,\n\n\n\n // AND r\n\n // A bitwise AND operation is performed between the contents of the\n\n // register r and the contents of the register A, and the result is\n\n // stored in register A. 
Register r may be any of B, C, D, E, H, L, or A.\n\n // Clock cycles: 4\n\n AndB,\n\n AndC,\n\n AndD,\n\n AndE,\n\n AndH,\n\n AndL,\n", "file_path": "src/opcodes.rs", "rank": 78, "score": 7.448623010657647 }, { "content": " /// Current status of LCD displsy: 0xFF41\n\n /// The LCD controller operates on a 222 Hz = 4.194 MHz dot clock.\n\n /// An entire frame is 154 scanlines, 70224 dots, or 16.74 ms.\n\n /// On scanlines 0 through 143, the LCD controller cycles through modes 2, 3, and 0 once every 456 dots.\n\n /// Scanlines 144 through 153 are mode 1.\n\n /// Bit 1-0 - Mode Flag (Mode 0-3, see below) (Read Only)\n\n /// 0: During H-Blank\n\n /// 1: During V-Blank\n\n /// 2: During Searching OAM\n\n /// 3: During Transferring Data to LCD Driver\n\n /// Bit 2 - Coincidence Flag (0:LYC<>LY, 1:LYC=LY) (Read Only)\n\n /// Bit 3 - Mode 0 H-Blank Interrupt (1=Enable) (Read/Write)\n\n /// Bit 4 - Mode 1 V-Blank Interrupt (1=Enable) (Read/Write)\n\n /// Bit 5 - Mode 2 OAM Interrupt (1=Enable) (Read/Write)\n\n /// Bit 6 - LYC=LY Coincidence Interrupt (1=Enable) (Read/Write)\n\n status: u8,\n\n\n\n // Scroll Y (R/W), Scroll X (R/W)\n\n // Specifies the position in the 256x256 pixels BG map (32x32 tiles) which is to be displayed at the upper/left LCD\n\n // display position. Values in range from 0-255 may be used for X/Y each, the video controller automatically wraps\n", "file_path": "src/gpu.rs", "rank": 79, "score": 7.2937465751241986 }, { "content": " // Load to the absolute address specified by the 16-bit register HL, data from the 8-bit A register. The value of\n\n // HL is decremented after the memory write.\n\n // Clock cycles: 8\n\n LddHlA,\n\n\n\n // LDH A, (n)\n\n // Load to the 8-bit A register, data from the address specified by the 8-bit immediate data n. 
The full 16-bit\n\n // absolute address is obtained by setting the most significant byte to 0xFF and the least significant byte to the\n\n // value of n, so the possible range is 0xFF00-0xFFFF.\n\n // Clock cycles: 12\n\n LdHAn,\n\n\n\n // LDH (n), A\n\n // Load to the address specified by the 8-bit immediate data n, data from the 8-bit A register. The full 16-bit\n\n // absolute address is obtained by setting the most significant byte to 0xFF and the least significant byte to the\n\n // value of n, so the possible range is 0xFF00-0xFFFF.\n\n // Clock cycles: 12\n\n LdHnA,\n\n\n\n // LDH A, (C)\n", "file_path": "src/opcodes.rs", "rank": 80, "score": 7.278795289605782 }, { "content": "use std::cmp::PartialEq;\n\nuse std::ops::BitXor;\n\nuse std::ops::Shl;\n\n\n\n// Description of register of GB.\n\n// -------------\n\n// | A Flags | ---> Program Status Word\n\n// | B C | ---> B\n\n// | D E | ---> D\n\n// | H L | ---> H\n\n// | SP | ---> Stack Pointer\n\n// | PC | ---> Program Counter\n\n// -------------\n\n\n\n#[derive(Debug)]\n\npub enum Register {\n\n A,\n\n B,\n\n C,\n\n D,\n", "file_path": "src/register.rs", "rank": 81, "score": 7.218935558557124 }, { "content": " // Load to the 8-bit A register, data from the address specified by the 8-bit C register. The full 16-bit absolute\n\n // address is obtained by setting the most significant byte to 0xFF and the least significant byte to the value of C,\n\n // so the possible range is 0xFF00-0xFFFF.\n\n // Clock cycles: 8\n\n LdHAC,\n\n\n\n // LDH (C), A\n\n // Load to the address specified by the 8-bit C register, data from the 8-bit A register. The full 16-bit absolute\n\n // address is obtained by setting the most significant byte to 0xFF and the least significant byte to the value of C,\n\n // so the possible range is 0xFF00-0xFFFF.\n\n // Clock cycles: 8\n\n LdHCA,\n\n\n\n // LD A, (HL-)\n\n // Load to the 8-bit A register, data from the absolute address specified by the 16-bit register HL. 
The value of\n\n // HL is decremented after the memory read.\n\n // Clock cycles: 8\n\n LddAHl,\n\n\n\n // LD (nn), SP\n", "file_path": "src/opcodes.rs", "rank": 82, "score": 7.1611458592611035 }, { "content": "use crate::{background_palette_index::BackgroundPaletteIndex, memory_device::ReadWrite};\n\n\n\n#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]\n", "file_path": "src/gpu.rs", "rank": 83, "score": 7.131147325186564 }, { "content": " SrlE,\n\n SrlH,\n\n SrlL,\n\n SrlA,\n\n\n\n /// RR r\n\n /// The contents of the register r are rotated right by 1 bit position through the carry flag.\n\n /// Register r may be any of B, C, D, E, H, L or A.\n\n /// Clock cycles: 8\n\n RrB,\n\n RrC,\n\n RrD,\n\n RrE,\n\n RrH,\n\n RrL,\n\n RrA,\n\n\n\n /// SWAP r\n\n /// The upper and lower nibbles of the register r are swapped. Register r may be any of B, C, D, E, H, L or A.\n\n /// Clock cycles: 8\n", "file_path": "src/prefix_opcodes.rs", "rank": 84, "score": 7.057094262049697 }, { "content": " AdcE,\n\n AdcH,\n\n AdcL,\n\n AdcA,\n\n\n\n // ADC A, (HL)\n\n // The byte at the memory address specified in the virtual 16-bit\n\n // register HL along with the value of the carry flag are added to the\n\n // register A and the result is stored in register A.\n\n // Clock cycles: 8\n\n AdcAHl,\n\n\n\n // ADC A, n\n\n // Byte n is read as an integer and added to the contents of register\n\n // A along with the value of the carry flag. The result is then stored in register A.\n\n // Clock cycles: 8\n\n AdcAn,\n\n\n\n // SUB r\n\n // The contents of the register r are subtracted from the contents of register A, and the result is stored in register A.\n", "file_path": "src/opcodes.rs", "rank": 85, "score": 7.023532052432234 }, { "content": " //\n\n // Flags affected:\n\n // Z - Set if result is zero. (Set if A = n.)\n\n // N - Set.\n\n // H - Set if no borrow from bit 4.\n\n // C - Set for no borrow. 
(Set if A < n.)\n\n fn alu_cp(&mut self, n: u8) {\n\n let result = self.registers.a;\n\n self.alu_sub(n);\n\n self.registers.a = result;\n\n }\n\n\n\n fn cp_r(&mut self, reg: Register) -> u8 {\n\n let v = self.registers.get_register(&reg);\n\n self.alu_cp(v);\n\n\n\n 4\n\n }\n\n\n\n fn cp_hl(&mut self) -> u8 {\n", "file_path": "src/cpu.rs", "rank": 86, "score": 6.992473791001086 }, { "content": " // with (subtracted from) the register A, setting the appropriate flags but\n\n // not storing the result.\n\n // Clock cycles: 8\n\n CpHl,\n\n\n\n // XOR r\n\n // A bitwise XOR operation is performed between the contents of the register r and the contents of the register A, and the result is\n\n // stored in register A.\n\n // Clock cycles: 4\n\n XorB,\n\n XorC,\n\n XorD,\n\n XorE,\n\n XorH,\n\n XorL,\n\n XorA,\n\n\n\n // XOR (HL)\n\n // A bitwise XOR operation is performed between the byte at the memory address specified in the virtual 16-bit register HL and the contents\n\n // of register A, and the result is stored in register A.\n", "file_path": "src/opcodes.rs", "rank": 87, "score": 6.990287562878889 }, { "content": "\n\n // RET f\n\n // Only if the condition f is true is the 16-bit word on top of the stack\n\n // popped off and loaded into the program counter. Execution will then\n\n // continue from the program counter.\n\n // Clock cycles: 20 if condition is met, otherwise 8\n\n RetNz,\n\n RetZ,\n\n RetNc,\n\n RetC,\n\n\n\n // RETI\n\n // The 16-bit word on top of the stack is popped off, low-order byte first,\n\n // and loaded into the program counter. 
Interrupts are then enabled by setting\n\n // the interrupt master flag (IME), and execution then continues from the\n\n // program counter.\n\n // Clock cycles: 16\n\n RetI,\n\n\n\n // PUSH rr\n", "file_path": "src/opcodes.rs", "rank": 88, "score": 6.9573563030499335 }, { "content": " LdEHL,\n\n LdHHL,\n\n LdLHL,\n\n LdAHL,\n\n\n\n // LD (HL), r\n\n // Load to the absolute address specified by the 16-bit register HL, data from the 8-bit register r.\n\n // Clock cycles: 8\n\n LdHlB,\n\n LdHlC,\n\n LdHlD,\n\n LdHlE,\n\n LdHlH,\n\n LdHlL,\n\n LdHlA,\n\n\n\n // LD A, (BC)\n\n // Load to the 8-bit A register, data from the absolute address specified by the 16-bit register BC.\n\n // Clock cycles: 8\n\n LdABc,\n", "file_path": "src/opcodes.rs", "rank": 89, "score": 6.9573563030499335 }, { "content": " // N - Reset.\n\n // H - Reset.\n\n // C - Reset.\n\n fn alu_or(&mut self, n: u8) {\n\n let result = self.registers.a | n;\n\n self.registers.flags.carry = false;\n\n self.registers.flags.half_carry = false;\n\n self.registers.flags.negative = false;\n\n self.registers.flags.zero = result == 0x00;\n\n self.registers.a = result;\n\n }\n\n\n\n fn or_r(&mut self, reg: Register) -> u8 {\n\n let v = self.registers.get_register(&reg);\n\n self.alu_or(v);\n\n\n\n 4\n\n }\n\n\n\n fn or_hl(&mut self) -> u8 {\n", "file_path": "src/cpu.rs", "rank": 90, "score": 6.954576012349299 }, { "content": " romram_mode: bool,\n\n bank: u8,\n\n}\n\n\n\nimpl MBC1 {\n\n fn new(rom: Vec<u8>, header: CartridgeHeader) -> MBC1 {\n\n let ram_size = header.ram_in_bytes();\n\n MBC1 {\n\n header,\n\n rom,\n\n ram: Vec::with_capacity(ram_size),\n\n ram_enable: false,\n\n romram_mode: false,\n\n bank: 0x01,\n\n }\n\n }\n\n\n\n fn rom_bank(&self) -> u8 {\n\n if self.romram_mode {\n\n self.bank & 0x1F\n", "file_path": "src/cartridge.rs", "rank": 91, "score": 6.931246686789175 }, { "content": " // Flags affected:\n\n // Z - Set if result is zero.\n\n // N - Reset.\n\n // H - Reset.\n\n // C - Contains old bit 
7 data.\n\n fn alu_rlc(&mut self, a: u8) -> u8 {\n\n let c = (a & 0x80) >> 7 == 0x01;\n\n let result = (a << 1) | u8::from(c);\n\n self.registers.flags.carry = c;\n\n self.registers.flags.half_carry = false;\n\n self.registers.flags.negative = false;\n\n self.registers.flags.zero = result == 0x00;\n\n\n\n result\n\n }\n\n\n\n fn rlca(&mut self) -> u8 {\n\n self.registers.a = self.alu_rlc(self.registers.a);\n\n self.registers.flags.zero = false;\n\n\n", "file_path": "src/cpu.rs", "rank": 92, "score": 6.877403027933055 }, { "content": "\n\n fn scf(&mut self) -> u8 {\n\n self.registers.flags.carry = true;\n\n self.registers.flags.half_carry = false;\n\n self.registers.flags.negative = false;\n\n\n\n 4\n\n }\n\n\n\n // Rotate A right through Carry flag.\n\n //\n\n // Flags affected:\n\n // Z - Set if result is zero.\n\n // N - Reset.\n\n // H - Reset.\n\n // C - Contains old bit 0 data.\n\n fn alu_rr(&mut self, a: u8) -> u8 {\n\n let c = a & 0x01 == 0x01;\n\n let result = if self.registers.flags.carry {\n\n 0x80 | (a >> 1)\n", "file_path": "src/cpu.rs", "rank": 93, "score": 6.807489840912269 }, { "content": "\n\n fn rla(&mut self) -> u8 {\n\n self.registers.a = self.alu_rl(self.registers.a);\n\n self.registers.flags.zero = false;\n\n\n\n 4\n\n }\n\n\n\n // Rotate A right. 
Old bit 0 to Carry flag.\n\n //\n\n // Flags affected:\n\n // Z - Set if result is zero.\n\n // N - Reset.\n\n // H - Reset.\n\n // C - Contains old bit 0 data\n\n fn alu_rrc(&mut self, a: u8) -> u8 {\n\n let c = a & 0x01 == 0x01;\n\n let result = if c { 0x80 | (a >> 1) } else { a >> 1 };\n\n self.registers.flags.carry = c;\n\n self.registers.flags.half_carry = false;\n", "file_path": "src/cpu.rs", "rank": 94, "score": 6.7365282973617715 }, { "content": " 4\n\n }\n\n\n\n // Rotate A left through Carry flag.\n\n //\n\n // Flags affected:\n\n // Z - Set if result is zero.\n\n // N - Reset.\n\n // H - Reset.\n\n // C - Contains old bit 7 data.\n\n fn alu_rl(&mut self, a: u8) -> u8 {\n\n let c = (a & 0x80) >> 7 == 0x01;\n\n let result = (a << 1) + u8::from(self.registers.flags.carry);\n\n self.registers.flags.carry = c;\n\n self.registers.flags.half_carry = false;\n\n self.registers.flags.negative = false;\n\n self.registers.flags.zero = result == 0x00;\n\n\n\n result\n\n }\n", "file_path": "src/cpu.rs", "rank": 95, "score": 6.7365282973617715 }, { "content": " JrZPcDd,\n\n JrNcPcDd,\n\n JrCPcDd,\n\n\n\n // JR PC+dd\n\n // The 8-bit signed integer dd is added to the program counter and the\n\n // result is stored in the program counter, from where execution continues.\n\n // Clock cycles: 12\n\n JrPcDd,\n\n\n\n // JP f, nn\n\n // The 16-bit word nn is loaded into the program counter only if the\n\n // condition f is true. Execution will then continue from the program\n\n // counter. 
Condition f may be any of nz, z, nc or c.\n\n // Clock cycles: 16 if condition is met, otherwise 12\n\n JpNzNn,\n\n JpZNn,\n\n JpNcNn,\n\n JpCNn,\n\n\n", "file_path": "src/opcodes.rs", "rank": 96, "score": 6.614581897173261 }, { "content": "\n\nimpl InternalMemory {\n\n pub fn new() -> InternalMemory {\n\n InternalMemory {\n\n wram: [0; 0x8000],\n\n wram_bank: 1,\n\n hram: [0; 0x007F],\n\n interrupt_flag: 0,\n\n interrupt_enable: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl ReadWrite for InternalMemory {\n\n fn contains(&self, address: usize) -> bool {\n\n (0xC000..=0xCFFF).contains(&address)\n\n || (0xD000..=0xDFFF).contains(&address)\n\n || (0xE000..=0xEFFF).contains(&address)\n\n || (0xF000..=0xFDFF).contains(&address)\n\n || (0xFF80..=0xFFFE).contains(&address)\n", "file_path": "src/internal_memory.rs", "rank": 97, "score": 6.525938271950399 }, { "content": " // Add n to current address and jump to it.\n\n // n = one byte signed immediate value\n\n fn alu_jr(&mut self, n: u8) {\n\n let n = n as i8;\n\n self.registers.program_counter =\n\n ((u32::from(self.registers.program_counter) as i32) + i32::from(n)) as u16;\n\n }\n\n\n\n fn jr_pc_dd(&mut self) -> u8 {\n\n let v = self.fetch_byte();\n\n self.alu_jr(v);\n\n\n\n 12\n\n }\n\n\n\n fn jp_f_nn(&mut self, op: ConditionOperand) -> u8 {\n\n let nn = self.fetch_word();\n\n let condition = match op {\n\n ConditionOperand::NZ => !self.registers.flags.zero,\n\n ConditionOperand::Z => self.registers.flags.zero,\n", "file_path": "src/cpu.rs", "rank": 98, "score": 6.482702054203067 }, { "content": " DecA,\n\n\n\n // DEC (HL)\n\n // The byte at the memory address specified in the register HL is decremented by 1.\n\n // Clock cycles: 12\n\n DecHlSpecific,\n\n\n\n // DAA\n\n // The results of the previous operation as stored in the Accumulator and flags\n\n // are retroactively adjusted to become a BCD (binary coded decimal) operation,\n\n // where the lower and upper nibbles of the bytes in the operation are treated as\n\n // two 
individual decimal digits, rather than the whole byte as one binary number.\n\n // It does this by adding or subtracting 6 from the Accumulator's lower nibble,\n\n // upper nibble or both, based on whether the last operation was a subtraction\n\n // (n flag), and whether a carry and/or half carry occurred (c and h flags).\n\n // Clock cycles: 4\n\n Daa,\n\n\n\n // CPL\n\n // The contents of register A are inverted (one's complement).\n", "file_path": "src/opcodes.rs", "rank": 99, "score": 6.441477460784721 } ]
Rust
src/worker.rs
dginev/rust-cortex-peripherals
c1eb9eb489e45ae80857cc3b00b20fe9bee6c258
use std::borrow::Cow; use std::error::Error; use std::fs::File; use std::io::{Read, Seek, SeekFrom, Write}; use std::ops::Deref; use std::path::Path; use std::thread; use std::time::Duration; use std::ffi::OsString; use tempdir::TempDir; use zmq::{Context, Message, Socket, SNDMORE}; pub trait Worker: Clone + Send { fn convert(&self, _: &Path) -> Result<File, Box<dyn Error>>; fn message_size(&self) -> usize; fn get_service(&self) -> &str; fn get_source_address(&self) -> Cow<str>; fn get_sink_address(&self) -> Cow<str>; fn pool_size(&self) -> usize { 1 } fn set_identity(&mut self, _identity: String) { unimplemented!() } fn get_identity(&self) -> &str { unimplemented!() } fn start(&mut self, limit: Option<usize>) -> Result<(), Box<dyn Error>> where Self: 'static + Sized, { let hostname = hostname::get().unwrap_or_else(|_| OsString::from("hostname")).into_string().unwrap(); match self.pool_size() { 1 => { self.set_identity(format!("{}:engrafo:1", hostname)); self.start_single(limit) } n => { let mut threads = Vec::new(); for thread in 1..=n { let thread_str = if thread < 10 { format!("0{}", thread) } else { thread.to_string() }; let identity_single = format!("{}:engrafo:{}", hostname, thread_str); let mut thread_self: Self = self.clone(); thread_self.set_identity(identity_single); threads.push(thread::spawn(move || { thread_self.start_single(limit).unwrap(); })); } for t in threads { t.join().unwrap(); } Ok(()) } } } fn start_single(&self, limit: Option<usize>) -> Result<(), Box<dyn Error>> { let mut work_counter = 0; let context_source = Context::new(); let source = context_source.socket(zmq::DEALER).unwrap(); source.set_identity(self.get_identity().as_bytes()).unwrap(); assert!(source.connect(&self.get_source_address()).is_ok()); let context_sink = Context::new(); let sink = context_sink.socket(zmq::PUSH).unwrap(); assert!(sink.connect(&self.get_sink_address()).is_ok()); loop { let input_tmpdir = TempDir::new("cortex_task").unwrap(); let (file_result, input_filepath, 
input_size, taskid) = self.receive_from_cortex(&input_tmpdir, &source); let converted_result = if file_result.is_ok() { self.convert(Path::new(&input_filepath)) } else { file_result }; self.respond_to_cortex(converted_result, input_size, &taskid, &sink); input_tmpdir.close().unwrap(); work_counter += 1; if let Some(upper_bound) = limit { if work_counter >= upper_bound { thread::sleep(Duration::new(1, 0)); break; } } } Ok(()) } fn receive_from_cortex( &self, input_tmpdir: &TempDir, source: &Socket, ) -> (Result<File, Box<dyn Error>>, String, usize, String) { let mut taskid_msg = Message::new(); let mut recv_msg = Message::new(); source.send(&self.get_service(), 0).unwrap(); source.recv(&mut taskid_msg, 0).unwrap(); let taskid = taskid_msg.as_str().unwrap(); let input_filepath = input_tmpdir.path().to_str().unwrap().to_string() + "/" + taskid + ".zip"; let mut file = File::create(input_filepath.clone()).unwrap(); let mut input_size = 0; loop { source.recv(&mut recv_msg, 0).unwrap(); if let Ok(written) = file.write(recv_msg.deref()) { input_size += written; } if !source.get_rcvmore().unwrap() { break; } } let file_result = if input_size > 0 { file.seek(SeekFrom::Start(0)).unwrap(); Ok(file) } else { Err(From::from("Input was empty.")) }; info!( target: &format!("{}:received", self.get_identity()), "task {}, read {} bytes from CorTeX.", taskid, input_size ); (file_result, input_filepath, input_size, taskid.to_string()) } fn respond_to_cortex( &self, file_result: Result<File, Box<dyn Error>>, input_size: usize, taskid: &str, sink: &Socket, ) { sink.send(self.get_identity(), SNDMORE).unwrap(); sink.send(self.get_service(), SNDMORE).unwrap(); sink.send(taskid, SNDMORE).unwrap(); match file_result { Ok(mut converted_file) => { let mut total_size = 0; loop { let message_size = self.message_size(); let mut data = vec![0; message_size]; let size = converted_file.read(&mut data).unwrap(); total_size += size; data.truncate(size); if size < message_size { sink.send(&data, 
0).unwrap(); break; } else { sink.send(&data, SNDMORE).unwrap(); } } info!( target: &format!("{}:completed", self.get_identity()), " task {}, sent {} bytes back to CorTeX.", taskid, total_size ); } Err(e) => { sink.send(&Vec::new(), 0).unwrap(); if input_size == 0 { info!( target: &format!("{}:result", self.get_identity()), "Empty input. Throttling for a minute." ); } else { info!( target: &format!("{}:result", self.get_identity()), "Conversion came back empty: {:?}. Throttling for a minute.", e ); } thread::sleep(Duration::new(60, 0)); } } } } mod echo; pub use echo::EchoWorker; mod tex_to_html; pub use tex_to_html::TexToHtmlWorker; #[cfg(feature = "engrafo")] mod engrafo; #[cfg(feature = "engrafo")] pub use engrafo::EngrafoWorker;
use std::borrow::Cow; use std::error::Error; use std::fs::File; use std::io::{Read, Seek, SeekFrom, Write}; use std::ops::Deref; use std::path::Path; use std::thread; use std::time::Duration; use std::ffi::OsString; use tempdir::TempDir; use zmq::{Context, Message, Socket, SNDMORE}; pub trait Worker: Clone + Send { fn convert(&self, _: &Path) -> Result<File, Box<dyn Error>>; fn message_size(&self) -> usize; fn get_service(&self) -> &str; fn get_source_address(&self) -> Cow<str>; fn get_sink_address(&self) -> Cow<str>; fn pool_size(&self) -> usize { 1 } fn set_identity(&mut self, _identity: String) { unimplemented!() } fn get_identity(&self) -> &str { unimplemented!() } fn start(&mut self, limit: Option<usize>) -> Result<(), Box<dyn Error>> where Self: 'static + Sized, { let hostname = hostname::get().unwrap_or_else(|_| OsString::from("hostname")).into_string().unwrap(); match self.pool_size() { 1 => { self.set_identity(format!("{}:engrafo:1", hostname)); self.start_single(limit) } n => { let mut threads = Vec::new(); for thread in 1..=n { let thread_str = if thread < 10 { format!("0{}", thread) } else { thread.to_string() }; let identity_single = format!("{}:engrafo:{}", hostname, thread_str); let mut thread_self: Self = self.clone(); thread_self.set_identity(identity_single); threads.push(thread::spawn(move || { thread_self.start_single(limit).unwrap(); })); } for t in threads { t.join().unwrap(); } Ok(()) } } } fn start_single(&self, limit: Option<usize>) -> Result<(), Box<dyn Error>> { let mut work_counter = 0; let context_source = Context::new(); let source = context_source.socket(zmq::DEALER).unwrap(); source.set_identity(self.get_identity().as_bytes()).unwrap(); assert!(source.connect(&self.get_source_address()).is_ok()); let context_sink = Context::new(); let sink = context_sink.socket(zmq::PUSH).unwrap(); assert!(sink.connect(&self.get_sink_address()).is_ok());
fn receive_from_cortex( &self, input_tmpdir: &TempDir, source: &Socket, ) -> (Result<File, Box<dyn Error>>, String, usize, String) { let mut taskid_msg = Message::new(); let mut recv_msg = Message::new(); source.send(&self.get_service(), 0).unwrap(); source.recv(&mut taskid_msg, 0).unwrap(); let taskid = taskid_msg.as_str().unwrap(); let input_filepath = input_tmpdir.path().to_str().unwrap().to_string() + "/" + taskid + ".zip"; let mut file = File::create(input_filepath.clone()).unwrap(); let mut input_size = 0; loop { source.recv(&mut recv_msg, 0).unwrap(); if let Ok(written) = file.write(recv_msg.deref()) { input_size += written; } if !source.get_rcvmore().unwrap() { break; } } let file_result = if input_size > 0 { file.seek(SeekFrom::Start(0)).unwrap(); Ok(file) } else { Err(From::from("Input was empty.")) }; info!( target: &format!("{}:received", self.get_identity()), "task {}, read {} bytes from CorTeX.", taskid, input_size ); (file_result, input_filepath, input_size, taskid.to_string()) } fn respond_to_cortex( &self, file_result: Result<File, Box<dyn Error>>, input_size: usize, taskid: &str, sink: &Socket, ) { sink.send(self.get_identity(), SNDMORE).unwrap(); sink.send(self.get_service(), SNDMORE).unwrap(); sink.send(taskid, SNDMORE).unwrap(); match file_result { Ok(mut converted_file) => { let mut total_size = 0; loop { let message_size = self.message_size(); let mut data = vec![0; message_size]; let size = converted_file.read(&mut data).unwrap(); total_size += size; data.truncate(size); if size < message_size { sink.send(&data, 0).unwrap(); break; } else { sink.send(&data, SNDMORE).unwrap(); } } info!( target: &format!("{}:completed", self.get_identity()), " task {}, sent {} bytes back to CorTeX.", taskid, total_size ); } Err(e) => { sink.send(&Vec::new(), 0).unwrap(); if input_size == 0 { info!( target: &format!("{}:result", self.get_identity()), "Empty input. Throttling for a minute." 
); } else { info!( target: &format!("{}:result", self.get_identity()), "Conversion came back empty: {:?}. Throttling for a minute.", e ); } thread::sleep(Duration::new(60, 0)); } } } } mod echo; pub use echo::EchoWorker; mod tex_to_html; pub use tex_to_html::TexToHtmlWorker; #[cfg(feature = "engrafo")] mod engrafo; #[cfg(feature = "engrafo")] pub use engrafo::EngrafoWorker;
loop { let input_tmpdir = TempDir::new("cortex_task").unwrap(); let (file_result, input_filepath, input_size, taskid) = self.receive_from_cortex(&input_tmpdir, &source); let converted_result = if file_result.is_ok() { self.convert(Path::new(&input_filepath)) } else { file_result }; self.respond_to_cortex(converted_result, input_size, &taskid, &sink); input_tmpdir.close().unwrap(); work_counter += 1; if let Some(upper_bound) = limit { if work_counter >= upper_bound { thread::sleep(Duration::new(1, 0)); break; } } } Ok(()) }
function_block-function_prefix_line
[ { "content": "/// Transform the ZIP provided by cortex into a TempDir,\n\n/// for e.g. tools such as Engrafo that aren't ZIP-capable\n\npub fn extract_zip_to_tmpdir(path: &Path, tmpdir_prefix: &str) -> Result<TempDir, Box<dyn Error>> {\n\n let input_tmpdir = TempDir::new(tmpdir_prefix)?;\n\n let unpacked_dir_path = input_tmpdir.path().to_str().unwrap().to_string() + \"/\";\n\n\n\n // unpack the Zip file for engrafo\n\n let inputzip = File::open(&path)?;\n\n let mut input_archive = ZipArchive::new(inputzip)?;\n\n for i in 0..input_archive.len() {\n\n let mut file = input_archive.by_index(i)?;\n\n let extract_path = file.mangled_name();\n\n let extract_pathname = extract_path.as_path().display();\n\n let full_pathname = format!(\"{}{}\", unpacked_dir_path, extract_pathname);\n\n if (&*file.name()).ends_with('/') {\n\n create_dir_all(&full_pathname)?;\n\n } else {\n\n if let Some(p) = extract_path.parent() {\n\n if !p.exists() {\n\n let absolute_parent = format!(\"{}{}\", unpacked_dir_path, p.display());\n\n create_dir_all(&absolute_parent)?;\n\n }\n\n }\n\n let mut extracted_file = File::create(&full_pathname)?;\n\n copy(&mut file, &mut extracted_file)?;\n\n }\n\n }\n\n Ok(input_tmpdir)\n\n}\n\n\n", "file_path": "src/adaptor.rs", "rank": 1, "score": 116319.06141921555 }, { "content": "fn archive_directory(src_dir: &str) -> Result<File, Box<dyn Error>> {\n\n let method = METHOD_DEFLATED;\n\n\n\n let mut file = tempfile()?;\n\n\n\n let walkdir = WalkDir::new(src_dir.to_string());\n\n let it = walkdir.into_iter();\n\n\n\n zip_one_dir(&mut it.filter_map(Result::ok), src_dir, &mut file, method)?;\n\n\n\n file.seek(SeekFrom::Start(0))?;\n\n Ok(file)\n\n}\n\n\n", "file_path": "src/adaptor.rs", "rank": 2, "score": 83406.55936905215 }, { "content": "/// Initialize the logger with an appropriate level of verbosity\n\npub fn init(level: LevelFilter) -> Result<(), SetLoggerError> {\n\n log::set_logger(&LOGGER).unwrap();\n\n log::set_max_level(level);\n\n Ok(())\n\n}\n", 
"file_path": "src/logger.rs", "rank": 3, "score": 79784.16114103183 }, { "content": "/// Adaptor that turns an output temporary directory (assuming the filnema conventions are _already_ ollowed)\n\n/// into a ZIP file transmittable back to Cortex\n\npub fn archive_tmpdir_to_zip(tmpdir: TempDir) -> Result<File, Box<dyn Error>> {\n\n let dir_path = tmpdir.path().to_str().unwrap();\n\n archive_directory(dir_path)\n\n}\n\n\n\nconst METHOD_DEFLATED: zip::CompressionMethod = zip::CompressionMethod::Deflated;\n\n\n", "file_path": "src/adaptor.rs", "rank": 4, "score": 77042.71739121564 }, { "content": "#[test]\n\nfn mock_round_trip() {\n\n // Let's get a minimal ZMQ ventilator/sink pair to test the worker\n\n let test_payload = \"cortex peripherals - echo worker test\".to_string();\n\n let sink_test_payload = test_payload.clone();\n\n let vent_thread = thread::spawn(move || {\n\n let ventilator_context = zmq::Context::new();\n\n let ventilator = ventilator_context.socket(zmq::ROUTER).unwrap();\n\n let ventilator_address = \"tcp://127.0.0.1:51695\";\n\n assert!(ventilator.bind(&ventilator_address).is_ok());\n\n\n\n // We expect one request\n\n let mut msg = zmq::Message::new();\n\n let mut identity = zmq::Message::new();\n\n ventilator.recv(&mut identity, 0).unwrap();\n\n ventilator.recv(&mut msg, 0).unwrap();\n\n let service_name = msg.as_str().unwrap().to_string();\n\n assert!(service_name == \"echo_service\");\n\n\n\n ventilator.send(identity, SNDMORE).unwrap();\n\n ventilator.send(\"1\", SNDMORE).unwrap();\n", "file_path": "tests/echo_test.rs", "rank": 5, "score": 28529.788006511117 }, { "content": "#[test]\n\nfn unit_engrafo_test() {\n\n let worker = EngrafoWorker::default();\n\n // test we can convert a test doc\n\n let test_input_path = Path::new(\"tests/resources/1508.01222.zip\");\n\n let converted = worker.convert(&test_input_path);\n\n assert!(converted.is_ok());\n\n let mut zip_file = converted.unwrap();\n\n let mut contents = vec![];\n\n 
assert!(zip_file.read_to_end(&mut contents).is_ok());\n\n assert!(contents.len() > 1_000_000); // make sure we have a reasonably sized ZIP, as a basic sanity check\n\n}\n", "file_path": "tests/engrafo_test.rs", "rank": 6, "score": 28529.788006511117 }, { "content": "fn zip_one_dir<T>(\n\n it: &mut dyn Iterator<Item = DirEntry>,\n\n prefix: &str,\n\n writer: &mut T,\n\n method: zip::CompressionMethod,\n\n) -> zip::result::ZipResult<()>\n\nwhere\n\n T: Write + Seek,\n\n{\n\n let mut zip = zip::ZipWriter::new(writer);\n\n let options = FileOptions::default()\n\n .compression_method(method)\n\n .unix_permissions(0o755);\n\n\n\n let mut buffer = Vec::new();\n\n for entry in it {\n\n let path = entry.path();\n\n let name = path\n\n .strip_prefix(Path::new(prefix))\n\n .unwrap()\n", "file_path": "src/adaptor.rs", "rank": 7, "score": 27359.897215821456 }, { "content": "use std::borrow::Cow;\n\nuse std::fs::File;\n\nuse std::path::Path;\n\nuse std::error::Error;\n\nuse super::Worker;\n\n\n\n/// An echo worker for testing\n\n#[derive(Clone, Debug)]\n\npub struct EchoWorker {\n\n /// the usual\n\n pub service: String,\n\n /// the usual\n\n pub version: f32,\n\n /// the usual\n\n pub message_size: usize,\n\n /// the usual\n\n pub source: String,\n\n /// the usual\n\n pub sink: String,\n\n /// the usual\n", "file_path": "src/worker/echo.rs", "rank": 18, "score": 16664.17098668745 }, { "content": "use crate::adaptor;\n\n\n\n/// An echo worker for testing\n\n#[derive(Clone, Debug)]\n\npub struct EngrafoWorker {\n\n /// the usual\n\n pub service: String,\n\n /// the usual\n\n pub version: f32,\n\n /// the usual\n\n pub message_size: usize,\n\n /// the usual\n\n pub source: String,\n\n /// the usual\n\n pub sink: String,\n\n /// port to the source address\n\n pub source_port: usize,\n\n /// port to the sink address\n\n pub sink_port: usize,\n\n /// Allow for multiple parallel workers\n", "file_path": "src/worker/engrafo.rs", "rank": 19, "score": 16661.70347545447 }, { "content": " 
}\n\n fn get_sink_address(&self) -> Cow<str> {\n\n Cow::Borrowed(&self.sink)\n\n }\n\n fn message_size(&self) -> usize {\n\n self.message_size\n\n }\n\n\n\n fn convert(&self, path: &Path) -> Result<File, Box<dyn Error>> {\n\n File::open(path).map_err(Into::into)\n\n }\n\n fn set_identity(&mut self, identity: String) {\n\n self.identity = identity;\n\n }\n\n fn get_identity(&self) -> &str {\n\n &self.identity\n\n }\n\n}", "file_path": "src/worker/echo.rs", "rank": 20, "score": 16660.704034470917 }, { "content": " pub pool_size: usize,\n\n /// A uniquely identifying string, usually `hostname:engrafo:threadid`\n\n pub identity: String,\n\n}\n\nimpl Default for EngrafoWorker {\n\n fn default() -> EngrafoWorker {\n\n EngrafoWorker {\n\n service: \"engrafo\".to_string(),\n\n version: 2.0,\n\n message_size: 100_000,\n\n source: \"127.0.0.1\".to_string(),\n\n source_port: 51695,\n\n sink: \"127.0.0.1\".to_string(),\n\n sink_port: 51696,\n\n pool_size: 1,\n\n identity: \"unknown:engrafo:1\".to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/worker/engrafo.rs", "rank": 21, "score": 16660.016987957984 }, { "content": "impl Worker for EngrafoWorker {\n\n fn get_service(&self) -> &str {\n\n &self.service\n\n }\n\n fn get_source_address(&self) -> Cow<str> {\n\n Cow::Owned(format!(\"tcp://{}:{}\", self.source, self.source_port))\n\n }\n\n fn get_sink_address(&self) -> Cow<str> {\n\n Cow::Owned(format!(\"tcp://{}:{}\", self.sink, self.sink_port))\n\n }\n\n fn message_size(&self) -> usize {\n\n self.message_size\n\n }\n\n fn pool_size(&self) -> usize {\n\n self.pool_size\n\n }\n\n fn set_identity(&mut self, identity: String) {\n\n self.identity = identity;\n\n }\n\n fn get_identity(&self) -> &str {\n", "file_path": "src/worker/engrafo.rs", "rank": 22, "score": 16659.72910733305 }, { "content": " pub identity: String,\n\n}\n\nimpl Default for EchoWorker {\n\n fn default() -> EchoWorker {\n\n EchoWorker {\n\n service: \"echo_service\".to_string(),\n\n version: 0.1,\n\n 
message_size: 100_000,\n\n source: \"tcp://127.0.0.1:51695\".to_string(),\n\n sink: \"tcp://127.0.0.1:51696\".to_string(),\n\n identity: \"echo worker\".to_string(),\n\n }\n\n }\n\n}\n\nimpl Worker for EchoWorker {\n\n fn get_service(&self) -> &str {\n\n &self.service\n\n }\n\n fn get_source_address(&self) -> Cow<str> {\n\n Cow::Borrowed(&self.source)\n", "file_path": "src/worker/echo.rs", "rank": 23, "score": 16657.319969048614 }, { "content": " &self.identity\n\n }\n\n\n\n fn convert(&self, path: &Path) -> Result<File, Box<Error>> {\n\n let input_tmpdir = adaptor::extract_zip_to_tmpdir(path, \"engrafo_input\")?;\n\n let unpacked_dir_path = input_tmpdir.path().to_str().unwrap().to_string() + \"/\";\n\n let destination_tmpdir = TempDir::new(\"engrafo_output\").unwrap();\n\n let destination_dir_path = destination_tmpdir.path().to_str().unwrap();\n\n let tmp_dir_str = env::temp_dir().as_path().display().to_string();\n\n let docker_input_path = unpacked_dir_path.replace(&tmp_dir_str, \"/workdir\");\n\n let docker_output_path = destination_dir_path.replace(&tmp_dir_str, \"/workdir\");\n\n\n\n let cmd_result = Command::new(\"docker\")\n\n .arg(\"run\")\n\n .arg(\"-m\")\n\n .arg(\"4g\") // can be made customizeable based on architecture\n\n .arg(\"-v\")\n\n .arg(format!(\"{}:/workdir\", tmp_dir_str))\n\n .arg(\"-w\")\n\n .arg(\"/workdir\")\n", "file_path": "src/worker/engrafo.rs", "rank": 24, "score": 16650.894242914434 }, { "content": "#![cfg(feature = \"engrafo\")]\n\n// Copyright 2015 Deyan Ginev. See the LICENSE\n\n// file at the top-level directory of this distribution.\n\n//\n\n// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.\n\n// This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! 
a CorTeX worker for Engrafo, via a docker image\n\n\n\nuse std::borrow::Cow;\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::{Write};\n\nuse std::path::Path;\n\nuse std::process::Command;\n\nuse std::error::Error;\n\nuse tempdir::TempDir;\n\n\n\nuse super::Worker;\n", "file_path": "src/worker/engrafo.rs", "rank": 25, "score": 16650.82725024083 }, { "content": " .arg(\"arxivvanity/engrafo:2.0.0\")\n\n .arg(\"engrafo\")\n\n .arg(docker_input_path)\n\n .arg(docker_output_path)\n\n .output()\n\n .expect(\"failed to execute process engrafo docker process.\");\n\n\n\n // Package the output -- cortex requires a single ZIP return,\n\n // with all logging information stored in a \"cortex.log\" file at the ZIP's root.\n\n\n\n let log_name = format!(\"{}/cortex.log\", destination_dir_path);\n\n let cortex_log_path = Path::new(&log_name);\n\n {\n\n // write log file and close it before archiving.\n\n let mut log_file = File::create(&cortex_log_path)?;\n\n log_file.write_all(&cmd_result.stderr)?;\n\n log_file.write_all(&cmd_result.stdout)?;\n\n }\n\n\n\n // cleanup\n", "file_path": "src/worker/engrafo.rs", "rank": 26, "score": 16649.20606674296 }, { "content": " // By closing the `TempDir` explicitly, we can check that it has\n\n // been deleted successfully. 
If we don't close it explicitly,\n\n // the directory will still be deleted when `tmp_dir` goes out\n\n // of scope, but we won't know whether deleting the directory\n\n // succeeded.\n\n input_tmpdir.close().unwrap();\n\n\n\n adaptor::archive_tmpdir_to_zip(destination_tmpdir).map_err(Into::into)\n\n }\n\n}\n", "file_path": "src/worker/engrafo.rs", "rank": 27, "score": 16641.625743032608 }, { "content": " fn get_service(&self) -> &str {\n\n &self.service\n\n }\n\n fn get_source_address(&self) -> Cow<str> {\n\n Cow::Borrowed(&self.source)\n\n }\n\n fn get_sink_address(&self) -> Cow<str> {\n\n Cow::Borrowed(&self.sink)\n\n }\n\n fn message_size(&self) -> usize {\n\n self.message_size\n\n }\n\n fn get_identity(&self) -> &str { &self.identity }\n\n fn set_identity(&mut self, identity: String) { self.identity = identity; }\n\n\n\n fn convert(&self, path: &Path) -> Result<File, Box<dyn Error>> {\n\n let name = path.file_stem().unwrap().to_str().unwrap();\n\n let destination_path = env::temp_dir().to_str().unwrap().to_string() + \"/\" + name + \".zip\";\n\n // println!(\"Source {:?}\", path);\n\n Command::new(\"latexmlc\")\n", "file_path": "src/worker/tex_to_html.rs", "rank": 28, "score": 15717.99746999025 }, { "content": "use super::Worker;\n\nuse std::borrow::Cow;\n\nuse std::env;\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::path::Path;\n\nuse std::process::Command;\n\n\n\n/// A TeX to HTML conversion worker -- this is a demonstration only\n\n/// it lacks robustness guards\n\n/// see the Perl worker used in production for a full overviewthread::spawn(move || {\n\n/// https://github.com/dginev/latexml-plugin-cortex\n\n#[derive(Clone, Debug)]\n\npub struct TexToHtmlWorker {\n\n /// the usual\n\n pub service: String,\n\n /// the usual\n\n pub version: f32,\n\n /// the usual\n\n pub message_size: usize,\n", "file_path": "src/worker/tex_to_html.rs", "rank": 29, "score": 15715.590944732061 }, { "content": " /// the usual\n\n pub source: String,\n\n /// the 
usual\n\n pub sink: String,\n\n /// the usual\n\n pub identity: String,\n\n}\n\nimpl Default for TexToHtmlWorker {\n\n fn default() -> TexToHtmlWorker {\n\n TexToHtmlWorker {\n\n service: \"tex_to_html\".to_string(),\n\n version: 0.1,\n\n message_size: 100_000,\n\n source: \"tcp://127.0.0.1:51695\".to_string(),\n\n sink: \"tcp://127.0.0.1:51696\".to_string(),\n\n identity: String::new()\n\n }\n\n }\n\n}\n\nimpl Worker for TexToHtmlWorker {\n", "file_path": "src/worker/tex_to_html.rs", "rank": 30, "score": 15713.613477214012 }, { "content": " .arg(\"--whatsin\")\n\n .arg(\"archive\")\n\n .arg(\"--whatsout\")\n\n .arg(\"archive\")\n\n .arg(\"--format\")\n\n .arg(\"html5\")\n\n .arg(\"--pmml\")\n\n .arg(\"--cmml\")\n\n .arg(\"--mathtex\")\n\n .arg(\"--preload\")\n\n .arg(\"[ids]latexml.sty\")\n\n .arg(\"--nodefaultresources\")\n\n .arg(\"--inputencoding\")\n\n .arg(\"iso-8859-1\")\n\n .arg(\"--timeout\")\n\n .arg(\"300\")\n\n .arg(\"--log\")\n\n .arg(\"cortex.log\")\n\n .arg(\"--destination\")\n\n .arg(destination_path.clone())\n\n .arg(path.to_string_lossy().to_string())\n\n .output()\n\n .unwrap_or_else(|e| panic!(\"failed to execute process: {}\", e));\n\n\n\n // println!(\"Dest: {:?}\", destination_path);\n\n File::open(destination_path.clone()).map_err(Into::into)\n\n }\n\n}\n", "file_path": "src/worker/tex_to_html.rs", "rank": 31, "score": 15704.742235799113 }, { "content": " let taskid_str = taskid_msg.as_str().unwrap();\n\n assert!(taskid_str == \"1\");\n\n\n\n let mut recv_msg = zmq::Message::new();\n\n sink.recv(&mut recv_msg, 0).unwrap();\n\n let recv_payload = recv_msg.as_str().unwrap();\n\n assert!(recv_payload == sink_test_payload);\n\n });\n\n\n\n // Start up an echo worker\n\n let mut worker = EchoWorker::default();\n\n // Perform a single echo task\n\n assert!(worker.start(Some(1)).is_ok());\n\n\n\n assert!(vent_thread.join().is_ok());\n\n assert!(sink_thread.join().is_ok());\n\n}\n", "file_path": "tests/echo_test.rs", "rank": 32, "score": 
14.733080975336398 }, { "content": " ventilator.send(&test_payload, 0).unwrap();\n\n });\n\n\n\n let sink_thread = thread::spawn(move || {\n\n let sink_context = zmq::Context::new();\n\n let sink = sink_context.socket(zmq::PULL).unwrap();\n\n let sink_address = \"tcp://127.0.0.1:51696\";\n\n assert!(sink.bind(&sink_address).is_ok());\n\n\n\n let mut id_msg = zmq::Message::new();\n\n sink.recv(&mut id_msg, 0).unwrap();\n\n let _identity = id_msg.as_str().unwrap();\n\n\n\n let mut service_msg = zmq::Message::new();\n\n sink.recv(&mut service_msg, 0).unwrap();\n\n let service_name = service_msg.as_str().unwrap();\n\n assert!(service_name == \"echo_service\");\n\n\n\n let mut taskid_msg = zmq::Message::new();\n\n sink.recv(&mut taskid_msg, 0).unwrap();\n", "file_path": "tests/echo_test.rs", "rank": 33, "score": 14.046000450292402 }, { "content": " .to_str()\n\n .unwrap();\n\n\n\n if path.is_file() {\n\n zip.start_file(name, options)?;\n\n let mut f = File::open(path)?;\n\n\n\n f.read_to_end(&mut buffer)?;\n\n zip.write_all(&*buffer)?;\n\n buffer.clear();\n\n }\n\n }\n\n zip.finish()?;\n\n Result::Ok(())\n\n}\n", "file_path": "src/adaptor.rs", "rank": 34, "score": 12.206794872908855 }, { "content": "use pericortex::worker::{EchoWorker, Worker};\n\nuse std::thread;\n\nuse zmq::SNDMORE;\n\n\n\n#[test]\n", "file_path": "tests/echo_test.rs", "rank": 35, "score": 10.192978871090572 }, { "content": "//! Simple adaptors to relax the CorTeX conentions for agnostic third-party tooling\n\nuse std::error::Error;\n\nuse std::fs::{create_dir_all, File};\n\nuse std::io::copy;\n\nuse std::io::prelude::*;\n\nuse std::io::SeekFrom;\n\nuse std::io::{Seek, Write};\n\nuse std::iter::Iterator;\n\nuse std::path::Path;\n\n\n\nuse tempdir::TempDir;\n\nuse tempfile::tempfile;\n\n\n\nuse walkdir::{DirEntry, WalkDir};\n\nuse zip::write::FileOptions;\n\nuse zip::ZipArchive;\n\n\n\n/// Transform the ZIP provided by cortex into a TempDir,\n\n/// for e.g. 
tools such as Engrafo that aren't ZIP-capable\n", "file_path": "src/adaptor.rs", "rank": 36, "score": 9.939784270392513 }, { "content": "#![cfg(feature = \"engrafo\")]\n\nuse std::io::Read;\n\nuse std::path::Path;\n\n\n\nuse pericortex::worker::{EngrafoWorker, Worker};\n\n\n\n#[test]\n", "file_path": "tests/engrafo_test.rs", "rank": 37, "score": 7.96691217455288 }, { "content": " };\n\n // Following the reporting syntax at: http://dlmf.nist.gov/LaTeXML/manual/errorcodes/\n\n // let severity = if category_object.starts_with(\"Fatal:\") {\n\n // \"\"\n\n // } else {\n\n // match record.level() {\n\n // Level::Info => \"Info\",\n\n // Level::Warn => \"Warn\",\n\n // Level::Error => \"Error\",\n\n // Level::Debug => \"Debug\",\n\n // Level::Trace => \"Trace\",\n\n // }\n\n // };\n\n\n\n let message = format!(\"{}\\t\", category_object);\n\n\n\n let painted_message = match record.level() {\n\n Level::Info => Green.paint(message),\n\n Level::Warn => Yellow.paint(message),\n\n Level::Error => Red.paint(message),\n", "file_path": "src/logger.rs", "rank": 38, "score": 6.738168486573943 }, { "content": " match write!(&mut ::std::io::stderr(), $($arg)* ) {\n\n Ok(_) => {},\n\n Err(x) => panic!(\"Unable to write to stderr: {}\", x),\n\n }\n\n })\n\n);\n\n\n\nimpl log::Log for RtxLogger {\n\n fn enabled(&self, metadata: &Metadata) -> bool {\n\n metadata.level() <= max_level()\n\n }\n\n\n\n fn log(&self, record: &Record) {\n\n if self.enabled(record.metadata()) {\n\n let record_target = record.target();\n\n let details = record.args();\n\n let category_object = if record_target.is_empty() {\n\n \"\" // \"unknown:unknown\" ???\n\n } else {\n\n record_target\n", "file_path": "src/logger.rs", "rank": 39, "score": 6.628584319784631 }, { "content": " Level::Debug => Style::default().paint(message),\n\n _ => White.paint(message),\n\n }\n\n .to_string()\n\n + &details.to_string();\n\n\n\n println_stderr!(\n\n \"\\r[{}] {}\",\n\n Local::now().format(\"%Y-%m-%d %H:%M:%S\"),\n\n 
painted_message\n\n );\n\n }\n\n }\n\n\n\n fn flush(&self) {}\n\n}\n\n\n", "file_path": "src/logger.rs", "rank": 40, "score": 4.901642539400877 }, { "content": "// Copyright 2015 Deyan Ginev. See the LICENSE\n\n// file at the top-level directory of this distribution.\n\n//\n\n// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.\n\n// This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! # The CorTeX library in Rust\n\n//! The original library can be found at https://github.com/dginev/CorTeX\n\n\n\n#![doc(html_root_url = \"https://dginev.github.io/rust-cortex-peripherals/\")]\n\n#![doc(\n\n html_logo_url = \"https://raw.githubusercontent.com/dginev/rust-cortex/master/public/img/logo.jpg\"\n\n)]\n\n#![deny(missing_docs)]\n\n\n\n#[macro_use]\n\nextern crate log;\n\n\n\npub mod adaptor;\n\npub mod logger;\n\npub mod worker;\n", "file_path": "src/lib.rs", "rank": 41, "score": 4.167587412296118 }, { "content": "//! This module is a trimmed-down copy of rtx_core::util::logger,\n\n//! which is still waiting to get released as a crate...\n\n//! 
maybe there is a simple logger crate that achieves this exact behavior?\n\nuse ansi_term::Colour::{Green, Red, White, Yellow};\n\nuse ansi_term::Style;\n\nuse chrono::Local;\n\nuse log::max_level;\n\nuse log::{Level, LevelFilter, Metadata, Record, SetLoggerError};\n\n\n", "file_path": "src/logger.rs", "rank": 42, "score": 4.152411170015447 }, { "content": "![CorTeX Peripherals](./public/img/logo.jpg) Peripherals\n\n======\n\n\n\n**Worker executables for [CorTeX](https://github.com/dginev/CorTeX) - a general processing framework for scientific documents**\n\n\n\n[![Build Status](https://github.com/dginev/CorTeX-Peripherals/workflows/CI/badge.svg)](https://github.com/dginev/CorTeX-Peripherals/actions?query=workflow%3ACI) [![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://raw.githubusercontent.com/dginev/CorTeX-Peripherals/master/LICENSE) ![version](https://img.shields.io/badge/version-0.2.3-orange.svg)\n\n\n\n\n\n\n\n1. [Engrafo](https://github.com/arxiv-vanity/engrafo) - tex-to-html conversion via latexml, with advanced styling and UX\n\n - uses a dedicated `docker` image which is an installation prerequisite.\n\n - builds under the `engrafo` feature flag, via `cargo test --features=engrafo`\n\n - starting a worker: `cargo run --release --features=engrafo --bin engrafo_worker`\n", "file_path": "README.md", "rank": 43, "score": 2.4814114156474743 } ]
Rust
chain/src/action.rs
monacohq/rust-eos
a54e873baaf21db9268e9537856b04357583d8bf
use alloc::string::{String, ToString}; use alloc::{format, vec}; use alloc::vec::Vec; use core::str::FromStr; use codec::{Encode, Decode}; use crate::{ AccountName, ActionName, Asset, Digest, NumBytes, PermissionLevel, Read, SerializeData, Write }; #[cfg(feature = "std")] use serde::{ Serialize, Deserialize, de::Error as DeError, ser::{Error as SerError, Serializer, SerializeStruct} }; #[derive(Clone, Debug, Read, Write, NumBytes, PartialEq, Default, Encode, Decode, Digest, SerializeData)] #[eosio_core_root_path = "crate"] #[repr(C)] pub struct Action { pub account: AccountName, pub name: ActionName, pub authorization: Vec<PermissionLevel>, pub data: Vec<u8>, } #[cfg(feature = "std")] impl<'de> serde::Deserialize<'de> for Action { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::de::Deserializer<'de> { #[derive(Debug)] struct VisitorAction; impl<'de> serde::de::Visitor<'de> for VisitorAction { type Value = Action; fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "string or a struct, but this is: {:?}", self) } fn visit_map<D>(self, mut map: D) -> Result<Self::Value, D::Error> where D: serde::de::MapAccess<'de>, { let mut account = AccountName::default(); let mut name = ActionName::default(); let mut authorization: Vec<PermissionLevel> = vec![]; let mut data: Vec<u8> = vec![]; while let Some(field) = map.next_key()? 
{ match field { "account" => { account = map.next_value()?; } "name" => { name = map.next_value()?; } "authorization" => { authorization= map.next_value()?; } "hex_data" => { let val: String= map.next_value()?; data = hex::decode(val).map_err(D::Error::custom)?; } _ => { let _: serde_json::Value = map.next_value()?; continue; } } } let action = Action { account, name, authorization, data, }; Ok(action) } } deserializer.deserialize_any(VisitorAction) } } impl Action { pub fn new(account: AccountName, name: ActionName, authorization: Vec<PermissionLevel>, data: Vec<u8>) -> Self { Action { account, name, authorization, data } } pub fn from_str<T: AsRef<str>, S: SerializeData>( account: T, name: T, authorization: Vec<PermissionLevel>, action_data: S ) -> crate::Result<Self> { let account = FromStr::from_str(account.as_ref()).map_err(crate::Error::from)?; let name = FromStr::from_str(name.as_ref()).map_err(crate::Error::from)?; let data = action_data.to_serialize_data()?; Ok(Action { account, name, authorization, data }) } pub fn transfer<T: AsRef<str>>(from: T, to: T, quantity: T, memo: T) -> crate::Result<Action> { let permission_level = PermissionLevel::from_str(from.as_ref(), "active")?; let action_transfer = ActionTransfer::from_str(from, to, quantity, memo)?; Action::from_str( "eosio.token", "transfer", vec![permission_level], action_transfer ) } } impl core::fmt::Display for Action { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "account: {}\n\ name: {}\n\ authorization: {}\n\ hex_data: {}", self.account, self.name, self.authorization.iter().map(|item| format!("{}", item)).collect::<String>(), hex::encode(&self.data), ) } } #[cfg(feature = "std")] impl serde::ser::Serialize for Action { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { let mut state = serializer.serialize_struct("Action", 5)?; state.serialize_field("account", &self.account)?; state.serialize_field("name", &self.name)?; 
state.serialize_field("authorization", &self.authorization)?; state.serialize_field("hex_data", &hex::encode(&self.data))?; match (self.account.to_string().as_str(), self.name.to_string().as_str()) { ("eosio.token", "transfer") => { let data = ActionTransfer::read(&self.data, &mut 0).map_err(|_| S::Error::custom("Action read from data failed."))?; state.serialize_field("data", &data)?; }, _ => {} } state.end() } } #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Debug, Read, Write, NumBytes, Default, SerializeData)] #[eosio_core_root_path = "crate"] pub struct ActionTransfer { pub from: AccountName, pub to: AccountName, pub quantity: Asset, pub memo: String, } impl ActionTransfer { pub fn new(from: AccountName, to: AccountName, quantity: Asset, memo: String) -> Self { ActionTransfer { from, to, quantity, memo } } pub fn from_str<T: AsRef<str>>(from: T, to: T, quantity: T, memo: T) -> crate::Result<Self> { let from = FromStr::from_str(from.as_ref()).map_err(crate::Error::from)?; let to = FromStr::from_str(to.as_ref()).map_err(crate::Error::from)?; let quantity = FromStr::from_str(quantity.as_ref()).map_err(crate::Error::from)?; let memo = memo.as_ref().to_string(); Ok(ActionTransfer { from, to, quantity, memo }) } } pub trait ToAction: Write + NumBytes { const NAME: u64; #[inline] fn to_action( &self, account: AccountName, authorization: Vec<PermissionLevel>, ) -> crate::Result<Action> { let mut data = vec![0_u8; self.num_bytes()]; self.write(&mut data, &mut 0).map_err(crate::Error::BytesWriteError)?; Ok(Action { account, name: Self::NAME.into(), authorization, data, }) } } #[cfg(test)] mod tests { use hex; use super::*; #[test] fn action_hash_should_work() { let action = Action { account: FromStr::from_str("eosio.token").unwrap(), name: FromStr::from_str("issue").unwrap(), authorization: vec![PermissionLevel { actor: FromStr::from_str("eosio").unwrap(), permission: FromStr::from_str("active").unwrap(), }], data: 
hex::decode("0000000000ea305500625e5a1809000004454f530000000004696e6974").unwrap(), }; let hash = action.digest().unwrap(); assert_eq!(hash, "0221f3da945a3de738cdb744f7963a6a3486097ab42436d1f4e13a1ade502bb9".into()); } #[test] fn action_transfer_serialize_should_work() { let action = Action::transfer("testa", "testb", "1.0000 EOS", "a memo").ok().unwrap(); let data = action.to_serialize_data(); assert!(data.is_ok()); let data = data.unwrap(); assert_eq!( hex::encode(data), "00a6823403ea3055000000572d3ccdcd01000000000093b1ca00000000a8ed323227000000000093b1ca000000008093b1ca102700000000000004454f53000000000661206d656d6f" ); } #[test] fn action_deserialize_should_be_ok() { let action_str = r#" { "account": "eosio.token", "name": "transfer", "authorization": [ { "actor": "junglefaucet", "permission": "active" } ], "data": { "from": "junglefaucet", "receiver": "megasuper333", "stake_net_quantity": "1.0000 EOS", "stake_cpu_quantity": "1.0000 EOS", "transfer": 1 }, "hex_data": "9015d266a9c8a67e30c6b8aa6a6c989240420f000000000004454f5300000000134e657720425020526567697374726174696f6e" }"#; let action: Result<Action, _> = serde_json::from_str(action_str); assert!(action.is_ok()); let hash = action.unwrap().digest().unwrap(); assert_eq!(hash, "eaa3b4bf845a1b41668ab7ca49fb5644fc91a6c0156dfd33911b4ec69d2e41d6".into()) } }
use alloc::string::{String, ToString}; use alloc::{format, vec}; use alloc::vec::Vec; use core::str::FromStr; use codec::{Encode, Decode}; use crate::{ AccountName, ActionName, Asset, Digest, NumBytes, PermissionLevel, Read, SerializeData, Write }; #[cfg(feature = "std")] use serde::{ Serialize, Deserialize, de::Error as DeError, ser::{Error as SerError, Serializer, SerializeStruct} }; #[derive(Clone, Debug, Read, Write, NumBytes, PartialEq, Default, Encode, Decode, Digest, SerializeData)] #[eosio_core_root_path = "crate"] #[repr(C)] pub struct Action { pub account: AccountName, pub name: ActionName, pub authorization: Vec<PermissionLevel>, pub data: Vec<u8>, } #[cfg(feature = "std")] impl<'de> serde::Deserialize<'de> for Action { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::de::Deserializer<'de> { #[derive(Debug)] struct VisitorAction; impl<'de> serde::de::Visitor<'de> for VisitorAction { type Value = Action; fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "string or a struct, but this is: {:?}", self) } fn visit_map<D>(self, mut map: D) -> Result<Self::Value, D::Error> where D: serde::de::MapAccess<'de>, { let mut account = AccountName::default(); let mut name = ActionName::default(); let mut authorization: Vec<PermissionLevel> = vec![]; let mut data: Vec<u8> = vec![]; while let Some(field) = map.next_key()? 
{ match field { "account" => { account = map.next_value()?; } "name" => { name = map.next_value()?; } "authorization" => { authorization= map.next_value()?; } "hex_data" => { let val: String= map.next_value()?; data = hex::decode(val).map_err(D::Error::custom)?; } _ => { let _: serde_json::Value = map.next_value()?; continue; } } } let action = Action { account, name, authorization, data, }; Ok(action) } } deserializer.deserialize_any(VisitorAction) } } impl Action { pub fn new(account: AccountName, name: ActionName, authorization: Vec<PermissionLevel>, data: Vec<u8>) -> Self { Action { account, name, authorization, data } } pub fn from_str<T: AsRef<str>, S: SerializeData>( account: T, name: T, authorization: Vec<PermissionLevel>, action_data: S ) -> crate::Result<Self> { let account = FromStr::from_str(account.as_ref()).map_err(crate::Error::from)?; let name = FromStr::from_str(name.as_ref()).map_err(crate::Error::from)?; let data = action_data.to_serialize_data()?; Ok(Action { account, name, authorization, data }) } pub fn transfer<T: AsRef<str>>(from:
o.token", "transfer", vec![permission_level], action_transfer ) } } impl core::fmt::Display for Action { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "account: {}\n\ name: {}\n\ authorization: {}\n\ hex_data: {}", self.account, self.name, self.authorization.iter().map(|item| format!("{}", item)).collect::<String>(), hex::encode(&self.data), ) } } #[cfg(feature = "std")] impl serde::ser::Serialize for Action { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { let mut state = serializer.serialize_struct("Action", 5)?; state.serialize_field("account", &self.account)?; state.serialize_field("name", &self.name)?; state.serialize_field("authorization", &self.authorization)?; state.serialize_field("hex_data", &hex::encode(&self.data))?; match (self.account.to_string().as_str(), self.name.to_string().as_str()) { ("eosio.token", "transfer") => { let data = ActionTransfer::read(&self.data, &mut 0).map_err(|_| S::Error::custom("Action read from data failed."))?; state.serialize_field("data", &data)?; }, _ => {} } state.end() } } #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Debug, Read, Write, NumBytes, Default, SerializeData)] #[eosio_core_root_path = "crate"] pub struct ActionTransfer { pub from: AccountName, pub to: AccountName, pub quantity: Asset, pub memo: String, } impl ActionTransfer { pub fn new(from: AccountName, to: AccountName, quantity: Asset, memo: String) -> Self { ActionTransfer { from, to, quantity, memo } } pub fn from_str<T: AsRef<str>>(from: T, to: T, quantity: T, memo: T) -> crate::Result<Self> { let from = FromStr::from_str(from.as_ref()).map_err(crate::Error::from)?; let to = FromStr::from_str(to.as_ref()).map_err(crate::Error::from)?; let quantity = FromStr::from_str(quantity.as_ref()).map_err(crate::Error::from)?; let memo = memo.as_ref().to_string(); Ok(ActionTransfer { from, to, quantity, memo }) } } pub trait ToAction: Write + NumBytes { const NAME: 
u64; #[inline] fn to_action( &self, account: AccountName, authorization: Vec<PermissionLevel>, ) -> crate::Result<Action> { let mut data = vec![0_u8; self.num_bytes()]; self.write(&mut data, &mut 0).map_err(crate::Error::BytesWriteError)?; Ok(Action { account, name: Self::NAME.into(), authorization, data, }) } } #[cfg(test)] mod tests { use hex; use super::*; #[test] fn action_hash_should_work() { let action = Action { account: FromStr::from_str("eosio.token").unwrap(), name: FromStr::from_str("issue").unwrap(), authorization: vec![PermissionLevel { actor: FromStr::from_str("eosio").unwrap(), permission: FromStr::from_str("active").unwrap(), }], data: hex::decode("0000000000ea305500625e5a1809000004454f530000000004696e6974").unwrap(), }; let hash = action.digest().unwrap(); assert_eq!(hash, "0221f3da945a3de738cdb744f7963a6a3486097ab42436d1f4e13a1ade502bb9".into()); } #[test] fn action_transfer_serialize_should_work() { let action = Action::transfer("testa", "testb", "1.0000 EOS", "a memo").ok().unwrap(); let data = action.to_serialize_data(); assert!(data.is_ok()); let data = data.unwrap(); assert_eq!( hex::encode(data), "00a6823403ea3055000000572d3ccdcd01000000000093b1ca00000000a8ed323227000000000093b1ca000000008093b1ca102700000000000004454f53000000000661206d656d6f" ); } #[test] fn action_deserialize_should_be_ok() { let action_str = r#" { "account": "eosio.token", "name": "transfer", "authorization": [ { "actor": "junglefaucet", "permission": "active" } ], "data": { "from": "junglefaucet", "receiver": "megasuper333", "stake_net_quantity": "1.0000 EOS", "stake_cpu_quantity": "1.0000 EOS", "transfer": 1 }, "hex_data": "9015d266a9c8a67e30c6b8aa6a6c989240420f000000000004454f5300000000134e657720425020526567697374726174696f6e" }"#; let action: Result<Action, _> = serde_json::from_str(action_str); assert!(action.is_ok()); let hash = action.unwrap().digest().unwrap(); assert_eq!(hash, "eaa3b4bf845a1b41668ab7ca49fb5644fc91a6c0156dfd33911b4ec69d2e41d6".into()) } }
T, to: T, quantity: T, memo: T) -> crate::Result<Action> { let permission_level = PermissionLevel::from_str(from.as_ref(), "active")?; let action_transfer = ActionTransfer::from_str(from, to, quantity, memo)?; Action::from_str( "eosi
function_block-random_span
[ { "content": "/// Directly encode a slice as base58\n\npub fn encode_slice(data: &[u8]) -> String {\n\n encode_iter(data.iter().cloned())\n\n}\n\n\n", "file_path": "keys/src/base58.rs", "rank": 0, "score": 273043.4003143676 }, { "content": "/// Obtain a string with the base58check encoding of a slice\n\n/// (Tack the first 4 256-digits of the object's Bitcoin hash onto the end.)\n\npub fn check_encode_slice(data: &[u8]) -> String {\n\n let checksum = sha256d::Hash::hash(&data);\n\n encode_iter(\n\n data.iter()\n\n .cloned()\n\n .chain(checksum[0..4].iter().cloned())\n\n )\n\n}\n\n\n", "file_path": "keys/src/base58.rs", "rank": 1, "score": 269084.2311956325 }, { "content": "#[inline]\n\npub fn name_to_string(name: u64) -> String {\n\n String::from_utf8_lossy(&name_to_utf8(name))\n\n .trim_matches('.')\n\n .into()\n\n}\n\n\n\n/// Converts an EOSIO name into an array of UTF-8 characters.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::name_to_utf8;\n\n/// assert_eq!(name_to_utf8(6138663591592764928), *b\"eosio.token..\");\n\n/// assert_eq!(name_to_utf8(6138663581940940800), *b\"eosio.bpay...\");\n\n/// assert_eq!(name_to_utf8(0), *b\".............\");\n\n/// assert_eq!(name_to_utf8(614251535012020768), *b\"123451234512.\");\n\n/// ```\n", "file_path": "chain/src/names.rs", "rank": 2, "score": 259794.0761416483 }, { "content": "#[inline]\n\npub fn symbol_to_string(name: u64) -> String {\n\n String::from_utf8_lossy(&symbol_to_utf8(name)).trim().into()\n\n}\n\n\n\n/// Converts an EOSIO symbol into an array of UTF-8 characters.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::symbol_to_utf8;\n\n/// assert_eq!(symbol_to_utf8(1397703940), *b\"EOS \");\n\n/// assert_eq!(symbol_to_utf8(5138124851399447552), *b\"TESTING\");\n\n/// assert_eq!(symbol_to_utf8(361956332544), *b\"TGFT \");\n\n/// assert_eq!(symbol_to_utf8(1398362882), *b\"SYS \");\n\n/// assert_eq!(symbol_to_utf8(0), *b\" \");\n\n/// ```\n", "file_path": "chain/src/symbol.rs", 
"rank": 3, "score": 245504.49238985975 }, { "content": "/// Obtain a string with the base58check encoding of a slice\n\n/// (Tack the first 4 256-digits of the object's Bitcoin hash onto the end.)\n\npub fn check_encode_slice_to_fmt(fmt: &mut fmt::Formatter, data: &[u8]) -> fmt::Result {\n\n let checksum = sha256d::Hash::hash(&data);\n\n let iter = data.iter()\n\n .cloned()\n\n .chain(checksum[0..4].iter().cloned());\n\n format_iter(fmt, iter)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use hex::decode as hex_decode;\n\n\n\n #[test]\n\n fn test_base58_encode() {\n\n // Basics\n\n assert_eq!(&encode_slice(&[0][..]), \"1\");\n\n assert_eq!(&encode_slice(&[1][..]), \"2\");\n\n assert_eq!(&encode_slice(&[58][..]), \"21\");\n\n assert_eq!(&encode_slice(&[13, 36][..]), \"211\");\n", "file_path": "keys/src/base58.rs", "rank": 4, "score": 234624.67344165192 }, { "content": "fn encode_iter<I>(data: I) -> String\n\n where\n\n I: Iterator<Item=u8> + Clone,\n\n{\n\n let mut ret = String::new();\n\n format_iter(&mut ret, data).expect(\"writing into string shouldn't fail\");\n\n ret\n\n}\n\n\n\n\n", "file_path": "keys/src/base58.rs", "rank": 5, "score": 225036.6446784019 }, { "content": "/// Decode base58-encoded string into a byte vector\n\npub fn from(data: &str) -> Result<Vec<u8>, Error> {\n\n // 11/15 is just over log_256(58)\n\n let mut scratch = vec![0u8; 1 + data.len() * 11 / 15];\n\n // Build in base 256\n\n for d58 in data.bytes() {\n\n // Compute \"X = X * 58 + next_digit\" in base 256\n\n if d58 as usize > BASE58_DIGITS.len() {\n\n return Err(Error::BadByte(d58));\n\n }\n\n let mut carry = match BASE58_DIGITS[d58 as usize] {\n\n Some(d58) => d58 as u32,\n\n None => { return Err(Error::BadByte(d58)); }\n\n };\n\n for d256 in scratch.iter_mut().rev() {\n\n carry += *d256 as u32 * 58;\n\n *d256 = carry as u8;\n\n carry /= 256;\n\n }\n\n assert_eq!(carry, 0);\n\n }\n\n\n\n // Copy leading zeroes directly\n\n let mut ret: Vec<u8> = 
data.bytes().take_while(|&x| x == BASE58_CHARS[0])\n\n .map(|_| 0)\n\n .collect();\n\n // Copy rest of string\n\n ret.extend(scratch.into_iter().skip_while(|&x| x == 0));\n\n Ok(ret)\n\n}\n\n\n", "file_path": "keys/src/base58.rs", "rank": 6, "score": 223295.2343156985 }, { "content": "pub trait SerializeData: Write + NumBytes {\n\n fn to_serialize_data(&self) -> crate::Result<Vec<u8>> {\n\n let mut data = vec![0u8; self.num_bytes()];\n\n self.write(&mut data, &mut 0).map_err(crate::Error::BytesWriteError)?;\n\n Ok(data.to_vec())\n\n }\n\n}\n\n\n", "file_path": "chain/src/lib.rs", "rank": 7, "score": 221022.76636444347 }, { "content": "pub fn merkle(ids: Vec<Checksum256>) -> crate::Result<Checksum256> {\n\n let mut ids = ids;\n\n\n\n if 0 == ids.len() {\n\n return Ok(Default::default());\n\n }\n\n\n\n while ids.len() > 1 {\n\n if ids.len() % 2 != 0 {\n\n ids.push(ids[ids.len() - 1]);\n\n }\n\n\n\n for i in 0..(ids.len() / 2) {\n\n ids[i] = Checksum256::hash(make_canonical_pair(&ids[2 * i], &ids[(2 * i) + 1]))?;\n\n }\n\n\n\n ids.resize(ids.len() / 2, Default::default());\n\n }\n\n\n\n Ok(ids[0])\n\n}\n\n\n", "file_path": "chain/src/merkle.rs", "rank": 8, "score": 220737.4424538433 }, { "content": "/// Decode a base58check-encoded string\n\npub fn from_check(data: &str) -> Result<Vec<u8>, Error> {\n\n let mut ret: Vec<u8> = from(data)?;\n\n if ret.len() < 4 {\n\n return Err(Error::TooShort(ret.len()));\n\n }\n\n let ck_start = ret.len() - 4;\n\n let expected = LittleEndian::read_u32(&sha256d::Hash::hash(&ret[..ck_start])[..4]);\n\n let actual = LittleEndian::read_u32(&ret[ck_start..(ck_start + 4)]);\n\n if expected != actual {\n\n return Err(Error::BadChecksum(expected, actual));\n\n }\n\n\n\n ret.truncate(ck_start);\n\n Ok(ret)\n\n}\n\n\n", "file_path": "keys/src/base58.rs", "rank": 9, "score": 220260.73299613723 }, { "content": "pub fn get_proof(position: usize, ids: Vec<Checksum256>) -> crate::Result<Vec<Checksum256>> {\n\n let mut ids = ids;\n\n let mut position 
= position;\n\n let mut paths: Vec<Checksum256> = Vec::new();\n\n let is_right_node = |i| i % 2 == 1;\n\n\n\n if 0 == ids.len() {\n\n return Ok(Default::default());\n\n }\n\n\n\n while ids.len() > 1 {\n\n if ids.len() % 2 != 0 {\n\n ids.push(ids[ids.len() - 1]);\n\n }\n\n\n\n if is_right_node(position) {\n\n paths.push(make_canonical_left(&ids[position - 1]));\n\n } else {\n\n paths.push(make_canonical_right(&ids[position + 1]));\n\n }\n", "file_path": "chain/src/merkle.rs", "rank": 10, "score": 220179.70965677738 }, { "content": "#[inline]\n\npub fn name_from_str(value: &str) -> Result<u64, ParseNameError> {\n\n name_from_chars(value.chars())\n\n}\n\n\n\n/// Attempts to create an EOSIO name from an `Iterator`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::{name_from_chars, ParseNameError};\n\n/// assert_eq!(name_from_chars(\"\".chars()), Ok(0));\n\n/// assert_eq!(name_from_chars(\"a\".chars()), Ok(3458764513820540928));\n\n/// assert_eq!(name_from_chars(\"123456789012\".chars()), Err(ParseNameError::BadChar('6')));\n\n/// assert_eq!(name_from_chars(\"123451234512\".chars()), Ok(614251535012020768));\n\n/// assert_eq!(name_from_chars(\"1234512345123\".chars()), Err(ParseNameError::TooLong));\n\n/// assert_eq!(name_from_chars(\"eosio.token\".chars()), Ok(6138663591592764928));\n\n/// assert_eq!(name_from_chars(\"eosio.bpay\".chars()), Ok(6138663581940940800));\n\n/// assert_eq!(name_from_chars(\"A\".chars()), Err(ParseNameError::BadChar('A')));\n\n/// assert_eq!(name_from_chars(\"TEST\".chars()), Err(ParseNameError::BadChar('T')));\n\n/// ```\n", "file_path": "chain/src/names.rs", "rank": 11, "score": 186065.48557275656 }, { "content": "#[inline]\n\npub fn name_to_utf8(name: u64) -> [u8; 13] {\n\n let mut chars = [b'.'; 13]; // TODO: make this 12 instead of 13\n\n let mut t = name;\n\n for (i, c) in chars.iter_mut().rev().enumerate() {\n\n let index = t & if i == 0 { 15 } else { 31 };\n\n let index = usize::try_from(index).unwrap_or_default();\n\n 
if let Some(v) = NAME_UTF8_CHARS.get(index) {\n\n *c = *v;\n\n }\n\n t >>= if i == 0 { 4 } else { 5 };\n\n }\n\n chars\n\n}\n\n\n", "file_path": "chain/src/names.rs", "rank": 12, "score": 184958.85913752075 }, { "content": "pub trait Digest: Clone + Write + NumBytes {\n\n fn digest(&self) -> crate::Result<Checksum256> {\n\n Checksum256::hash(self.clone())\n\n }\n\n}\n", "file_path": "chain/src/lib.rs", "rank": 13, "score": 180728.9376201569 }, { "content": "pub fn get_abi_json_to_bin<Args: serde::Serialize>(\n\n code: impl Into<AccountName>,\n\n action: Actions,\n\n args: Args\n\n) -> Result<GetAbiJsonToBinParams<Args>, ParseNameError>\n\n{\n\n let action: ActionName = match action {\n\n Actions::Close => ActionName::from_str(\"close\")?,\n\n Actions::Create => ActionName::from_str(\"create\")?,\n\n Actions::Transfer => ActionName::from_str(\"transfer\")?,\n\n Actions::Open => ActionName::from_str(\"open\")?,\n\n Actions::Retire => ActionName::from_str(\"retire\")?,\n\n Actions::Issue => ActionName::from_str(\"issue\")?,\n\n };\n\n Ok(GetAbiJsonToBinParams { code: code.into(), action, args })\n\n}\n\n\n\n// defined six action\n\npub enum Actions {\n\n Close,\n", "file_path": "rpc/src/chain/abi_json_to_bin.rs", "rank": 14, "score": 170846.04644052914 }, { "content": "#[inline]\n\n#[proc_macro_derive(SerializeData, attributes(eosio_core_root_path))]\n\npub fn derive_serialize_data(input: TokenStream) -> TokenStream {\n\n crate::derive_serialize_data::expand(input)\n\n}\n\n\n\n/// Derive the `Write` trait\n", "file_path": "eosio-core-derive/src/lib.rs", "rank": 15, "score": 170739.6910925728 }, { "content": "#[inline]\n\npub fn u64_or_string<'de, D>(deserializer: D) -> Result<u64, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_any(U64OrStringVisitor)\n\n}\n\n\n\n/// TODO docs\n\npub struct U64OrStringVisitor;\n\n\n\nimpl<'de> serde::de::Visitor<'de> for U64OrStringVisitor {\n\n type Value = u64;\n\n\n\n fn expecting(\n\n &self,\n\n 
formatter: &mut std::fmt::Formatter,\n\n ) -> std::fmt::Result {\n\n formatter.write_str(\"a number or string\")\n\n }\n\n\n", "file_path": "chain/src/json.rs", "rank": 17, "score": 164931.20502418422 }, { "content": "#[inline]\n\npub fn f64_or_string<'de, D>(deserializer: D) -> Result<f64, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_any(F64OrStringVisitor)\n\n}\n\n\n\n/// TODO docs\n\npub struct F64OrStringVisitor;\n\n\n\nimpl<'de> serde::de::Visitor<'de> for F64OrStringVisitor {\n\n type Value = f64;\n\n\n\n fn expecting(\n\n &self,\n\n formatter: &mut std::fmt::Formatter,\n\n ) -> std::fmt::Result {\n\n formatter.write_str(\"a number or string\")\n\n }\n\n\n", "file_path": "chain/src/json.rs", "rank": 18, "score": 164931.20502418422 }, { "content": "pub fn get_currency_stats<C: Into<AccountName>, S: Into<Symbol>>(\n\n code: C,\n\n symbol: S,\n\n) -> GetCurrencyStatsParams {\n\n GetCurrencyStatsParams {\n\n code: code.into(),\n\n symbol: symbol.into().code().to_string(),\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct CurrencyStats {\n\n pub supply: String,\n\n pub max_supply: String,\n\n pub issuer: AccountName,\n\n}\n\n\n\n#[cfg(feature = \"use-hyper\")]\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "rpc/src/chain/get_currency_stats.rs", "rank": 19, "score": 164251.32161413677 }, { "content": "fn format_iter<I, W>(writer: &mut W, data: I) -> Result<(), fmt::Error>\n\n where\n\n I: Iterator<Item=u8> + Clone,\n\n W: fmt::Write\n\n{\n\n let mut ret = SmallVec::new();\n\n\n\n let mut leading_zero_count = 0;\n\n let mut leading_zeroes = true;\n\n // Build string in little endian with 0-58 in place of characters...\n\n for d256 in data {\n\n let mut carry = d256 as usize;\n\n if leading_zeroes && carry == 0 {\n\n leading_zero_count += 1;\n\n } else {\n\n leading_zeroes = false;\n\n }\n\n\n\n for ch in ret.iter_mut() {\n\n let new_ch = *ch as usize * 256 + carry;\n", "file_path": "keys/src/base58.rs", 
"rank": 20, "score": 158931.03177250386 }, { "content": "// given an unsigned integral number return the smallest\n\n// power-of-2 which is greater than or equal to the given number\n\n//\n\n// @param value - an unsigned integral\n\n// @return - the minimum power-of-2 which is >= value\n\nfn next_power_of_2(mut value: u64) -> u64 {\n\n value -= 1;\n\n value |= value >> 1;\n\n value |= value >> 2;\n\n value |= value >> 4;\n\n value |= value >> 8;\n\n value |= value >> 16;\n\n value |= value >> 32;\n\n value += 1;\n\n value\n\n}\n\n\n", "file_path": "chain/src/incremental_merkle.rs", "rank": 21, "score": 153576.26986386633 }, { "content": "#[inline]\n\npub fn name_from_chars<I>(chars: I) -> Result<u64, ParseNameError>\n\nwhere\n\n I: Iterator<Item = char>,\n\n{\n\n let mut value = 0;\n\n for (i, c) in chars.enumerate() {\n\n if i == NAME_LEN_MAX {\n\n return Err(ParseNameError::TooLong);\n\n } else if c == '.' {\n\n continue;\n\n } else if let Some(symbol) = char_to_symbol(c) {\n\n let mut n = symbol as u64;\n\n if i < NAME_LEN_MAX {\n\n n &= 31;\n\n n <<= 64 - 5 * (i + 1);\n\n } else {\n\n n &= 15;\n\n }\n\n value |= n;\n\n } else {\n\n return Err(ParseNameError::BadChar(c));\n\n }\n\n }\n\n\n\n Ok(value)\n\n}\n\n\n", "file_path": "chain/src/names.rs", "rank": 22, "score": 152151.26062099525 }, { "content": "pub fn verify_proof(paths: &Vec<Checksum256>, leaf: Checksum256, expected_root: Checksum256) -> bool {\n\n let mut current: Checksum256 = leaf;\n\n let mut left: Checksum256;\n\n let mut right: Checksum256;\n\n\n\n for path in paths.iter() {\n\n if is_canonical_right(&path) {\n\n left = current;\n\n right = *path;\n\n } else {\n\n left = *path;\n\n right = current;\n\n }\n\n left = make_canonical_left(&left);\n\n right = make_canonical_right(&right);\n\n\n\n match Checksum256::hash(make_canonical_pair(&left, &right)) {\n\n Ok(hash) => {\n\n current = hash;\n\n },\n", "file_path": "chain/src/merkle.rs", "rank": 23, "score": 150120.98702272854 }, { "content": 
"#[inline]\n\npub fn symbol_precision(value: u64) -> u8 {\n\n u8::try_from(value & 255).unwrap_or_default()\n\n}\n\n\n\n/// Gets an EOSIO symbol's code.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::symbol_code;\n\n/// assert_eq!(symbol_code(1397703940), 5459781); // 4,EOS\n\n/// assert_eq!(symbol_code(1398362882), 5462355); // 2,SYS\n\n/// assert_eq!(symbol_code(5138124851399447552), 20070800200779092); // 0,TESTING\n\n/// ```\n\n#[inline]\n\npub const fn symbol_code(value: u64) -> u64 {\n\n value >> 8\n\n}\n\n\n\n/// Gets the length of an EOSIO symbol's code\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::symbol_code_length;\n\n/// assert_eq!(symbol_code_length(1397703940), 3); // 4,EOS\n\n/// assert_eq!(symbol_code_length(1398362882), 3); // 2,SYS\n\n/// assert_eq!(symbol_code_length(5138124851399447552), 7); // 0,TESTING\n\n/// ```\n", "file_path": "chain/src/symbol.rs", "rank": 24, "score": 147779.7416882244 }, { "content": "fn string_or_struct<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\n where\n\n T: Deserialize<'de> + FromStr<Err = core::convert::Infallible>,\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrStruct<T>(PhantomData<fn() -> T>);\n\n\n\n impl<'de, T> Visitor<'de> for StringOrStruct<T>\n\n where\n\n T: Deserialize<'de> + FromStr<Err = core::convert::Infallible>,\n\n {\n\n type Value = T;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"string or map\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<T, E>\n\n where\n\n E: de::Error,\n", "file_path": "rpc/src/chain/get_block.rs", "rank": 25, "score": 145481.31119977243 }, { "content": "#[inline]\n\npub fn symbol_to_utf8(value: u64) -> [u8; SYMBOL_LEN_MAX] {\n\n let mask: u64 = 0xff;\n\n let mut chars = [b' '; SYMBOL_LEN_MAX];\n\n let mut v = value;\n\n for c in &mut chars {\n\n v >>= 8;\n\n if v == 0 {\n\n break;\n\n }\n\n *c = u8::try_from(v & mask).unwrap_or_default();\n\n }\n\n 
chars\n\n}\n\n\n\n/// Gets an EOSIO symbol's precision.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::symbol_precision;\n\n/// assert_eq!(symbol_precision(1397703940), 4); // 4,EOS\n\n/// assert_eq!(symbol_precision(1398362882), 2); // 2,SYS\n\n/// assert_eq!(symbol_precision(5138124851399447552), 0); // 0,TESTING\n\n/// ```\n", "file_path": "chain/src/symbol.rs", "rank": 26, "score": 139612.74295735208 }, { "content": "#[inline]\n\n#[proc_macro_derive(Digest, attributes(eosio_core_root_path))]\n\npub fn derive_digest(input: TokenStream) -> TokenStream {\n\n crate::derive_digest::expand(input)\n\n}\n\n\n\n/// Derive the `SerializeData` trait\n", "file_path": "eosio-core-derive/src/lib.rs", "rank": 27, "score": 138885.6877076417 }, { "content": "#[inline]\n\n#[proc_macro_derive(Read, attributes(eosio_core_root_path))]\n\npub fn derive_read(input: TokenStream) -> TokenStream {\n\n crate::derive_read::expand(input)\n\n}\n\n\n\n/// Derive the `NumBytes` trait\n", "file_path": "eosio-core-derive/src/lib.rs", "rank": 28, "score": 138849.7648195163 }, { "content": "/// Expand input\n\npub fn expand(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let root = crate::root_path(&input);\n\n\n\n let name = input.ident;\n\n\n\n let mut generics = input.generics;\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param.bounds.push(parse_quote!(#root));\n\n }\n\n }\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let call_site = ::proc_macro2::Span::call_site();\n\n let reads = match input.data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => {\n\n let field_reads = fields.named.iter().map(|f| {\n\n let ident = &f.ident;\n", "file_path": "eosio-core-derive/src/derive_read.rs", "rank": 29, "score": 138845.41920019785 }, { "content": "#[inline]\n\n#[proc_macro_derive(Write, 
attributes(eosio_core_root_path))]\n\npub fn derive_write(input: TokenStream) -> TokenStream {\n\n crate::derive_write::expand(input)\n\n}\n\n\n\n/// Derive the `Read` trait\n", "file_path": "eosio-core-derive/src/lib.rs", "rank": 30, "score": 138823.0406291605 }, { "content": "/// Expand input\n\npub fn expand(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let root = crate::root_path(&input);\n\n\n\n let name = input.ident;\n\n\n\n let mut generics = input.generics;\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param.bounds.push(parse_quote!(#root::Write));\n\n }\n\n }\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let call_site = ::proc_macro2::Span::call_site();\n\n let var = quote!(self);\n\n let writes = match input.data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => {\n\n let recurse = fields.named.iter().map(|f| {\n", "file_path": "eosio-core-derive/src/derive_write.rs", "rank": 31, "score": 138818.75089367226 }, { "content": "#[inline]\n\npub fn bool_or_integer<'de, D>(deserializer: D) -> Result<bool, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_any(BoolOrIntegerVisitor)\n\n}\n\n\n\n/// TODO docs\n\npub struct BoolOrIntegerVisitor;\n\n\n\nimpl<'de> serde::de::Visitor<'de> for BoolOrIntegerVisitor {\n\n type Value = bool;\n\n\n\n fn expecting(\n\n &self,\n\n formatter: &mut std::fmt::Formatter,\n\n ) -> std::fmt::Result {\n\n formatter.write_str(\"a bool or integer\")\n\n }\n\n\n", "file_path": "chain/src/json.rs", "rank": 32, "score": 131017.06750429736 }, { "content": "pub fn get_block<B: ToString>(block_num_or_id: B) -> GetBlockParams {\n\n GetBlockParams {\n\n block_num_or_id: block_num_or_id.to_string(),\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct GetBlock {\n\n pub timestamp: String,\n\n pub 
producer: AccountName,\n\n pub confirmed: u16,\n\n pub previous: String,\n\n pub transaction_mroot: String,\n\n pub action_mroot: String,\n\n pub schedule_version: u16,\n\n pub new_producers: Option<ProducerSchedule>,\n\n #[serde(skip)]\n\n pub header_extensions: Vec<Extension>,\n\n pub producer_signature: String,\n\n pub transactions: Vec<Transaction>,\n", "file_path": "rpc/src/chain/get_block.rs", "rank": 33, "score": 128639.76676752475 }, { "content": "/// Read bytes.\n\npub trait Read: Sized {\n\n /// Read bytes.\n\n fn read(bytes: &[u8], pos: &mut usize) -> Result<Self, ReadError>;\n\n}\n\n\n\n/// Error that can be returned when reading bytes.\n\n#[derive(Debug, Clone, Copy)]\n\npub enum ReadError {\n\n /// Not enough bytes.\n\n NotEnoughBytes,\n\n /// Not support message type.\n\n NotSupportMessageType,\n\n}\n\n\n", "file_path": "chain/src/bytes.rs", "rank": 34, "score": 126715.63635942822 }, { "content": "/// Write bytes.\n\npub trait Write: Sized {\n\n /// Write bytes.\n\n fn write(\n\n &self,\n\n bytes: &mut [u8],\n\n pos: &mut usize,\n\n ) -> Result<(), WriteError>;\n\n}\n\n\n\n/// Error that can be returned when writing bytes.\n\n#[derive(Debug, Clone, Copy)]\n\npub enum WriteError {\n\n /// Not enough space in the vector.\n\n NotEnoughSpace,\n\n /// Failed to parse an integer.\n\n TryFromIntError,\n\n}\n\n\n\nmacro_rules! 
impl_num {\n\n ($($t:ty, $s:expr)*) => ($(\n", "file_path": "chain/src/bytes.rs", "rank": 35, "score": 126405.02253837534 }, { "content": "fn is_canonical_left(val: &Checksum256) -> bool {\n\n (val.hash0() & 0x0000000000000080u64) == 0\n\n}\n\n\n", "file_path": "chain/src/merkle.rs", "rank": 36, "score": 121648.51682583094 }, { "content": "fn is_canonical_right(val: &Checksum256) -> bool {\n\n (val.hash0() & 0x0000000000000080u64) != 0\n\n}\n\n\n", "file_path": "chain/src/merkle.rs", "rank": 37, "score": 121648.51682583094 }, { "content": "struct NameVisitor<\n\n T: FromStr<Err = ParseNameError> + From<u64> + core::fmt::Display,\n\n>(core::marker::PhantomData<T>);\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl<'de, T> serde::de::Visitor<'de> for NameVisitor<T>\n\n where\n\n T: FromStr<Err = ParseNameError> + From<u64> + core::fmt::Display,\n\n{\n\n type Value = T;\n\n\n\n #[inline]\n\n fn expecting(\n\n &self,\n\n formatter: &mut core::fmt::Formatter,\n\n ) -> core::fmt::Result {\n\n formatter.write_str(\"an EOSIO name string or number\")\n\n }\n\n\n\n #[inline]\n", "file_path": "chain/src/names.rs", "rank": 38, "score": 120741.0396632931 }, { "content": "#[inline]\n\npub fn symbol_from_str(\n\n precision: u8,\n\n value: &str,\n\n) -> Result<u64, ParseSymbolError> {\n\n symbol_from_chars(precision, value.chars())\n\n}\n\n\n\n/// Attempts to create an EOSIO symbol from an `Iterator`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::{symbol_from_chars, ParseSymbolError};\n\n/// assert_eq!(symbol_from_chars(4, \"EOS\".chars()), Ok(1397703940));\n\n/// assert_eq!(symbol_from_chars(0, \"TGFT\".chars()), Ok(361956332544));\n\n/// assert_eq!(symbol_from_chars(2, \"SYS\".chars()), Ok(1398362882));\n\n/// assert_eq!(symbol_from_chars(4, \"TSt\".chars()), Err(ParseSymbolError::BadChar('t')));\n\n/// assert_eq!(symbol_from_chars(0, \"TESTING\".chars()), Ok(5138124851399447552));\n\n/// assert_eq!(symbol_from_chars(0, \"TESTINGG\".chars()), 
Err(ParseSymbolError::TooLong));\n\n/// ```\n", "file_path": "chain/src/symbol.rs", "rank": 39, "score": 120053.99055564075 }, { "content": "fn make_canonical_right(val: &Checksum256) -> Checksum256 {\n\n let mut canonical_r: Checksum256 = *val;\n\n canonical_r.set_hash0(canonical_r.hash0() | 0x0000000000000080u64);\n\n canonical_r\n\n}\n\n\n", "file_path": "chain/src/merkle.rs", "rank": 40, "score": 119993.87388621713 }, { "content": "fn make_canonical_left(val: &Checksum256) -> Checksum256 {\n\n let mut canonical_l: Checksum256 = *val;\n\n canonical_l.set_hash0(canonical_l.hash0() & 0xFFFFFFFFFFFFFF7Fu64);\n\n canonical_l\n\n}\n\n\n", "file_path": "chain/src/merkle.rs", "rank": 41, "score": 119993.87388621713 }, { "content": "pub trait CheckedMul<Other = Self>: Sized {\n\n type Output;\n\n fn checked_mul(self, other: Other) -> Self::Output;\n\n}\n\n\n", "file_path": "chain/src/ops.rs", "rank": 42, "score": 119213.0866550644 }, { "content": "pub trait CheckedAdd<Other = Self>: Sized {\n\n type Output;\n\n fn checked_add(self, other: Other) -> Self::Output;\n\n}\n\n\n", "file_path": "chain/src/ops.rs", "rank": 43, "score": 119213.0866550644 }, { "content": "pub trait CheckedDiv<Other = Self>: Sized {\n\n type Output;\n\n fn checked_div(self, other: Other) -> Self::Output;\n\n}\n\n\n", "file_path": "chain/src/ops.rs", "rank": 44, "score": 119213.0866550644 }, { "content": "pub trait CheckedSub<Other = Self>: Sized {\n\n type Output;\n\n fn checked_sub(self, other: Other) -> Self::Output;\n\n}\n\n\n", "file_path": "chain/src/ops.rs", "rank": 45, "score": 119213.0866550644 }, { "content": "pub trait CheckedRem<Other = Self>: Sized {\n\n type Output;\n\n fn checked_rem(self, other: Other) -> Self::Output;\n\n}\n", "file_path": "chain/src/ops.rs", "rank": 46, "score": 119213.0866550644 }, { "content": "#[inline]\n\npub fn symbol_from_chars<I>(\n\n precision: u8,\n\n chars: I,\n\n) -> Result<u64, ParseSymbolError>\n\nwhere\n\n I: Iterator<Item = char>,\n\n{\n\n // TODO 
check precision. what is max precision?\n\n let mut result: u64 = 0;\n\n for (i, c) in chars.enumerate() {\n\n if i == SYMBOL_LEN_MAX {\n\n return Err(ParseSymbolError::TooLong);\n\n } else if c < 'A' || c > 'Z' {\n\n return Err(ParseSymbolError::BadChar(c));\n\n } else {\n\n result |= (c as u64) << (8 * (i + 1));\n\n }\n\n }\n\n\n\n // TODO check if zero, IsEmpty error\n", "file_path": "chain/src/symbol.rs", "rank": 47, "score": 118340.5609491183 }, { "content": "pub fn get_table_rows<\n\n C: Into<AccountName>,\n\n S: Into<ScopeName>,\n\n T: Into<TableName>,\n\n>(\n\n code: C,\n\n scope: S,\n\n table: T,\n\n) -> GetTableRowsParams {\n\n GetTableRowsParams {\n\n code: code.into(),\n\n scope: scope.into(),\n\n table: table.into(),\n\n json: true,\n\n lower_bound: None,\n\n upper_bound: None,\n\n limit: None,\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\npub struct GetTableRows<Row> {\n\n pub rows: Vec<Row>,\n\n pub more: bool,\n\n}\n", "file_path": "rpc/src/chain/get_table_rows.rs", "rank": 48, "score": 113322.69443354834 }, { "content": "pub fn get_currency_balance<\n\n C: Into<AccountName>,\n\n A: Into<AccountName>,\n\n S: Into<Symbol>,\n\n>(\n\n code: C,\n\n account: A,\n\n symbol: Option<S>,\n\n) -> GetCurrencyBalanceParams {\n\n GetCurrencyBalanceParams {\n\n code: code.into(),\n\n account: account.into(),\n\n symbol: symbol.map(|s| s.into().code().to_string() ),\n\n }\n\n}\n\n\n\npub type GetCurrencyBalance = Vec<String>;\n\n\n\n#[cfg(feature = \"use-hyper\")]\n\n#[cfg(test)]\n", "file_path": "rpc/src/chain/get_currency_balance.rs", "rank": 49, "score": 113322.69443354834 }, { "content": "/// Computes RIPEMD-160 cryptographic hash of key\n\npub fn ripemd160(msg: &[u8]) -> H160 {\n\n let mut engine = ripemd160::Hash::engine();\n\n engine.input(msg);\n\n ripemd160::Hash::from_engine(engine).into_inner().into()\n\n}\n", "file_path": "keys/src/hash.rs", "rank": 50, "score": 111798.77965277247 }, { "content": "/// Vector-like object 
that holds the first 100 elements on the stack. If more space is needed it\n\n/// will be allocated on the heap.\n\nstruct SmallVec<T> {\n\n len: usize,\n\n stack: [T; 100],\n\n heap: Vec<T>,\n\n}\n\n\n\nimpl<T: Default + Copy> SmallVec<T> {\n\n pub fn new() -> SmallVec<T> {\n\n SmallVec {\n\n len: 0,\n\n stack: [T::default(); 100],\n\n heap: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn push(&mut self, val: T) {\n\n if self.len < 100 {\n\n self.stack[self.len] = val;\n\n self.len += 1;\n\n } else {\n", "file_path": "keys/src/base58.rs", "rank": 51, "score": 108710.06861259186 }, { "content": "pub fn endian_reverse_u64(x: u64) -> u64 {\n\n ((x >> 0x38) & 0xFF)\n\n | (((x >> 0x30) & 0xFF) << 0x08)\n\n | (((x >> 0x28) & 0xFF) << 0x10)\n\n | (((x >> 0x20) & 0xFF) << 0x18)\n\n | (((x >> 0x18) & 0xFF) << 0x20)\n\n | (((x >> 0x10) & 0xFF) << 0x28)\n\n | (((x >> 0x08) & 0xFF) << 0x30)\n\n | (((x) & 0xFF) << 0x38)\n\n}\n\n\n", "file_path": "chain/src/utils/bitutil.rs", "rank": 52, "score": 108582.09400854477 }, { "content": "#[inline]\n\npub fn symbol_code_length(symbol: u64) -> usize {\n\n let mut sym = symbol;\n\n sym >>= 8; // skip precision\n\n let mut len = 0;\n\n while sym & 255 > 0 && len <= SYMBOL_LEN_MAX {\n\n len += 1;\n\n sym >>= 8;\n\n }\n\n len\n\n}\n\n\n\n/// Stores information about a symbol, the symbol can be 7 characters long.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy, Default, Read, Write, NumBytes, Hash, PartialOrd, Ord)]\n\n#[cfg_attr(feature = \"std\", derive(Deserialize, Serialize))]\n\n#[eosio_core_root_path = \"crate\"]\n\npub struct Symbol(u64);\n\n\n\nimpl Symbol {\n\n /// Construct a new symbol given a value.\n\n #[inline]\n", "file_path": "chain/src/symbol.rs", "rank": 53, "score": 108582.09400854477 }, { "content": "pub fn endian_reverse_u32(x: u32) -> u32 {\n\n ((x >> 0x18) & 0xFF)\n\n | (((x >> 0x10) & 0xFF) << 0x08)\n\n | (((x >> 0x08) & 0xFF) << 0x10)\n\n | (((x) & 0xFF) << 0x18)\n\n}\n", "file_path": "chain/src/utils/bitutil.rs", "rank": 54, 
"score": 108582.09400854477 }, { "content": "#[proc_macro_derive(Fetch, attributes(api))]\n\npub fn derive_show(item: TokenStream) -> TokenStream {\n\n // parse the whole token tree\n\n let input = parse_macro_input!(item as DeriveInput);\n\n let struct_name = &input.ident;\n\n let generics = &input.generics;\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n // get api attribute and paranmeters\n\n let mut returns = String::new();\n\n let mut path = String::new();\n\n let mut method = String::new();\n\n input.attrs.iter().for_each(|attr| {\n\n match attr.parse_meta() {\n\n Ok(Meta::List(ref list)) => {\n\n if !list.path.is_ident(\"api\") {\n\n panic!(\"cannot find api attribute\");\n\n }\n\n\n\n list.nested.iter().for_each(|nest| {\n\n match nest {\n", "file_path": "rpc/rpc_codegen/src/lib.rs", "rank": 55, "score": 104237.76210366395 }, { "content": "pub fn derive_table(input: TokenStream) -> TokenStream {\n\n crate::derive_table::expand(input)\n\n}\n\n\n\n/// The default root path using the `eosio` crate.\n\n#[cfg(feature = \"internal-use-only-root-path-is-eosio\")]\n\nconst DEFAULT_ROOT_PATH: &str = \"::eosio\";\n\n\n\n/// The default root path using the `eosio_core` crate.\n\n#[cfg(not(feature = \"internal-use-only-root-path-is-eosio\"))]\n\nconst DEFAULT_ROOT_PATH: &str = \"::eosio_core\";\n\n\n\n/// Get the root path for types/traits.\n\npub(crate) fn root_path(input: &DeriveInput) -> Path {\n\n let litstr = input\n\n .attrs\n\n .iter()\n\n .fold(None, |acc, attr| match attr.parse_meta() {\n\n Ok(meta) => {\n\n let name = meta.path().get_ident();\n", "file_path": "eosio-core-derive/src/lib.rs", "rank": 56, "score": 104237.76210366395 }, { "content": "pub fn expand(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let eosio_core = crate::root_path(&input);\n\n let name = input.ident.clone();\n\n\n\n let mut generics = input.generics.clone();\n\n for param in &mut 
generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param.bounds.push(parse_quote!(#eosio_core::Read));\n\n }\n\n }\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let (is_singleton, table_name) = input.attrs.iter().fold((false, None), |(a, b), attr| {\n\n match attr.parse_meta() {\n\n Ok(meta) => {\n\n let name = meta.path().get_ident().as_ref().expect(\"please add table name.\").to_string();\n\n if name == \"table_name\" {\n\n if b.is_some() {\n\n panic!(\"only 1 table_name attribute allowed per struct\");\n", "file_path": "eosio-core-derive/src/derive_table.rs", "rank": 57, "score": 104237.76210366395 }, { "content": "pub fn make_canonical_pair(l: &Checksum256, r: &Checksum256) -> (Checksum256, Checksum256) {\n\n (make_canonical_left(l), make_canonical_right(r))\n\n}\n\n\n", "file_path": "chain/src/merkle.rs", "rank": 58, "score": 103323.60530878254 }, { "content": "/// Expand input\n\npub fn expand(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let root = crate::root_path(&input);\n\n\n\n let name = input.ident;\n\n\n\n let mut generics = input.generics;\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param.bounds.push(parse_quote!(NumBytes));\n\n }\n\n }\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let call_site = ::proc_macro2::Span::call_site();\n\n let var = quote!(self);\n\n let add_to_count = match input.data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => {\n\n let recurse = fields.named.iter().map(|f| {\n", "file_path": "eosio-core-derive/src/derive_num_bytes.rs", "rank": 59, "score": 102902.81189470866 }, { "content": "#[inline]\n\n#[proc_macro_derive(NumBytes, attributes(eosio_core_root_path))]\n\npub fn derive_num_bytes(input: TokenStream) -> TokenStream {\n\n 
crate::derive_num_bytes::expand(input)\n\n}\n\n\n\n/// TODO docs\n\n#[inline]\n\n#[proc_macro_derive(\n\n Table,\n\n attributes(table_name, primary, secondary, singleton)\n\n)]\n", "file_path": "eosio-core-derive/src/lib.rs", "rank": 60, "score": 102902.81189470866 }, { "content": "pub fn push_transaction(signed_trx: SignedTransaction) -> PushTransactionParams {\n\n PushTransactionParams {\n\n signatures: signed_trx.signatures.iter().map(|sig| sig.to_string()).collect(),\n\n compression: \"none\".to_string(),\n\n packed_context_free_data: \"\".to_string(),\n\n packed_trx: hex::encode(&signed_trx.trx.to_serialize_data().expect(\"failed to serialize signed transaction data.\")),\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct PushTransaction {\n\n pub transaction_id: String,\n\n pub processed: TransactionTrace,\n\n}\n\n\n\n/// https://github.com/EOSIO/eos/blob/c3817b3f965aaf3d7ac3be5809893ef17aa770f6/libraries/chain/include/eosio/chain/trace.hpp#L53\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct TransactionTrace {\n\n id: String,\n\n block_num: u32,\n", "file_path": "rpc/src/chain/push_transaction.rs", "rank": 61, "score": 101619.02107971006 }, { "content": "#[inline]\n\npub fn bool_to_u8<S>(x: &bool, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let num = if *x { 1 } else { 0 };\n\n s.serialize_u8(num)\n\n}\n\n\n\n/// TODO docs\n", "file_path": "chain/src/json.rs", "rank": 62, "score": 95221.88161855892 }, { "content": "/// Converts a character to a symbol.\n\nfn char_to_symbol(c: char) -> Option<char> {\n\n if c >= 'a' && c <= 'z' {\n\n ::core::char::from_u32((c as u32 - 'a' as u32) + 6)\n\n } else if c >= '1' && c <= '5' {\n\n ::core::char::from_u32((c as u32 - '1' as u32) + 1)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n/// Converts an EOSIO name value into a string.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::name_to_string;\n\n/// use std::str::FromStr;\n\n/// use 
eos_chain::ActionName;\n\n/// assert_eq!(name_to_string(6138663577826885632), \"eosio\");\n\n/// assert_eq!(name_to_string(6138663581940940800), \"eosio.bpay\");\n\n/// assert_eq!(name_to_string(0), \"\");\n\n/// assert_eq!(name_to_string(614251535012020768), \"123451234512\");\n\n/// ```\n", "file_path": "chain/src/names.rs", "rank": 63, "score": 94508.95356709186 }, { "content": "// Given a power-of-2 (assumed correct) return the number of leading zeros\n\n//\n\n// This is a classic count-leading-zeros in parallel without the necessary\n\n// math to make it safe for anything that is not already a power-of-2\n\n//\n\n// @param value - and integral power-of-2\n\n// @return the number of leading zeros\n\nfn clz_power_2(value: u64) -> usize {\n\n let mut lz: usize = 64;\n\n\n\n if value != 0 { lz -= 1; }\n\n if (value & 0x00000000FFFFFFFF_u64) != 0 { lz -= 32; }\n\n if (value & 0x0000FFFF0000FFFF_u64) != 0 { lz -= 16; }\n\n if (value & 0x00FF00FF00FF00FF_u64) != 0 { lz -= 8; }\n\n if (value & 0x0F0F0F0F0F0F0F0F_u64) != 0 { lz -= 4; }\n\n if (value & 0x3333333333333333_u64) != 0 { lz -= 2; }\n\n if (value & 0x5555555555555555_u64) != 0 { lz -= 1; }\n\n\n\n lz\n\n}\n\n\n", "file_path": "chain/src/incremental_merkle.rs", "rank": 64, "score": 93734.69971311733 }, { "content": "use crate::proc_macro::TokenStream;\n\nuse quote::quote;\n\nuse syn::{parse_macro_input, DeriveInput};\n\n\n\npub(crate) fn expand(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let struct_name = &input.ident;\n\n let trait_root_path = crate::root_path(&input);\n\n\n\n // split generics into parts\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n\n\n let serialize_data_impl = quote! 
{\n\n impl #impl_generics #trait_root_path::SerializeData for #struct_name #ty_generics #where_clause\n\n {}\n\n };\n\n serialize_data_impl.into()\n\n}", "file_path": "eosio-core-derive/src/derive_serialize_data.rs", "rank": 65, "score": 78602.9395626934 }, { "content": "struct Params {\n\n account_name: AccountName,\n\n}\n", "file_path": "rpc/src/chain/get_code.rs", "rank": 66, "score": 69784.53959438519 }, { "content": "struct Params {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n limit: Option<u32>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n lower_bound: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n json: Option<bool>,\n\n}\n", "file_path": "rpc/src/chain/get_producers.rs", "rank": 67, "score": 69784.53959438519 }, { "content": "#[cfg(feature = \"std\")]\n\nstruct BlockTimestampVisitor;\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl<'de> ::serde::de::Visitor<'de> for BlockTimestampVisitor {\n\n type Value = BlockTimestamp;\n\n\n\n #[inline]\n\n fn expecting(\n\n &self,\n\n formatter: &mut core::fmt::Formatter,\n\n ) -> core::fmt::Result {\n\n formatter.write_str(\"a second timestamp as a number or string\")\n\n }\n\n\n\n #[inline]\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: ::serde::de::Error,\n\n {\n\n match value.parse::<u32>() {\n", "file_path": "chain/src/block_timestamp.rs", "rank": 68, "score": 68879.27228091602 }, { "content": "struct Params {\n\n code: AccountName,\n\n action: ActionName,\n\n binargs: String,\n\n}\n", "file_path": "rpc/src/chain/abi_bin_to_json.rs", "rank": 69, "score": 68874.63800306445 }, { "content": "struct Params {\n\n block_num_or_id: String,\n\n}\n", "file_path": "rpc/src/chain/get_block_header_state.rs", "rank": 70, "score": 68005.00834084969 }, { "content": "pub trait Client {\n\n fn node(&self) -> &str;\n\n\n\n fn fetch<T>(&self, path: impl AsRef<str>, params: impl Serialize) -> crate::Result<T>\n\n where T: 'static + for<'b> 
Deserialize<'b> + Send + Sync;\n\n}\n", "file_path": "rpc/src/client.rs", "rank": 71, "score": 60638.65972544371 }, { "content": "/// Count the number of bytes a type is expected to use.\n\npub trait NumBytes {\n\n /// Count the number of bytes a type is expected to use.\n\n fn num_bytes(&self) -> usize;\n\n}\n\n\n", "file_path": "chain/src/bytes.rs", "rank": 72, "score": 59706.863188946634 }, { "content": "/// TODO docs\n\npub trait Table: Sized {\n\n /// TODO docs\n\n const NAME: u64;\n\n /// TODO docs\n\n type Row: Read + Write + NumBytes;\n\n /// TODO docs\n\n fn primary_key(row: &Self::Row) -> u64;\n\n /// TODO docs\n\n fn secondary_keys(_row: &Self::Row) -> SecondaryKeys {\n\n SecondaryKeys::default()\n\n }\n\n /// TODO docs\n\n #[inline]\n\n fn table<C, S>(code: C, scope: S) -> PrimaryTableIndex<Self>\n\n where\n\n C: Into<AccountName>,\n\n S: Into<ScopeName>,\n\n {\n\n PrimaryTableIndex::new(code, scope)\n\n }\n", "file_path": "chain/src/table.rs", "rank": 73, "score": 58854.48379850985 }, { "content": "// Given a number of nodes return the depth required to store them\n\n// in a fully balanced binary tree.\n\n//\n\n// @param node_count - the number of nodes in the implied tree\n\n// @return the max depth of the minimal tree that stores them\n\nfn calculate_max_depth(node_count: u64) -> usize {\n\n if node_count == 0 {\n\n return 0;\n\n }\n\n let implied_count = next_power_of_2(node_count);\n\n clz_power_2(implied_count) + 1\n\n}\n\n\n\n#[derive(Clone, Default, Debug, PartialEq, Encode, Decode)]\n\n#[cfg_attr(feature = \"std\", derive(Deserialize, Serialize))]\n\npub struct IncrementalMerkle {\n\n _node_count: u64,\n\n _active_nodes: Vec<Checksum256>,\n\n}\n\n\n\nimpl IncrementalMerkle {\n\n\n\n pub fn new(node_count: u64, active_nodes: Vec<Checksum256>) -> Self {\n\n IncrementalMerkle {\n\n _node_count: node_count,\n", "file_path": "chain/src/incremental_merkle.rs", "rank": 74, "score": 54565.43915545155 }, { "content": " fn visit_str<E>(self, value: &str) 
-> Result<Self::Value, E>\n\n where\n\n E: ::serde::de::Error,\n\n {\n\n value.parse::<T>().map_err(serde::de::Error::custom)\n\n }\n\n\n\n #[inline]\n\n fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(value.into())\n\n }\n\n}\n\n\n\nmacro_rules! declare_name_types {\n\n ($($ident:ident)*) => ($(\n\n #[derive(Debug, PartialEq, Eq, Clone, Copy, Default, Hash, PartialOrd, Ord, Read, Write, NumBytes, Encode, Decode)]\n\n #[eosio_core_root_path = \"crate\"]\n", "file_path": "chain/src/names.rs", "rank": 75, "score": 43129.54179951121 }, { "content": " type Error = ParseNameError;\n\n #[inline]\n\n fn try_from(value: &str) -> Result<Self, Self::Error> {\n\n Self::from_str(value)\n\n }\n\n }\n\n\n\n #[cfg(feature = \"std\")]\n\n impl<'de> serde::Deserialize<'de> for $ident {\n\n #[inline]\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n deserializer.deserialize_any(NameVisitor(core::marker::PhantomData::<Self>))\n\n }\n\n }\n\n\n\n #[cfg(feature = \"std\")]\n\n impl serde::Serialize for $ident {\n", "file_path": "chain/src/names.rs", "rank": 78, "score": 43124.0830762291 }, { "content": "//! 
<https://github.com/EOSIO/eosio.cdt/blob/4985359a30da1f883418b7133593f835927b8046/libraries/eosiolib/core/eosio/name.hpp#L28-L269>\n\nuse crate::{NumBytes, Read, Write};\n\nuse alloc::string::{String, ToString};\n\nuse core::{\n\n convert::TryFrom,\n\n fmt,\n\n str::FromStr,\n\n};\n\nuse codec::{Encode, Decode};\n\n\n\n/// All possible characters that can be used in EOSIO names.\n\npub const NAME_UTF8_CHARS: [u8; 32] = *b\".12345abcdefghijklmnopqrstuvwxyz\";\n\n\n\n/// The maximum character length of an EOSIO name.\n\npub const NAME_LEN_MAX: usize = 12;\n\n\n\n/// An error which can be returned when parsing an EOSIO name.\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum ParseNameError {\n\n /// The name is over the maximum allowed length.\n", "file_path": "chain/src/names.rs", "rank": 79, "score": 43121.29781654099 }, { "content": " fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n serializer.serialize_str(self.to_string().as_str())\n\n }\n\n }\n\n\n\n impl TryFrom<String> for $ident {\n\n type Error = ParseNameError;\n\n #[inline]\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n Self::from_str(value.as_str())\n\n }\n\n }\n\n\n\n impl fmt::Display for $ident {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let s = name_to_string(self.0);\n", "file_path": "chain/src/names.rs", "rank": 81, "score": 43119.62284558733 }, { "content": " fn eq(&self, other: &String) -> bool {\n\n self.to_string().as_str() == other.as_str()\n\n }\n\n }\n\n )*)\n\n}\n\n\n\ndeclare_name_types! 
{\n\n Name\n\n AccountName\n\n PermissionName\n\n ScopeName\n\n TableName\n\n ActionName\n\n}\n\n\n\nimpl From<ScopeName> for AccountName {\n\n #[inline]\n\n fn from(scope: ScopeName) -> Self {\n\n let value: u64 = scope.into();\n", "file_path": "chain/src/names.rs", "rank": 82, "score": 43118.590136089115 }, { "content": " TooLong,\n\n /// The name contains an unallowed character.\n\n BadChar(char),\n\n}\n\n\n\nimpl fmt::Display for ParseNameError {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Self::TooLong => write!(\n\n f,\n\n \"name is too long, must be {} chars or less\",\n\n NAME_LEN_MAX\n\n ),\n\n Self::BadChar(c) => write!(\n\n f,\n\n \"name contains invalid character '{}'; must only contain the following characters: {}\",\n\n c,\n\n alloc::string::String::from_utf8_lossy(&NAME_UTF8_CHARS)\n\n ),\n", "file_path": "chain/src/names.rs", "rank": 84, "score": 43112.82736830794 }, { "content": " value.into()\n\n }\n\n}\n\n\n\nimpl From<AccountName> for ScopeName {\n\n #[inline]\n\n fn from(name: AccountName) -> Self {\n\n let value: u64 = name.into();\n\n value.into()\n\n }\n\n}\n", "file_path": "chain/src/names.rs", "rank": 86, "score": 43109.23616420665 }, { "content": " #[repr(C)]\n\n pub struct $ident(u64);\n\n\n\n impl $ident {\n\n /// Creates a new name\n\n #[inline]\n\n pub const fn new(value: u64) -> Self {\n\n Self(value)\n\n }\n\n\n\n #[inline]\n\n pub const fn as_u64(&self) -> u64 {\n\n self.0\n\n }\n\n }\n\n\n\n impl From<u64> for $ident {\n\n #[inline]\n\n fn from(n: u64) -> Self {\n\n Self(n)\n", "file_path": "chain/src/names.rs", "rank": 87, "score": 43108.377647714886 }, { "content": " write!(f, \"{}\", s)\n\n }\n\n }\n\n\n\n impl From<$ident> for String {\n\n #[inline]\n\n fn from(i: $ident) -> Self {\n\n i.to_string()\n\n }\n\n }\n\n\n\n impl PartialEq<$ident> for String {\n\n #[inline]\n\n fn eq(&self, other: &$ident) -> bool {\n\n self.as_str() == other.to_string().as_str()\n\n }\n\n }\n\n\n\n 
impl PartialEq<String> for $ident {\n\n #[inline]\n", "file_path": "chain/src/names.rs", "rank": 90, "score": 43103.192733302705 }, { "content": " }\n\n }\n\n\n\n impl From<$ident> for u64 {\n\n #[inline]\n\n fn from(i: $ident) -> Self {\n\n i.0\n\n }\n\n }\n\n\n\n impl FromStr for $ident {\n\n type Err = ParseNameError;\n\n #[inline]\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let name = name_from_str(s)?;\n\n Ok(name.into())\n\n }\n\n }\n\n\n\n impl TryFrom<&str> for $ident {\n", "file_path": "chain/src/names.rs", "rank": 91, "score": 43100.330352519115 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<ParseNameError> for crate::error::Error {\n\n fn from(e: ParseNameError) -> crate::error::Error {\n\n crate::error::Error::ParseNameErr(e)\n\n }\n\n}\n\n\n\n/// Attempts to create an EOSIO name from a `&str`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use eos_chain::{name_from_str, ParseNameError};\n\n/// assert_eq!(name_from_str(\"\"), Ok(0));\n\n/// assert_eq!(name_from_str(\"a\"), Ok(3458764513820540928));\n\n/// assert_eq!(name_from_str(\"123456789012\"), Err(ParseNameError::BadChar('6')));\n\n/// assert_eq!(name_from_str(\"123451234512\"), Ok(614251535012020768));\n\n/// assert_eq!(name_from_str(\"1234512345123\"), Err(ParseNameError::TooLong));\n\n/// assert_eq!(name_from_str(\"eosio.token\"), Ok(6138663591592764928));\n\n/// assert_eq!(name_from_str(\"eosio.bpay\"), Ok(6138663581940940800));\n\n/// assert_eq!(name_from_str(\"A\"), Err(ParseNameError::BadChar('A')));\n\n/// assert_eq!(name_from_str(\"TEST\"), Err(ParseNameError::BadChar('T')));\n\n/// ```\n\n#[inline]\n", "file_path": "chain/src/names.rs", "rank": 92, "score": 43099.95571665296 }, { "content": "//! 
<https://github.com/EOSIO/eosio.cdt/blob/4985359a30da1f883418b7133593f835927b8046/libraries/eosiolib/core/eosio/asset.hpp#L18-L369>\n\nuse alloc::{format, string::{String, ToString}};\n\nuse core::{\n\n convert::TryFrom,\n\n fmt,\n\n ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Rem, RemAssign, Sub, SubAssign},\n\n str::FromStr,\n\n};\n\n\n\n#[cfg(feature = \"std\")]\n\nuse serde::{Deserialize};\n\n\n\nuse crate::{\n\n CheckedAdd, CheckedDiv, CheckedMul, CheckedRem, CheckedSub,\n\n NumBytes, ParseSymbolError, Read, Symbol, symbol_from_chars, Write,\n\n};\n\n\n\n/// Stores information for owner of asset\n\n#[cfg_attr(feature = \"std\", derive(Deserialize))]\n\n#[derive(Debug, PartialEq, PartialOrd, Clone, Copy, Default, Read, Write, NumBytes)]\n", "file_path": "chain/src/asset.rs", "rank": 98, "score": 42986.962970448956 }, { "content": " #[inline]\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n Self::try_from(value.as_str())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum AssetOpError {\n\n Overflow,\n\n DifferentSymbols,\n\n}\n\n\n\nimpl fmt::Display for AssetOpError {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let msg = match *self {\n\n Self::Overflow => \"integer overflow\",\n\n Self::DifferentSymbols => \"assets have different symbols\",\n\n };\n\n write!(f, \"{}\", msg)\n", "file_path": "chain/src/asset.rs", "rank": 99, "score": 42986.90622531489 } ]
Rust
components/epaxos/src/replication/hdlreply.rs
openacid/celeritasdb
061d71e5e6305b6e3782530433ad082680c9fc14
use crate::qpaxos::replicate_reply::Phase; use crate::qpaxos::Direction; use crate::qpaxos::ReplicateReply; use crate::qpaxos::*; use crate::replica::*; use crate::replication::RpcHandlerError; pub fn check_repl_common(inst: &Instance, reply: ReplicateReply) -> Result<Phase, RpcHandlerError> { let iid = reply .instance_id .ok_or(ProtocolError::LackOf("instance_id".into()))?; if iid != inst.instance_id.unwrap() { let err = ProtocolError::NotMatch( "instance_id".into(), format!("{}", inst.instance_id.unwrap()), format!("{}", iid), ); return Err(err.into()); } let phase = reply.phase.ok_or(ProtocolError::LackOf("phase".into()))?; let last_ballot = reply.last_ballot; if inst.ballot < last_ballot { let zero = Some(BallotNum::default()); let err = RpcHandlerError::StaleBallot( inst.ballot.or(zero).unwrap(), last_ballot.or(zero).unwrap(), ); return Err(err); } Ok(phase) } pub fn handle_prepare_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } let phase = check_repl_common(&st.instance, repl)?; let frepl: PrepareReply = phase .try_into() .or(Err(ProtocolError::LackOf("phase::Prepare".into())))?; let deps = frepl .deps .as_ref() .ok_or(ProtocolError::LackOf("phase::Prepare.deps".into()))?; if frepl.deps_committed.len() < deps.len() { return Err(ProtocolError::Incomplete( "phase::Prepare.deps_committed".into(), deps.len() as i32, frepl.deps_committed.len() as i32, ) .into()); } for (i, d) in deps.iter().enumerate() { let rid = d.replica_id; if !st.prepared.contains_key(&rid) { st.prepared.insert( rid, DepStatus { ..DepStatus::default() }, ); } let pre = st.prepared.get_mut(&rid).unwrap(); if pre.replied.insert(from_rid) { pre.rdeps.push(RepliedDep { idx: d.idx, seq: d.seq, committed: frepl.deps_committed[i], }); } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Prepared, Direction::Reply, from_rid, 
st.instance.instance_id.unwrap(), )); } } Ok(()) } pub fn handle_accept_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } check_repl_common(&st.instance, repl)?; let inst = &st.instance; let status = inst.get_status(); if status != InstanceStatus::Accepted { return Err(RpcHandlerError::DelayedReply( InstanceStatus::Accepted, status, )); } if st.accepted.insert(from_rid) { } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Accepted, Direction::Reply, from_rid, st.instance.instance_id.unwrap(), )); } Ok(()) }
use crate::qpaxos::replicate_reply::Phase; use crate::qpaxos::Direction; use crate::qpaxos::ReplicateReply; use crate::qpaxos::*; use crate::replica::*; use crate::replication::RpcHandlerError; pub fn check_repl_common(inst: &Instance, reply: ReplicateReply) -> Result<Phase, RpcHandlerError> { let iid = reply .instance_id .ok_or(ProtocolError::LackOf("instance_id".into()))?; if iid != inst.instance_id.unwrap() { let err = ProtocolError::NotMatch( "instance_id".into(),
pub fn handle_prepare_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } let phase = check_repl_common(&st.instance, repl)?; let frepl: PrepareReply = phase .try_into() .or(Err(ProtocolError::LackOf("phase::Prepare".into())))?; let deps = frepl .deps .as_ref() .ok_or(ProtocolError::LackOf("phase::Prepare.deps".into()))?; if frepl.deps_committed.len() < deps.len() { return Err(ProtocolError::Incomplete( "phase::Prepare.deps_committed".into(), deps.len() as i32, frepl.deps_committed.len() as i32, ) .into()); } for (i, d) in deps.iter().enumerate() { let rid = d.replica_id; if !st.prepared.contains_key(&rid) { st.prepared.insert( rid, DepStatus { ..DepStatus::default() }, ); } let pre = st.prepared.get_mut(&rid).unwrap(); if pre.replied.insert(from_rid) { pre.rdeps.push(RepliedDep { idx: d.idx, seq: d.seq, committed: frepl.deps_committed[i], }); } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Prepared, Direction::Reply, from_rid, st.instance.instance_id.unwrap(), )); } } Ok(()) } pub fn handle_accept_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } check_repl_common(&st.instance, repl)?; let inst = &st.instance; let status = inst.get_status(); if status != InstanceStatus::Accepted { return Err(RpcHandlerError::DelayedReply( InstanceStatus::Accepted, status, )); } if st.accepted.insert(from_rid) { } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Accepted, Direction::Reply, from_rid, st.instance.instance_id.unwrap(), )); } Ok(()) }
format!("{}", inst.instance_id.unwrap()), format!("{}", iid), ); return Err(err.into()); } let phase = reply.phase.ok_or(ProtocolError::LackOf("phase".into()))?; let last_ballot = reply.last_ballot; if inst.ballot < last_ballot { let zero = Some(BallotNum::default()); let err = RpcHandlerError::StaleBallot( inst.ballot.or(zero).unwrap(), last_ballot.or(zero).unwrap(), ); return Err(err); } Ok(phase) }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn test_display_replicate_reply_err() {\n\n let cmn = \"last:None, iid:None, phase\";\n\n\n\n {\n\n // storage error\n\n let r = ReplicateReply {\n\n err: Some(QError {\n\n sto: Some(StorageFailure::default()),\n\n req: None,\n\n }),\n\n ..Default::default()\n\n };\n\n let e = \"{sto:StorageFailure, req:None}\";\n\n\n\n assert_eq!(\n\n format!(\"{{err:{}, {}:{}}}\", e, cmn, \"None\"),\n\n format!(\"{}\", r)\n\n );\n\n }\n\n {\n", "file_path": "components/epaxos/src/qpaxos/test_display.rs", "rank": 3, "score": 116781.67040701021 }, { "content": "#[test]\n\nfn test_handle_prepare_reply_err() {\n\n let inst = inst!((1, 2), (0, _), [(x = \"1\")], [(1, 1)]);\n\n\n\n let cases: Vec<(ReplicateReply, RpcHandlerError)> = vec![\n\n (\n\n frepl!((None, None)),\n\n ProtocolError::LackOf(\"instance_id\".into()).into(),\n\n ),\n\n (\n\n frepl!(((2, 3), None)),\n\n ProtocolError::LackOf(\"instance_id\".into()).into(),\n\n ),\n\n (\n\n frepl!(((2, 3), (1, 2)), None),\n\n ProtocolError::LackOf(\"phase\".into()).into(),\n\n ),\n\n (\n\n ReplicateReply {\n\n last_ballot: blt!((0, 1)),\n\n instance_id: iid!((1, 2)),\n", "file_path": "components/epaxos/src/replication/test_hdlreply.rs", "rank": 4, "score": 116781.67040701021 }, { "content": "pub fn new_replica(\n\n rid: ReplicaId,\n\n group: Vec<ReplicaId>,\n\n peers: Vec<ReplicaPeer>,\n\n sto: Arc<dyn RawKV>,\n\n) -> Replica {\n\n Replica {\n\n replica_id: rid,\n\n group_replica_ids: group,\n\n peers,\n\n storage: Storage::new(rid, sto),\n\n committed_timeout: 1000,\n\n waiting_replies: Mutex::new(HashMap::new()),\n\n }\n\n}\n\n\n\npub struct TestCluster {\n\n pub replicas: Vec<Replica>,\n\n pub txs: Vec<oneshot::Sender<()>>,\n\n pub addrs: Vec<String>,\n", "file_path": "components/epaxos/src/testutil.rs", "rank": 5, "score": 111426.89289774898 }, { "content": "fn _handle_prepare_request(iid: (i64, i64), mut inst: Instance, req_inst: Instance) {\n\n let replica = new_foo_replica(1, new_mem_sto(), &[(iid, 
inst.clone())]);\n\n\n\n let req = MakeRequest::prepare(1, &req_inst, &vec![false]);\n\n let req: PrepareRequest = req.phase.unwrap().try_into().unwrap();\n\n let _ = replica.handle_prepare(&req, &mut inst);\n\n}\n\n\n", "file_path": "components/epaxos/src/replica/test_replica.rs", "rank": 6, "score": 110261.85857132364 }, { "content": "pub fn handle_replicate_request(\n\n sv: &QPaxosImpl,\n\n req: ReplicateRequest,\n\n) -> Result<ReplicateReply, RpcHandlerError> {\n\n // TODO test replica not found\n\n let rid = req.to_replica_id;\n\n let r = sv.server_data.local_replicas.get(&rid);\n\n let r = r.ok_or(ProtocolError::NoSuchReplica(rid, 0))?;\n\n\n\n r.handle_replicate(req)\n\n}\n", "file_path": "components/epaxos/src/service/service.rs", "rank": 7, "score": 107145.7803738522 }, { "content": "/// init a global log\n\npub fn init_logger() -> io::Result<()> {\n\n // TODO: add log config to init logger\n\n let mut log_path = match env::current_dir() {\n\n Ok(p) => p,\n\n Err(_) => PathBuf::new(),\n\n };\n\n log_path.push(\"cele.log\");\n\n\n\n let file = open_log_file(log_path)?;\n\n\n\n let decorator = slog_term::PlainDecorator::new(file);\n\n let drain = CeleFormat::new(decorator).fuse();\n\n let drain = slog_async::Async::new(drain).build().fuse();\n\n\n\n let logger = slog::Logger::root(drain, slog::o!());\n\n\n\n slog_global::set_global(logger);\n\n\n\n info!(\"logger ready\");\n\n Ok(())\n\n}\n\n\n", "file_path": "src/setup/setup.rs", "rank": 8, "score": 99775.48671600394 }, { "content": "#[test]\n\nfn test_instance_conflict() {\n\n let nx = Command::from((\"NoOp\", \"x\", \"1\"));\n\n let gx = Command::from((\"Get\", \"x\", \"1\"));\n\n let sx = Command::from((\"Set\", \"x\", \"1\"));\n\n\n\n let ny = Command::from((\"NoOp\", \"y\", \"1\"));\n\n let _gy = Command::from((\"Get\", \"y\", \"1\"));\n\n let sy = Command::from((\"Set\", \"y\", \"1\"));\n\n\n\n let nxny = Instance::of(&[nx.clone(), ny.clone()], (0, 0).into(), &[]);\n\n let gxny = 
Instance::of(&[gx.clone(), ny.clone()], (0, 0).into(), &[]);\n\n let sxny = Instance::of(&[sx.clone(), ny.clone()], (0, 0).into(), &[]);\n\n let sxsy = Instance::of(&[sx.clone(), sy.clone()], (0, 0).into(), &[]);\n\n let gxsy = Instance::of(&[gx.clone(), sy.clone()], (0, 0).into(), &[]);\n\n\n\n assert!(!nxny.conflict(&nxny));\n\n assert!(!nxny.conflict(&gxny));\n\n assert!(!nxny.conflict(&sxny));\n\n assert!(!nxny.conflict(&sxsy));\n\n assert!(!nxny.conflict(&gxsy));\n", "file_path": "components/epaxos/src/qpaxos/test_instance.rs", "rank": 9, "score": 98640.25405319863 }, { "content": "#[test]\n\nfn test_instance_id_from() {\n\n let id = InstanceId {\n\n replica_id: 3,\n\n idx: 4,\n\n };\n\n assert_eq!(id, (3, 4).into());\n\n assert_eq!(id, (3i32, 4i64).into());\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance.rs", "rank": 10, "score": 98640.25405319863 }, { "content": "#[test]\n\nfn test_instance_ids_from() {\n\n let iid = InstanceId::from((1, 2));\n\n\n\n let sl: &[_] = &[iid];\n\n let ids: InstanceIds = sl.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let ids: InstanceIds = vec![iid].into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let sl: &[_] = &[(1, 2), (3, 4)];\n\n let ids: InstanceIds = sl.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let sl: &[(i32, i64)] = &[(1, 2), (3, 4)];\n\n let ids: InstanceIds = sl.into();\n\n assert_eq!(2, ids[&1]);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_ids.rs", "rank": 11, "score": 96814.62560629149 }, { "content": "/// wait_for wait for some state becomes valid.\n\n/// wait_for stops is `get_state` returns Ok().\n\n/// wait_for goes on to get new state if `get_state` returns error and\n\n/// `is_retriable_err(err)` is `true`.\n\npub fn wait_for<GET, T, E, ERETRY>(mut get_state: GET, is_retriable_err: ERETRY) -> T\n\nwhere\n\n E: fmt::Debug,\n\n GET: FnMut() -> Result<T, E>,\n\n ERETRY: Fn(&E) -> bool,\n\n{\n\n let millisecond = Duration::from_millis(50);\n\n loop {\n\n let r = 
get_state();\n\n match r {\n\n Err(err) => {\n\n if is_retriable_err(&err) {\n\n println!(\n\n \"err: {:?} while waiting. retry after {:?}\",\n\n err, millisecond\n\n );\n\n sleep(millisecond);\n\n } else {\n\n panic!(\"Unexpected error: {:?}\", err);\n\n }\n\n }\n\n Ok(x) => {\n\n println!(\"waiting done\");\n\n return x;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "components/epaxos/src/testutil.rs", "rank": 12, "score": 95541.26916838546 }, { "content": "#[test]\n\nfn test_instance_ids_deref() {\n\n let ids = InstanceIds {\n\n ids: hashmap! {\n\n 1 => 2,\n\n 3 => 4,\n\n },\n\n };\n\n\n\n assert_eq!(ids[&1], 2);\n\n assert_eq!(ids[&3], 4);\n\n\n\n let mut ids = InstanceIds { ids: hashmap! {} };\n\n\n\n ids.insert(1, 2);\n\n assert_eq!(ids[&1], 2);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_ids.rs", "rank": 13, "score": 95076.32031970199 }, { "content": "#[test]\n\nfn test_instance_ids_from_array() {\n\n let arr: [i32; 0] = [];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(0, ids.len());\n\n\n\n let arr = [(1, 2)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let arr = [(1, 2), (3, 4)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let arr = [(1, 2), (3, 4), (5, 6)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let arr = [(1, 2), (3, 4), (5, 6), (7, 8)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n", "file_path": "components/epaxos/src/qpaxos/test_instance_ids.rs", "rank": 14, "score": 95076.32031970199 }, { "content": "pub fn quorum(n: i32) -> i32 {\n\n n / 2 + 1\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/quorums.rs", "rank": 15, "score": 94472.30416710296 }, { "content": "#[test]\n\nfn test_instance_after() {\n\n // macro to create an instance with only field `deps`.\n\n #[allow(unused_macros)]\n\n macro_rules! 
dinst {\n\n [$($deps:tt),*] => {\n\n Instance {\n\n deps: Some($crate::depvec![$($deps),*].into()),\n\n ..Default::default()\n\n }\n\n };\n\n }\n\n\n\n let cases = vec![\n\n (dinst![(1, 1)], dinst![(1, 1)], false),\n\n (dinst![(1, 1)], dinst![(1, 0)], true),\n\n (dinst![(1, 1), (2, 1)], dinst![(1, 1), (2, 1)], false),\n\n (dinst![(1, 1), (2, 1)], dinst![(1, 1), (2, 0)], true),\n\n (\n\n dinst![(1, 1), (2, 1), (3, 1)],\n\n dinst![(1, 1), (2, 1)],\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 16, "score": 94448.46626135618 }, { "content": "pub fn test_objectkv_trait(eng: &Storage) {\n\n let noninst: Option<TestInstance> = eng.get(DBColumnFamily::Status, &TestId { id: 0 }).unwrap();\n\n assert_eq!(None, noninst);\n\n\n\n let inst = new_inst();\n\n eng.set(DBColumnFamily::Status, &TestId { id: 0 }, &inst)\n\n .unwrap();\n\n\n\n let got: Option<TestInstance> = eng.get(DBColumnFamily::Status, &TestId { id: 0 }).unwrap();\n\n assert_eq!(Some(inst), got);\n\n}\n\n\n", "file_path": "components/storage/src/test_storage.rs", "rank": 17, "score": 93904.65675159768 }, { "content": "#[test]\n\nfn test_instance_ids_cmp_inst() {\n\n let ids = InstanceIds {\n\n ids: hashmap! 
{\n\n 1 => 2,\n\n 3 => 4,\n\n },\n\n };\n\n\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&(1, 3).into()));\n\n assert_eq!(Some(Ordering::Equal), ids.partial_cmp(&(1, 2).into()));\n\n assert_eq!(Some(Ordering::Greater), ids.partial_cmp(&(1, 1).into()));\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&(3, 5).into()));\n\n assert_eq!(Some(Ordering::Equal), ids.partial_cmp(&(3, 4).into()));\n\n assert_eq!(Some(Ordering::Greater), ids.partial_cmp(&(3, 3).into()));\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&(2, 1).into()));\n\n\n\n assert!(ids < (1, 3).into());\n\n assert!(ids > (1, 1).into());\n\n assert!(ids == InstanceId::from((1, 2)));\n\n\n\n // Absent replica-id always results in Less\n\n assert!(ids < (2, 1).into());\n\n assert!(ids <= (2, 1).into());\n\n\n\n assert!(!(ids == InstanceId::from((2, 2))));\n\n assert!(ids != InstanceId::from((2, 2)));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_ids.rs", "rank": 18, "score": 93419.00111002744 }, { "content": "#[test]\n\nfn test_instance_ids_from_array_ref() {\n\n let arr: &[i32; 0] = &[];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(0, ids.len());\n\n\n\n let arr = &[(1, 2)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let arr = &[(1, 2), (3, 4)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let arr = &[(1, 2), (3, 4), (5, 6)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n\n\n\n let arr = &[(1, 2), (3, 4), (5, 6), (7, 8)];\n\n let ids: InstanceIds = arr.into();\n\n assert_eq!(2, ids[&1]);\n", "file_path": "components/epaxos/src/qpaxos/test_instance_ids.rs", "rank": 19, "score": 93419.00111002744 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_instance_ids_index_panic() {\n\n let ids = InstanceIds {\n\n ids: hashmap! 
{\n\n 1 => 2,\n\n 3 => 4,\n\n },\n\n };\n\n\n\n let _ = ids[&2];\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_ids.rs", "rank": 20, "score": 93419.00111002744 }, { "content": "#[test]\n\nfn test_instance_id_as_key() {\n\n let cases = vec![(instid!(1, 2), \"0000000000000001/0000000000000002\")];\n\n for (iid, want) in cases.iter() {\n\n let key = iid.into_key();\n\n assert_eq!(want.as_bytes(), key.as_slice());\n\n assert_eq!(33, iid.key_len());\n\n\n\n let iidgot = InstanceId::from_key(&key);\n\n assert_eq!(*iid, iidgot);\n\n }\n\n}\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_as_key.rs", "rank": 21, "score": 93419.00111002744 }, { "content": "#[test]\n\nfn test_instance_id_vec_from() {\n\n let iid = InstanceId::from((1, 2));\n\n\n\n let sl: &[_] = &[iid];\n\n let ids: InstanceIdVec = sl.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let ids: InstanceIdVec = vec![iid].into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let sl: &[_] = &[(1, 2), (3, 4)];\n\n let ids: InstanceIdVec = sl.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let sl: &[(i32, i64)] = &[(1, 2), (3, 4)];\n\n let ids: InstanceIdVec = sl.into();\n\n assert_eq!(iid, ids[1]);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 22, "score": 93419.00111002744 }, { "content": "/// get_slowpath_dep returns the dep for accept-request if a quorum of replies received.\n\n/// Otherwise it returns None.\n\n/// It always choose an as low instance as possible to reduce conflict.\n\n///\n\n/// E.g. 
If n=5 and quorum is 3 then if the leader `L` accumulated: [a, a, a, b].\n\n/// Then we could just choose `a` instead of `b`.\n\n/// Although in original paper it specifies the deps for accept is the union of all replied deps.\n\n/// If the leader saw 3 `a`, it means leader of `b` did not commit when `L` initiated.\n\n/// Thus `L` does not have to be after `b`.\n\n///\n\n/// It contains the initial dep at the 0-th slot, and updated deps from 1-th slot.\n\n/// `rdeps` in Accept Request is the union of `rdeps` replied in fast-accept phase.\n\n///\n\n/// `rdeps` must be sorted.\n\npub fn get_slowpath_dep(rid: ReplicaId, rdeps: &Vec<RepliedDep>, quorum: i32) -> Option<Dep> {\n\n let quorum = quorum as usize;\n\n\n\n // the first elt in rdeps is the initial dep.\n\n\n\n let n = rdeps.len();\n\n assert!(n > 0);\n\n assert!(quorum > 0);\n\n assert!(rdeps.is_sorted());\n\n\n\n if n < quorum {\n\n return None;\n\n }\n\n\n\n let d = &rdeps[quorum - 1];\n\n return Some(Dep {\n\n replica_id: rid,\n\n idx: d.idx,\n\n seq: d.seq,\n\n });\n\n}\n", "file_path": "components/epaxos/src/replica/status.rs", "rank": 23, "score": 92926.67112606202 }, { "content": "pub fn fast_quorum(n: i32) -> i32 {\n\n let q = n / 2 + 1;\n\n let f = (n - 1) / 2;\n\n let fq = f + q / 2;\n\n // Except f + q/2, fast_quorum must satisfy another condition:\n\n // two fast_quorum must have intersection.\n\n if fq < q {\n\n q\n\n } else {\n\n fq\n\n }\n\n}\n", "file_path": "components/epaxos/src/qpaxos/quorums.rs", "rank": 24, "score": 92518.78051316968 }, { "content": "#[test]\n\nfn test_instance_protobuf() {\n\n let inst_id1 = (1, 10).into();\n\n let inst_id2 = (2, 20).into();\n\n let inst_id3 = (3, 30).into();\n\n let deps = vec![inst_id1, inst_id2, inst_id3];\n\n\n\n let cmds = cmdvec![(\"NoOp\", \"k1\", \"v1\"), (\"Get\", \"k2\", \"v2\")];\n\n let ballot = (2, 3).into();\n\n\n\n let inst1 = Instance::of(&cmds[..], ballot, &deps[..]);\n\n\n\n test_enc_dec!(inst1, Instance);\n\n}\n\n\n", "file_path": 
"components/epaxos/src/qpaxos/t.rs", "rank": 25, "score": 92107.75302821131 }, { "content": "#[test]\n\nfn test_instance_id_vec_get() {\n\n let ids = InstanceIdVec {\n\n ids: vec![(1, 2).into(), (3, 4).into()],\n\n };\n\n\n\n assert_eq!(ids.ids[0], ids.get(1).unwrap());\n\n assert_eq!(ids.ids[1], ids.get(3).unwrap());\n\n assert_eq!(None, ids.get(2));\n\n\n\n let refids = &ids;\n\n assert_eq!(ids.ids[0], refids.get(1).unwrap());\n\n assert_eq!(ids.ids[1], refids.get(3).unwrap());\n\n assert_eq!(None, ids.get(2));\n\n\n\n let sm = Some(ids.clone());\n\n let refids = sm.as_ref().unwrap();\n\n\n\n assert_eq!(ids.ids[0], refids.get(1i64).unwrap());\n\n assert_eq!(ids.ids[1], refids.get(3).unwrap());\n\n assert_eq!(None, refids.get(2));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 26, "score": 91836.94751612925 }, { "content": "#[test]\n\nfn test_instance_id_vec_with_dup() {\n\n let ids = InstanceIdVec {\n\n ids: vec![(1, 2).into(), (3, 4).into(), (1, 100).into()],\n\n };\n\n\n\n assert_eq!(ids.ids[0], ids.get(1).unwrap());\n\n assert_eq!(ids.ids[1], ids.get(3).unwrap());\n\n assert_eq!(None, ids.get(2));\n\n\n\n assert_eq!(ids.ids[0], ids[1]);\n\n assert_eq!(ids.ids[1], ids[3]);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 27, "score": 91836.94751612925 }, { "content": "#[test]\n\nfn test_instance_id_vec_deref() {\n\n let ids = InstanceIdVec {\n\n ids: vec![(1, 2).into(), (3, 4).into()],\n\n };\n\n\n\n let mut it = ids.iter();\n\n assert_eq!(&ids.ids[0], it.next().unwrap());\n\n assert_eq!(&ids.ids[1], it.next().unwrap());\n\n assert_eq!(None, it.next());\n\n\n\n let mut ids = InstanceIdVec {\n\n ids: vec![(1, 2).into(), (3, 4).into()],\n\n };\n\n\n\n let mut it = ids.iter_mut();\n\n assert_eq!(&InstanceId::from((1, 2)), it.next().unwrap());\n\n assert_eq!(&InstanceId::from((3, 4)), it.next().unwrap());\n\n assert_eq!(None, it.next());\n\n}\n\n\n", "file_path": 
"components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 28, "score": 91836.94751612925 }, { "content": "#[test]\n\nfn test_instance_id_vec_set() {\n\n let id01 = InstanceId::from((0, 1));\n\n let id12 = InstanceId::from((1, 2));\n\n let id13 = InstanceId::from((1, 3));\n\n let id34 = InstanceId::from((3, 4));\n\n let id56 = InstanceId::from((5, 6));\n\n\n\n let mut ids = InstanceIdVec {\n\n ids: vec![id12, id34],\n\n };\n\n\n\n let r = ids.set((1, 3).into());\n\n assert_eq!((0, Some(id12)), r);\n\n assert_eq!(id13, ids.get(1).unwrap());\n\n\n\n // set a same instanceId twice\n\n let r = ids.set((1, 3).into());\n\n assert_eq!((0, Some(id13)), r);\n\n assert_eq!(id13, ids.get(1).unwrap());\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 29, "score": 91836.94751612925 }, { "content": "#[test]\n\nfn test_instance_id_vec_from_array() {\n\n let iid = InstanceId::from((1, 2));\n\n\n\n let arr: [i32; 0] = [];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(0, ids.len());\n\n\n\n let arr = [(1, 2)];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let arr = [(1, 2), (3, 4)];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let arr = [(1, 2), (3, 4), (5, 6)];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let arr = [(1, 2), (3, 4), (5, 6), (7, 8)];\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 30, "score": 91836.94751612925 }, { "content": "#[test]\n\nfn test_instance_id_vec_index() {\n\n let ids = InstanceIdVec {\n\n ids: vec![(1, 2).into(), (3, 4).into()],\n\n };\n\n\n\n assert_eq!(ids.ids[0], ids[1]);\n\n assert_eq!(ids.ids[1], ids[3]);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 31, "score": 91836.94751612925 }, { "content": "/// `get_fastpath_dep` finds out the safe dependency by a leader for fast commit.\n\n///\n\n/// `rdeps`: replied dependent 
instances proposed by replica `rid`.\n\n/// `rdeps` must be sorted.\n\n///\n\n/// The conditions of fast-commit are:\n\n/// - the number of identical deps is at least fast-quorum,\n\n/// - and this dep is committed.\n\n///\n\n/// If there is no safe dep for fast-commit, it returns None.\n\npub fn get_fastpath_dep(rid: ReplicaId, rdeps: &Vec<RepliedDep>, fast_quorum: i32) -> Option<Dep> {\n\n let fast_quorum = fast_quorum as usize;\n\n let n = rdeps.len();\n\n\n\n assert!(n > 0);\n\n assert!(fast_quorum > 0);\n\n assert!(rdeps.is_sorted());\n\n\n\n if n < fast_quorum {\n\n return None;\n\n }\n\n\n\n // In a sorted vec, that i-th elt equals the (i+k)-th elt implies there are at least k+1 equal elts.\n\n for i in 0..=(n - fast_quorum) {\n\n let left = &rdeps[i];\n\n\n\n for j in (i + fast_quorum - 1)..n {\n\n let right = &rdeps[j];\n\n\n\n // TODO: add proof of it: equals to initial value does not need to be committed.\n", "file_path": "components/epaxos/src/replica/status.rs", "rank": 32, "score": 91246.94886092654 }, { "content": "pub fn new_sto(rid: ReplicaId) -> Storage {\n\n let eng = MemEngine::new().unwrap();\n\n Storage::new(rid, Arc::new(eng))\n\n}\n\n\n", "file_path": "components/epaxos/src/testutil.rs", "rank": 33, "score": 90675.83005721866 }, { "content": "#[test]\n\nfn test_replicate_reply_pb() {\n\n let reply = ReplicateReply {\n\n err: None,\n\n last_ballot: Some((2, 3).into()),\n\n instance_id: Some(instid!(1, 2)),\n\n phase: Some(\n\n PrepareReply {\n\n deps: Some(instidvec![(1, 2), (3, 4)].into()),\n\n deps_committed: vec![true],\n\n }\n\n .into(),\n\n ),\n\n };\n\n\n\n test_enc_dec!(reply, ReplicateReply);\n\n}\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 34, "score": 90364.23387459533 }, { "content": "#[test]\n\n#[should_panic(expected = \"NotFound instance_id with replica_id=2\")]\n\nfn test_instance_id_vec_index_panic() {\n\n let ids = InstanceIdVec {\n\n ids: vec![(1, 2).into(), (3, 4).into()],\n\n };\n\n\n\n let _ = 
ids[2];\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 35, "score": 90329.15293096955 }, { "content": "#[test]\n\nfn test_instance_id_vec_cmd_inst() {\n\n let id12 = InstanceId::from((1, 2));\n\n let id34 = InstanceId::from((3, 4));\n\n\n\n let ids = InstanceIdVec {\n\n ids: vec![id12, id34],\n\n };\n\n\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&(1, 3).into()));\n\n assert_eq!(Some(Ordering::Equal), ids.partial_cmp(&(1, 2).into()));\n\n assert_eq!(Some(Ordering::Greater), ids.partial_cmp(&(1, 1).into()));\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&(3, 5).into()));\n\n assert_eq!(Some(Ordering::Equal), ids.partial_cmp(&(3, 4).into()));\n\n assert_eq!(Some(Ordering::Greater), ids.partial_cmp(&(3, 3).into()));\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&(2, 1).into()));\n\n\n\n assert!(ids < (1, 3).into());\n\n assert!(ids > (1, 1).into());\n\n assert!(ids == InstanceId::from((1, 2)));\n\n\n\n // Absent replica-id always results in Less\n\n assert!(ids < (2, 1).into());\n\n assert!(ids <= (2, 1).into());\n\n\n\n assert!(!(ids == InstanceId::from((2, 2))));\n\n assert!(ids != InstanceId::from((2, 2)));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 36, "score": 90324.98073992998 }, { "content": "#[test]\n\nfn test_instance_id_vec_from_array_ref() {\n\n let iid = InstanceId::from((1, 2));\n\n\n\n let arr: &[i32; 0] = &[];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(0, ids.len());\n\n\n\n let arr = &[(1, 2)];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let arr = &[(1, 2), (3, 4)];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let arr = &[(1, 2), (3, 4), (5, 6)];\n\n let ids: InstanceIdVec = arr.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let arr = &[(1, 2), (3, 4), (5, 6), (7, 8)];\n", "file_path": "components/epaxos/src/qpaxos/test_instance_id_vec.rs", "rank": 37, "score": 
90324.98073992998 }, { "content": "#[test]\n\nfn test_cmp_instance_id() {\n\n let cases = vec![\n\n ((1, 10), (1, 10), \"=\"),\n\n ((1, 10), (1, 10), \"<=\"),\n\n ((1, 10), (1, 10), \">=\"),\n\n ((2, 10), (1, 10), \">\"),\n\n ((2, 11), (1, 10), \">\"),\n\n ((1, 10), (1, 11), \"<\"),\n\n ((1, 10), (2, 10), \"<\"),\n\n ((1, 10), (2, 12), \"<\"),\n\n ];\n\n\n\n for (t1, t2, op) in cases {\n\n let i1: InstanceId = t1.into();\n\n let i2: InstanceId = t2.into();\n\n match op {\n\n \"=\" => assert_eq!(i1 == i2, true),\n\n \"<=\" => assert_eq!(i1 <= i2, true),\n\n \">=\" => assert_eq!(i1 >= i2, true),\n\n \"<\" => assert_eq!(i1 < i2, true),\n\n \">\" => assert_eq!(i1 > i2, true),\n\n _ => {\n\n assert!(false, format!(\"Unknown op: {}\", op));\n\n }\n\n };\n\n }\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 38, "score": 89911.82101147942 }, { "content": "#[test]\n\nfn test_instance_id_to_key() {\n\n let k = InstanceId::from((1, 10)).into_key();\n\n assert_eq!(\n\n \"0000000000000001/000000000000000a\",\n\n str::from_utf8(&k).unwrap()\n\n );\n\n\n\n let k = InstanceId::from((-1, 0)).into_key();\n\n assert_eq!(\n\n \"ffffffffffffffff/0000000000000000\",\n\n str::from_utf8(&k).unwrap()\n\n );\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 39, "score": 89911.82101147942 }, { "content": "/// new_cluster creates a ClusterInfo with predefined config specified by `name`.\n\n/// Such a cluster is only meant for test.\n\n/// Available names are:\n\n/// az_1: to create a cluster with 1 group of replica 1 covers key from `[a, z)`.\n\n/// az_3: to create a cluster with 1 group of replica 1, 2, 3 covers key from `[a, z)`.\n\npub fn new_cluster(name: &str) -> ClusterInfo {\n\n let yaml = LOCAL_CLUSTERS[name];\n\n ClusterInfo::from_str(yaml).unwrap()\n\n}\n\n\n", "file_path": "components/epaxos/src/testutil_cluster.rs", "rank": 40, "score": 88934.32319223929 }, { "content": "#[test]\n\nfn test_macro_inst() {\n\n let mut want = Instance {\n\n 
instance_id: Some((1, 2).into()),\n\n ballot: Some((4, 1).into()),\n\n cmds: vec![(\"Set\", \"x\", \"y\").into(), (\"Get\", \"a\", \"\").into()],\n\n deps: Some(Deps {\n\n dep_vec: vec![(11, 12).into(), (13, 14).into()],\n\n }),\n\n vballot: None,\n\n committed: false,\n\n };\n\n\n\n // only initial_deps\n\n assert_eq!(\n\n want,\n\n inst!((1, 2), (4, _), [(x = y), (a)], [(11, 12), (13, 14)])\n\n );\n\n\n\n // deps\n\n want.deps = Some(instidvec![(10, 0), (11, 12)].into());\n", "file_path": "components/epaxos/src/qpaxos/test_instance.rs", "rank": 41, "score": 87847.64035293384 }, { "content": "#[test]\n\nfn test_storage_instance() {\n\n let (s1, s2) = new_two_sto(1, 2);\n\n\n\n // with namespace, operation does not affect each other\n\n for sto in vec![s1, s2].iter() {\n\n let noninst = sto.get_instance(&instid!(1, 2)).unwrap();\n\n assert_eq!(None, noninst);\n\n\n\n let inst = inst!((1, 2), (3, _), [(x = y)]);\n\n sto.set_instance(&inst.instance_id.unwrap(), &inst).unwrap();\n\n\n\n let got = sto.get_instance(&instid!(1, 2)).unwrap();\n\n assert_eq!(Some(inst), got);\n\n }\n\n}\n\n\n", "file_path": "components/epaxos/src/test_qpaxos_storage.rs", "rank": 42, "score": 87847.64035293384 }, { "content": "#[test]\n\nfn test_new_instance() {\n\n let rid1 = 1;\n\n\n\n let cmds = cmdvec![(\"Set\", \"x\", \"1\")];\n\n let eng = new_mem_sto();\n\n {\n\n // initial\n\n let r1 = new_foo_replica(rid1, eng.clone(), &[]);\n\n // R1: (1, 0) -> []\n\n let i10 = r1.new_instance(&cmds).unwrap();\n\n assert_eq!(\n\n i10,\n\n inst!((rid1, 0), (0, _), [(x = \"1\")], (0, [-1, -1, -1]))\n\n );\n\n assert_eq!(\n\n i10,\n\n r1.storage.get_instance(&(rid1, 0).into()).unwrap().unwrap()\n\n );\n\n }\n\n {\n", "file_path": "components/epaxos/src/replica/test_replica.rs", "rank": 43, "score": 87847.64035293384 }, { "content": "#[test]\n\nfn test_instance_iter() {\n\n let mut ints = Vec::<Instance>::new();\n\n let (sto, s2) = new_two_sto();\n\n\n\n for rid in 0..3 {\n\n for idx in 0..10 {\n\n 
let iid = InstanceId::from((rid, idx));\n\n\n\n let cmds = vec![Command::from((\n\n OpCode::NoOp,\n\n format!(\"k1{:}\", rid * idx).as_bytes(),\n\n format!(\"v1{:}\", rid * idx).as_bytes(),\n\n ))];\n\n\n\n let ballot = (idx as i32, 0).into();\n\n let depvec = vec![Dep::from((rid + 1, idx + 1))];\n\n let mut inst = Instance::of(&cmds[..], ballot, &depvec[..]);\n\n inst.instance_id = Some(iid);\n\n\n\n let _ = sto.set_instance(&inst.instance_id.unwrap(), &inst).unwrap();\n", "file_path": "components/epaxos/src/iters/test_iters.rs", "rank": 44, "score": 87847.64035293384 }, { "content": "#[test]\n\nfn test_display_instance() {\n\n let inst = inst!(\n\n (1, 2),\n\n (3, 4),\n\n [(a = b), (c)],\n\n [(3, 4), (4, 5)],\n\n (6, 7),\n\n false,\n\n );\n\n assert_eq!(\"{id:(1, 2), blt:(3, 4), ablt:(6, 7), cmds:[Set:a=b, Get:c], deps:[(3, 4, 0), (4, 5, 0)], c:false}\",\n\n format!(\"{}\", inst));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_display.rs", "rank": 45, "score": 87847.64035293384 }, { "content": "#[test]\n\nfn test_macro_ballot() {\n\n let b = ballot!(2, 3);\n\n\n\n assert_eq!(BallotNum::from((2, 3)), b);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance.rs", "rank": 46, "score": 87847.64035293384 }, { "content": "#[test]\n\nfn test_macro_instid() {\n\n let id = instid!(1, 2);\n\n\n\n assert_eq!(InstanceId::from((1, 2)), id);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance.rs", "rank": 47, "score": 87847.64035293384 }, { "content": "#[test]\n\nfn test_macro_instids() {\n\n let ids = instidvec![(1, 2), (3i32, 4i64)];\n\n\n\n assert_eq!(InstanceId::from((1, 2)), ids[0]);\n\n assert_eq!(InstanceId::from((3, 4)), ids[1]);\n\n assert_eq!(2, ids.len());\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_instance.rs", "rank": 48, "score": 87847.64035293384 }, { "content": "#[test]\n\n#[should_panic(expected = \"idx can not be less than 0:-1\")]\n\nfn test_instance_id_to_key_negative() {\n\n 
InstanceId::from((1, -1)).into_key();\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 49, "score": 87847.64035293384 }, { "content": "pub fn test_base_trait(eng: &dyn RawKV) {\n\n let none = eng\n\n .next_raw(\n\n DBColumnFamily::Record,\n\n &\"init\".as_bytes().to_vec(),\n\n true,\n\n true,\n\n )\n\n .unwrap();\n\n assert_eq!(none, None);\n\n let none = eng\n\n .next_raw(\n\n DBColumnFamily::Instance,\n\n &\"init\".as_bytes().to_vec(),\n\n true,\n\n true,\n\n )\n\n .unwrap();\n\n assert_eq!(none, None);\n\n\n", "file_path": "components/storage/src/test_storage.rs", "rank": 50, "score": 87286.10845315513 }, { "content": "fn new_foo_inst() -> Instance {\n\n inst!((1, 10), (0, _), [(), (k2 = v2)], [(2, 10)])\n\n}\n\n\n\n// TODO test to_replica_id\n\n\n\nmacro_rules! test_request_common {\n\n ($msg:ident, $inst:ident, $to_rid:expr) => {\n\n assert_eq!($inst.ballot, $msg.ballot);\n\n assert_eq!($inst.instance_id, $msg.instance_id);\n\n assert_eq!($to_rid, $msg.to_replica_id);\n\n };\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 51, "score": 86658.01664983058 }, { "content": "#[test]\n\nfn test_handle_prepare_reply() {\n\n let inst = inst!((1, 2), (0, _), [(x = \"1\")], []);\n\n let mut st = ReplicationStatus::new(3, inst.clone());\n\n\n\n {\n\n // positive reply updates the Status.\n\n let repl: ReplicateReply = frepl!(((0, 1), (1, 2)), ([(1, 2), (2, 3)], vec![false, true]));\n\n let from_rid = 5;\n\n\n\n let r = handle_prepare_reply(&mut st, from_rid, repl.clone());\n\n assert_eq!(r.unwrap(), ());\n\n assert!(st.prepared[&1].replied.contains(&from_rid));\n\n\n\n assert_eq!(\n\n st.prepared[&1].rdeps,\n\n vec![RepliedDep {\n\n idx: 2,\n\n seq: 0,\n\n committed: false\n\n }]\n", "file_path": "components/epaxos/src/replication/test_hdlreply.rs", "rank": 52, "score": 86329.7587868776 }, { "content": "#[test]\n\nfn test_handle_accept_reply() {\n\n let replica_id = 2;\n\n let rp = testutil::new_replica(\n\n replica_id,\n\n 
vec![0, 1, 2],\n\n vec![],\n\n Arc::new(MemEngine::new().unwrap()),\n\n );\n\n\n\n let mut inst = inst!((1, 2), (0, _), [(x = \"1\")], []);\n\n inst.deps = Some(depvec![].into());\n\n rp.storage\n\n .set_instance(&inst.instance_id.unwrap(), &inst)\n\n .unwrap();\n\n let n = rp.group_replica_ids.len() as i32;\n\n\n\n {\n\n // with high ballot num\n\n let mut st = ReplicationStatus::new(n, inst.clone());\n\n st.start_accept();\n", "file_path": "components/epaxos/src/replication/test_hdlreply.rs", "rank": 53, "score": 86329.7587868776 }, { "content": "fn execute_error_to_str(err: &ExecutionError) -> &'static str {\n\n match err {\n\n ExecutionError::Timeout => \"Timeout(timeout)\",\n\n ExecutionError::Uninitialized => \"InternalErr(uninitalized)\",\n\n ExecutionError::Disconnected => \"InternalErr(disconnected)\",\n\n ExecutionError::PoolPoisoned => \"InternalErr(poolposioned)\",\n\n }\n\n}\n\n\n", "file_path": "components/cele_threads/src/cele_threads.rs", "rank": 54, "score": 86185.80927957041 }, { "content": "#[test]\n\nfn test_macro_inst_all_arg() {\n\n let want = Instance {\n\n instance_id: Some((1, 2).into()),\n\n ballot: Some((4, 2).into()),\n\n cmds: vec![(\"Set\", \"x\", \"y\").into(), (\"Get\", \"a\", \"\").into()],\n\n deps: Some(Deps {\n\n dep_vec: vec![(12, 13).into(), (14, 15).into()],\n\n }),\n\n vballot: Some((2, 3).into()),\n\n committed: true,\n\n };\n\n\n\n assert_eq!(\n\n want,\n\n inst!(\n\n (1, 2),\n\n (4, 2),\n\n [(\"Set\", \"x\", \"y\"), (\"Get\", \"a\", \"\")],\n\n [(12, 13), (14, 15)],\n\n (2, 3),\n", "file_path": "components/epaxos/src/qpaxos/test_instance.rs", "rank": 55, "score": 85903.69918504314 }, { "content": "#[test]\n\nfn test_find_missing_instances() {\n\n let rp = new_replica();\n\n\n\n let cases1 = [\n\n (vec![inst!((1, 2), deps:[(1, 1)])], instidvec![(1, 1)]),\n\n // R1 R2 R3\n\n // | | |\n\n // | 2(Committed)-. 10(Executed)\n\n // | | ↘ |\n\n // 2(Committed)--.---------------. 
5\n\n // | ↘ | ↘ |\n\n // 1(Executed) 1(Executed) 3\n\n // | | |\n\n (\n\n vec![\n\n inst!((1, 2), deps:(1, [1, 1, 3])),\n\n inst!((2, 2), deps:(1, [1, 1, 5])),\n\n ],\n\n instidvec![(1, 1), (2, 1), (3, 10)],\n\n ),\n", "file_path": "components/epaxos/src/replica/test_exec.rs", "rank": 56, "score": 85903.69918504314 }, { "content": "#[test]\n\nfn test_display_instance_ids() {\n\n assert_eq!(\"{1:2, 3:4}\", format!(\"{}\", instids! {(1,2),(3,4)}));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_display.rs", "rank": 57, "score": 85903.69918504314 }, { "content": "#[test]\n\nfn test_macro_instance_fields() {\n\n // instance_id\n\n\n\n let smiid: Option<InstanceId> = Some(InstanceId::from((1, 2)));\n\n assert_eq!(\n\n Option::<InstanceId>::None,\n\n __instance_fields!(instance_id, None)\n\n );\n\n assert_eq!(smiid, __instance_fields!(instance_id, (1, 2)));\n\n assert_eq!(\n\n smiid,\n\n __instance_fields!(instance_id, InstanceId::from((1, 2)))\n\n );\n\n\n\n // deps\n\n let smdeps: Option<Deps> = Some(Deps::from(depvec![(1, 2), (3, 4)]));\n\n assert_eq!(smdeps, __instance_fields!(deps, [(1, 2), (3, 4)]));\n\n assert_eq!(Option::<Deps>::None, __instance_fields!(deps, None));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_macros.rs", "rank": 58, "score": 85903.69918504314 }, { "content": "#[test]\n\nfn test_display_instance_id() {\n\n assert_eq!(\n\n \"(1, 2)\",\n\n format!(\n\n \"{}\",\n\n InstanceId {\n\n replica_id: 1,\n\n idx: 2\n\n }\n\n )\n\n );\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_display.rs", "rank": 59, "score": 85903.69918504314 }, { "content": "/// new_inmem_server_data creates a ServerData with predefined config specified by `name`. 
See\n\n/// ClusterInfo::new_predefined.\n\n///\n\n/// Such a cluster is only meant for test because it use a in-memory storage.\n\npub fn new_inmem_server_data(name: &str) -> ServerData {\n\n let ci = new_cluster(name);\n\n\n\n let sto = MemEngine::new().unwrap();\n\n let sto = Arc::new(sto);\n\n\n\n let node_ids: Vec<NodeId> = ci.nodes.keys().cloned().collect();\n\n let node_id: NodeId = node_ids[0].clone();\n\n\n\n ServerData::new(sto, ci, node_id)\n\n}\n", "file_path": "components/epaxos/src/testutil_cluster.rs", "rank": 60, "score": 85727.58255700406 }, { "content": "fn new_inst() -> TestInstance {\n\n TestInstance { id: 0, foo: 1 }\n\n}\n\n\n", "file_path": "components/storage/src/test_storage.rs", "rank": 61, "score": 84593.835991285 }, { "content": "#[test]\n\nfn test_display_replicate_reply_normal() {\n\n let cmn = \"last:(3, 4), iid:(1, 2), phase\";\n\n\n\n let mut r = ReplicateReply {\n\n err: None,\n\n last_ballot: Some((3, 4).into()),\n\n instance_id: Some((1, 2).into()),\n\n phase: None,\n\n };\n\n\n\n {\n\n r.phase = Some(replicate_reply::Phase::Prepare(PrepareReply {\n\n deps: Some(instidvec![(1, 2), (3, 4)].into()),\n\n deps_committed: vec![true, false],\n\n }));\n\n let ph = \"Prepare{deps[1]:[(1, 2, 0), (3, 4, 0)], c:[true, false]}\";\n\n\n\n assert_eq!(format!(\"{{err:None, {}:{}}}\", cmn, ph), format!(\"{}\", r));\n\n }\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_display.rs", "rank": 62, "score": 84483.79053315394 }, { "content": "#[test]\n\nfn test_deps_cmp_instance_id() {\n\n let id12 = Dep::from((1, 2));\n\n let id34 = Dep::from((3, 4));\n\n\n\n let ids = Deps {\n\n dep_vec: vec![id12, id34],\n\n };\n\n\n\n // TODO compare with instanceId\n\n\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&instid!(1, 3)));\n\n assert_eq!(Some(Ordering::Equal), ids.partial_cmp(&instid!(1, 2)));\n\n assert_eq!(Some(Ordering::Greater), ids.partial_cmp(&instid!(1, 1)));\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&instid!(3, 5)));\n\n 
assert_eq!(Some(Ordering::Equal), ids.partial_cmp(&instid!(3, 4)));\n\n assert_eq!(Some(Ordering::Greater), ids.partial_cmp(&instid!(3, 3)));\n\n assert_eq!(Some(Ordering::Less), ids.partial_cmp(&instid!(2, 1)));\n\n\n\n assert!(ids < instid!(1, 3));\n\n assert!(ids > instid!(1, 1));\n\n assert!(ids == InstanceId::from((1, 2)));\n\n\n\n // Absent replica-id always results in Less\n\n assert!(ids < instid!(2, 1));\n\n assert!(ids <= instid!(2, 1));\n\n\n\n assert!(!(ids == InstanceId::from((2, 2))));\n\n assert!(ids != InstanceId::from((2, 2)));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_deps.rs", "rank": 63, "score": 84069.78882798507 }, { "content": "#[test]\n\nfn test_get_max_instance_ids() {\n\n let (i12, i13, i34) = (inst!((1, 2)), inst!((1, 3)), inst!((3, 4)));\n\n\n\n let insts = vec![((1, 2), i12), ((1, 3), i13), ((3, 4), i34)];\n\n\n\n let r = new_foo_replica(3, new_mem_sto(), &insts);\n\n let maxs = r.get_max_instance_ids(&[1, 3, 5]);\n\n assert_eq!(\n\n maxs,\n\n InstanceIdVec::from(instidvec![(1, 3), (3, 4), (5, -1)])\n\n );\n\n}\n\n\n", "file_path": "components/epaxos/src/replica/test_replica.rs", "rank": 64, "score": 84069.78882798507 }, { "content": "#[test]\n\nfn test_display_instance_id_vec() {\n\n assert_eq!(\n\n \"[(1, 2), (3, 4)]\",\n\n format!(\"{}\", InstanceIdVec::from(instidvec![(1, 2), (3, 4)]))\n\n );\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_display.rs", "rank": 65, "score": 84069.78882798507 }, { "content": "fn assert_inst_iter(it: InstanceIter, want: &[&Instance]) {\n\n let mut n = 0;\n\n\n\n for act_inst in it {\n\n assert_eq!(act_inst.cmds, want[n].cmds);\n\n assert_eq!(act_inst.ballot, want[n].ballot);\n\n\n\n assert_eq!(act_inst.instance_id, want[n].instance_id);\n\n\n\n n = n + 1;\n\n }\n\n\n\n assert_eq!(want.len(), n);\n\n}\n", "file_path": "components/epaxos/src/iters/test_iters.rs", "rank": 66, "score": 82937.96582926545 }, { "content": "pub fn open(path: &str) -> Result<DB, String> {\n\n let 
mut db_opt = DBOptions::new();\n\n\n\n let cfs_opts = get_all_cfs_opts();\n\n\n\n let mut exist_cfs_opts = vec![];\n\n let mut new_cfs_opts = vec![];\n\n\n\n if !db_exists(path)? {\n\n db_opt.create_if_missing(true);\n\n\n\n for x in cfs_opts {\n\n // TODO: after renaming DBColumnFamily::Default to Record, it is not a default cf thus not a existent cf.\n\n // if x.cf == DBColumnFamily::Record {\n\n // exist_cfs_opts.push(CFOptions::new(x.cf, x.options.clone()));\n\n // } else {\n\n new_cfs_opts.push(CFOptions::new(x.cf, x.options.clone()));\n\n // }\n\n }\n\n\n", "file_path": "components/storage/src/rocks_engine/rocks.rs", "rank": 67, "score": 82515.14787068685 }, { "content": "#[test]\n\n#[should_panic(expected = \"inst.instance_id is unexpected to be None\")]\n\nfn test_handle_prepare_request_panic_local_instance_id_none() {\n\n let inst = foo_inst!(None, [(2, 0)]);\n\n let req_inst = foo_inst!((1, 0), [(0, 0)]);\n\n\n\n _handle_prepare_request((0, 0), inst, req_inst);\n\n}\n\n\n", "file_path": "components/epaxos/src/replica/test_replica.rs", "rank": 68, "score": 77670.78901530209 }, { "content": "pub fn new_two_sto(rid1: ReplicaId, rid2: ReplicaId) -> (Storage, Storage) {\n\n let eng = Arc::new(MemEngine::new().unwrap());\n\n (\n\n Storage::new(rid1, eng.clone()),\n\n Storage::new(rid2, eng.clone()),\n\n )\n\n}\n\n\n", "file_path": "components/epaxos/src/testutil.rs", "rank": 69, "score": 76248.55517902528 }, { "content": "fn new_foo_inst(leader_id: i64) -> Instance {\n\n inst!(\n\n (leader_id, 1),\n\n (2, _),\n\n [(), (k2)],\n\n [(1, 10), (2, 20), (3, 30)],\n\n )\n\n}\n\n\n", "file_path": "components/epaxos/src/replica/test_replica.rs", "rank": 70, "score": 73235.9370328353 }, { "content": "fn _test_updated_inst(got: &Instance, cmds: Vec<Command>, committed: bool, executed: bool) {\n\n assert_eq!(cmds, got.cmds, \"cmds\");\n\n assert_eq!(committed, got.committed, \"committed\");\n\n // TODO: check executed status of instance by RepicaStatus::Exec\n\n}\n\n\n", 
"file_path": "components/epaxos/src/replica/test_replica.rs", "rank": 71, "score": 60546.767901969 }, { "content": "fn main() {\n\n // tonic_build::compile_protos(\"../proto/helloworld.proto\").unwrap();\n\n\n\n // On travis `rustup component add rustfmt` report an error that can not find rustfmt on a\n\n // nightly channel.\n\n // Thus we disable formatting generated code on travis.\n\n let fmt = option_env!(\"TRAVIS_RUST_VERSION\").is_none();\n\n tonic_build::configure()\n\n .format(fmt)\n\n .build_client(true)\n\n .build_server(true)\n\n .type_attribute(\"OpCode\", \"#[derive(enum_utils::FromStr)]\")\n\n //TODO command contains vec<u8> that can not be copied.\n\n // .type_attribute(\"Command\", \"#[derive(Copy)]\")\n\n .type_attribute(\n\n \"Value.value_enum\",\n\n \"#[derive(Eq, derive_more::From, derive_more::TryInto)]\",\n\n )\n\n .type_attribute(\"Value\", \"#[derive(Eq)]\")\n\n .type_attribute(\"InstanceId\", \"#[derive(Copy, Eq, Ord, PartialOrd, Hash)]\")\n", "file_path": "components/epaxos/build.rs", "rank": 72, "score": 59920.925376847415 }, { "content": "/// Conflict defines API to check if two vars conflicts with each other.\n\npub trait Conflict {\n\n fn conflict(&self, with: &Self) -> bool;\n\n}\n", "file_path": "components/epaxos/src/qpaxos/conflict.rs", "rank": 73, "score": 58476.20499021844 }, { "content": "pub trait Iter {\n\n fn get_instance_iter(&self, iid: InstanceId, include: bool, reverse: bool) -> InstanceIter;\n\n fn get_iter(&self, cur: Vec<u8>, include: bool, reverse: bool, cf: DBColumnFamily)\n\n -> RawKVIter;\n\n}\n\n\n\nimpl Iter for Storage {\n\n fn get_instance_iter(&self, iid: InstanceId, include: bool, reverse: bool) -> InstanceIter {\n\n InstanceIter {\n\n curr_inst_id: iid,\n\n include,\n\n storage: self.clone(),\n\n reverse,\n\n }\n\n }\n\n\n\n fn get_iter(\n\n &self,\n\n cur: Vec<u8>,\n\n include: bool,\n", "file_path": "components/epaxos/src/iters/iters.rs", "rank": 74, "score": 58476.20499021844 }, { "content": "/// 
WithNameSpace wraps a key with a prefix namespace.\n\n/// E.g.: key: \"abc\" -> key with namespace \"my_namespace/abc\";\n\n///\n\n/// It must guarantee that different namespace never generate identical output.\n\npub trait WithNameSpace {\n\n /// prepend_ns wraps a key with namespace string, e.g.:\n\n /// key: \"foo\" with ns:NameSpace = 5i64: \"5/foo\".\n\n fn prepend_ns<K: AsStorageKey + ?Sized>(&self, key: &K) -> Vec<u8>;\n\n\n\n /// strip_ns strip namespace prefix from key, If the key belongs to another namespace, it\n\n /// returns None.\n\n fn strip_ns<'a>(&self, key: &'a [u8]) -> Option<&'a [u8]>;\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct NameSpace {\n\n pub ns: Vec<u8>,\n\n}\n\n\n\nimpl<T: fmt::Display> From<T> for NameSpace {\n\n fn from(v: T) -> Self {\n\n NameSpace {\n\n ns: format!(\"{}/\", v).into(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "components/storage/src/traits.rs", "rank": 75, "score": 58476.20499021844 }, { "content": "/// AsStorageKey defines API to convert a struct into/from a storage key in byte stream.\n\npub trait AsStorageKey {\n\n /// into_key converts a struct into bytes.\n\n fn into_key(&self) -> Vec<u8>;\n\n\n\n /// key_len returns the length of the result key in bytes. 
This would be used to pre-alloc mem.\n\n /// An unoptimized default impl just builds the key and returns the length.\n\n fn key_len(&self) -> usize {\n\n // default impl\n\n self.into_key().len()\n\n }\n\n\n\n /// from_key converts back from bytes into a struct.\n\n fn from_key(_buf: &[u8]) -> Self\n\n where\n\n Self: std::marker::Sized,\n\n {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "components/storage/src/traits.rs", "rank": 76, "score": 58476.20499021844 }, { "content": "pub trait ToStringExt {\n\n fn tostr_ext(&self) -> String;\n\n}\n\n\n\nimpl ToStringExt for String {\n\n fn tostr_ext(&self) -> String {\n\n format!(\"{}\", self)\n\n }\n\n}\n\n\n\nimpl<T: ToStringExt> ToStringExt for Option<T> {\n\n fn tostr_ext(&self) -> String {\n\n match self {\n\n Some(v) => v.tostr_ext(),\n\n None => \"None\".into(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: ToStringExt> ToStringExt for Vec<T> {\n", "file_path": "components/epaxos/src/qpaxos/display.rs", "rank": 77, "score": 57355.0098547075 }, { "content": "#[test]\n\nfn test_cele_format() {\n\n let decorator = slog_term::PlainSyncDecorator::new(TestWriter);\n\n let drain = CeleFormat::new(decorator).fuse();\n\n let logger = slog::Logger::root(drain, slog::o!());\n\n\n\n slog_info!(logger, \"logger ready\");\n\n\n\n slog_info!(logger, \"get request from {}\", \"test run\");\n\n slog_info!(logger, \"get request: \"; \"key\" => \"my_key\", \"req_id\" => \"my_req_id\");\n\n\n\n slog_warn!(logger, \"client timeout: \"; \"timeout_ms\" => 3000);\n\n\n\n slog_error!(logger, \"failed and got: \";\n\n \"is_true\" => true,\n\n \"is_none\" => None as Option<u8>,\n\n \"errors\" => ?[\"error1\", \"error2\"], // `?[xxx]` is translated to `format(\"{:?}\", [xxx])`\n\n );\n\n\n\n let expect = r#\"[2020/05/03 10:13:55.035 +08:00] [INFO] [src/setup/test_format.rs:32] logger ready\n\n[2020/05/03 10:13:55.038 +08:00] [INFO] [src/setup/test_format.rs:34] get request from test run\n", "file_path": "src/setup/test_format.rs", "rank": 78, 
"score": 56204.492269179405 }, { "content": "#[test]\n\nfn test_instanceid_derived() {\n\n let inst_id1 = InstanceId {\n\n replica_id: 1,\n\n idx: 10,\n\n };\n\n let inst_id2 = inst_id1;\n\n\n\n assert_eq!(inst_id1, inst_id2);\n\n assert_eq!(inst_id1, (1, 10).into());\n\n assert_eq!(inst_id1, InstanceId::from((1, 10)));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 79, "score": 56204.492269179405 }, { "content": "/// write log record fields\n\nfn write_log_fields(\n\n rd: &mut dyn RecordDecorator,\n\n record: &Record,\n\n values: &OwnedKVList,\n\n) -> io::Result<()> {\n\n let mut serializer = Serializer::new(rd, false, true); // no comma, print record kvs just as what write\n\n\n\n record.kv().serialize(record, &mut serializer)?;\n\n values.serialize(record, &mut serializer)?;\n\n\n\n serializer.finish()\n\n}\n", "file_path": "src/setup/log_format.rs", "rank": 80, "score": 56204.492269179405 }, { "content": "#[test]\n\nfn test_ballotnum_derived() {\n\n let b1 = BallotNum {\n\n num: 10,\n\n replica_id: 5,\n\n };\n\n let b2 = b1;\n\n\n\n assert_eq!(b1, b2);\n\n assert_eq!(b1, (10, 5).into());\n\n assert_eq!(b1, BallotNum::from((10, 5)));\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 81, "score": 56204.492269179405 }, { "content": "pub trait CeleThreadPool {\n\n fn new(size: usize) -> Self;\n\n\n\n fn dispatch<F: FnOnce() + Send + 'static>(&self, f: F) -> Result<(), CeleThreadsError>;\n\n\n\n fn destory(&mut self) -> Result<(), CeleThreadsError>;\n\n}\n\n\n\n/// # Examples\n\n/// ```\n\n/// extern crate cele_threads;\n\n/// use cele_threads::{CeleThreadPool, CeleThreads};\n\n///\n\n/// use std::thread::sleep;\n\n/// use std::time::Duration;\n\n///\n\n/// fn main() {\n\n/// let mut pool: CeleThreads = CeleThreadPool::new(10);\n\n///\n\n/// for num in 0 .. 
100 {\n", "file_path": "components/cele_threads/src/cele_threads.rs", "rank": 82, "score": 55308.55000982415 }, { "content": "#[test]\n\nfn test_quorums() {\n\n let cases: Vec<(i32, i32, i32)> = vec![\n\n (0, 1, 1),\n\n (1, 1, 1),\n\n (2, 2, 2),\n\n (3, 2, 2),\n\n (4, 3, 3),\n\n (5, 3, 3),\n\n (6, 4, 4),\n\n (7, 4, 5),\n\n (8, 5, 5),\n\n (9, 5, 6),\n\n ];\n\n\n\n for (n_replicas, q, fastq) in cases {\n\n assert_eq!(q, quorum(n_replicas), \"quorum n={}\", n_replicas);\n\n assert_eq!(\n\n fastq,\n\n fast_quorum(n_replicas),\n\n \"fast-quorum n={}\",\n\n n_replicas\n\n );\n\n }\n\n}\n", "file_path": "components/epaxos/src/qpaxos/test_quorums.rs", "rank": 83, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_value_from() {\n\n let a = \"foo\";\n\n let r = Value::from(a);\n\n assert_eq!(\n\n Value {\n\n value_enum: Some(ValueEnum::Vbytes(\"foo\".as_bytes().into()))\n\n },\n\n r\n\n );\n\n\n\n let a: Vec<u8> = \"foo\".as_bytes().into();\n\n let r = Value::from(a);\n\n assert_eq!(\n\n Value {\n\n value_enum: Some(ValueEnum::Vbytes(\"foo\".as_bytes().into()))\n\n },\n\n r\n\n );\n\n\n\n let a: i64 = 3;\n\n let r = Value::from(a);\n\n assert_eq!(\n\n Value {\n\n value_enum: Some(ValueEnum::Vi64(3))\n\n },\n\n r\n\n );\n\n}\n", "file_path": "components/epaxos/src/qpaxos/test_value.rs", "rank": 84, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_serverdata() {\n\n let yaml = \"\n\nnodes:\n\n 127.0.0.1:4441:\n\n api_addr: 127.0.0.1:3331\n\n replication: 127.0.0.1:5551\n\n 192.168.0.1:4442:\n\n api_addr: 192.168.0.1:3332\n\n api_uaddr: /var/run/usocket2\n\n replication: 192.168.0.1:4442\n\ngroups:\n\n- range:\n\n - a\n\n - p\n\n replicas:\n\n 1: 192.168.0.1:4442\n\n\";\n\n let ci = ClusterInfo::from_str(yaml).unwrap();\n\n let sto = MemEngine::new().unwrap();\n\n let sto = Arc::new(sto);\n", "file_path": "components/epaxos/src/serverdata/test_serverdata.rs", "rank": 85, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn 
test_request_commit_pb() {\n\n let inst = new_foo_inst();\n\n\n\n let pp = MakeRequest::commit(100, &inst);\n\n test_enc_dec!(pp, ReplicateRequest);\n\n\n\n let req: CommitRequest = pp.phase.unwrap().try_into().unwrap();\n\n\n\n test_request_common!(pp, inst, 100);\n\n assert_eq!(inst.cmds, req.cmds);\n\n assert_eq!(inst.deps, req.deps);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 86, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_replica_status_to_key() {\n\n let k = ReplicaStatus::Exec.into_key();\n\n assert_eq!(\"/exec\", str::from_utf8(&k).unwrap());\n\n\n\n let k = ReplicaStatus::MaxInstance.into_key();\n\n assert_eq!(\"/max_inst\", str::from_utf8(&k).unwrap());\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/t.rs", "rank": 87, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_record_from() {\n\n let foo: Vec<u8> = \"foo\".as_bytes().into();\n\n let a = \"foo\";\n\n let r = Record::from(a);\n\n assert_eq!(\n\n Record {\n\n value: Some(Value {\n\n value_enum: Some(ValueEnum::Vbytes(foo.clone()))\n\n }),\n\n },\n\n r\n\n );\n\n\n\n let a: Vec<u8> = \"foo\".as_bytes().into();\n\n let r = Record::from(a);\n\n assert_eq!(\n\n Record {\n\n value: Some(Value {\n\n value_enum: Some(ValueEnum::Vbytes(foo.clone()))\n\n }),\n", "file_path": "components/epaxos/src/qpaxos/test_record.rs", "rank": 88, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_ns_storage() {\n\n let eng = MemEngine::new().unwrap();\n\n let eng = Arc::new(eng);\n\n let w = Storage::new(NameSpace { ns: \"5/\".into() }, eng);\n\n test_base_trait(&w);\n\n}\n", "file_path": "components/storage/src/test_storage.rs", "rank": 89, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_command_from() {\n\n let c = Command {\n\n op: OpCode::Set as i32,\n\n key: \"key\".as_bytes().to_vec(),\n\n value: \"value\".as_bytes().to_vec(),\n\n };\n\n\n\n assert_eq!(c, (OpCode::Set, \"key\", \"value\").into());\n\n assert_eq!(c, 
(\"Set\", \"key\", \"value\").into());\n\n let k = \"key\".as_bytes();\n\n let v = \"value\".as_bytes();\n\n assert_eq!(c, (OpCode::Set, k, v).into());\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_command.rs", "rank": 90, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_open() {\n\n use tempfile::Builder;\n\n\n\n let tmp_root = Builder::new().tempdir().unwrap();\n\n let db_path = format!(\"{}/test\", tmp_root.path().display());\n\n println!(\"db path:{}\", db_path);\n\n let db = open(&db_path).unwrap();\n\n\n\n assert_eq!(db.path(), db_path);\n\n\n\n let mut cfs = db.cf_names();\n\n let mut exp: Vec<&str> = vec![];\n\n for cf in DBColumnFamily::all() {\n\n exp.push(cf.into());\n\n let s: &str = cf.into();\n\n println!(\"{}\", s);\n\n }\n\n\n\n assert_eq!(cfs.sort(), exp.sort());\n\n}\n", "file_path": "components/storage/src/rocks_engine/rocks.rs", "rank": 91, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_storage_no_overriding() {\n\n let k = \"foo\".as_bytes().to_vec();\n\n let v1 = \"111\".as_bytes().to_vec();\n\n let v2 = \"222\".as_bytes().to_vec();\n\n\n\n {\n\n // rawkv api does not support name space.\n\n let (w1, w2) = two_storages();\n\n\n\n w1.set_raw(DBColumnFamily::Status, &k, &v1).unwrap();\n\n w2.set_raw(DBColumnFamily::Status, &k, &v2).unwrap();\n\n\n\n let r = w1.get_raw(DBColumnFamily::Status, &k).unwrap();\n\n assert_eq!(v2, r.unwrap());\n\n\n\n let r = w2.get_raw(DBColumnFamily::Status, &k).unwrap();\n\n assert_eq!(v2, r.unwrap());\n\n }\n\n\n\n {\n", "file_path": "components/storage/src/test_storage.rs", "rank": 92, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_request_accept_pb() {\n\n let inst = new_foo_inst();\n\n\n\n let pp = MakeRequest::accept(100, &inst);\n\n test_enc_dec!(pp, ReplicateRequest);\n\n\n\n let req: AcceptRequest = pp.phase.unwrap().try_into().unwrap();\n\n\n\n test_request_common!(pp, inst, 100);\n\n assert_eq!(inst.deps, req.deps);\n\n}\n\n\n", "file_path": 
"components/epaxos/src/qpaxos/t.rs", "rank": 93, "score": 55118.93073531549 }, { "content": "#[test]\n\nfn test_deps_from() {\n\n let iid = Dep::from((1, 2));\n\n\n\n let sl: &[_] = &[iid];\n\n let ids: Deps = sl.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let ids: Deps = vec![iid].into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let sl: &[_] = &[(1, 2), (3, 4)];\n\n let ids: Deps = sl.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let sl: &[(i32, i64)] = &[(1, 2), (3, 4)];\n\n let ids: Deps = sl.into();\n\n assert_eq!(iid, ids[1]);\n\n\n\n let ids: Deps = vec![Dep::from((1, 2))].into();\n\n assert_eq!(iid, ids[1]);\n\n}\n\n\n", "file_path": "components/epaxos/src/qpaxos/test_deps.rs", "rank": 94, "score": 55118.93073531549 }, { "content": "/// Create a stupid replica with some instances stored.\n\nfn new_foo_replica(\n\n replica_id: i64,\n\n engine: Arc<dyn RawKV>,\n\n insts: &[((i64, i64), Instance)],\n\n) -> Replica {\n\n let r = testutil::new_replica(replica_id, vec![0, 1, 2], vec![], engine);\n\n\n\n for (iid, inst) in insts.iter() {\n\n let iid = InstanceId::from(iid);\n\n r.storage.set(DBColumnFamily::Instance, &iid, inst).unwrap();\n\n }\n\n\n\n r\n\n}\n\n\n", "file_path": "components/epaxos/src/replica/test_replica.rs", "rank": 95, "score": 54102.84648776734 }, { "content": "#[test]\n\nfn test_conf_from_str() {\n\n let cont = \"\n\nnodes:\n\n 127.0.0.1:4441:\n\n api_addr: 127.0.0.1:3331\n\n replication: 127.0.0.1:5551\n\n 192.168.0.1:4442:\n\n api_addr: 192.168.0.1:3332\n\n api_uaddr: /var/run/usocket2\n\n replication: 192.168.0.1:4442\n\ngroups:\n\n- range:\n\n - a\n\n - b\n\n replicas:\n\n 1: 192.168.0.1:4442\n\n 2: 192.168.0.1:4442\n\n\";\n\n\n\n let (_tmpf, ci) = load_conf(cont).unwrap();\n\n let ci2 = ClusterInfo::from_str(cont).unwrap();\n\n assert_eq!(ci, ci2);\n\n}\n\n\n", "file_path": "components/epaxos/src/conf/test_conf.rs", "rank": 96, "score": 54098.500637849254 }, { "content": "#[test]\n\nfn test_conf_groups() {\n\n let cont = \"\n\nnodes:\n\n 
127.0.0.1:4441:\n\n api_addr: 127.0.0.1:3331\n\n replication: 127.0.0.1:5551\n\n 192.168.0.1:4442:\n\n api_addr: 192.168.0.1:3332\n\n replication: 192.168.0.1:4442\n\ngroups:\n\n- range:\n\n - a\n\n - b\n\n replicas:\n\n 1: 192.168.0.1:4442\n\n 2: 192.168.0.1:4442\n\n- range:\n\n - b\n\n - c\n\n replicas:\n", "file_path": "components/epaxos/src/conf/test_conf.rs", "rank": 97, "score": 54098.500637849254 }, { "content": "#[test]\n\nfn test_conf_replica() {\n\n let cont = \"\n\nnodes:\n\n 127.0.0.1:4441:\n\n api_addr: 127.0.0.1:3331\n\n replication: 127.0.0.1:5551\n\n 192.168.0.1:4442:\n\n api_addr: 192.168.0.1:3332\n\n replication: 192.168.0.1:4442\n\ngroups:\n\n- range:\n\n - a\n\n - b\n\n replicas:\n\n 1: 192.168.0.1:4442\n\n 2: 192.168.0.1:4442\n\n\";\n\n\n\n let (_f, ci) = load_conf(cont).unwrap();\n\n assert!(ci.get_replica_node(100).is_none());\n", "file_path": "components/epaxos/src/conf/test_conf.rs", "rank": 98, "score": 54098.500637849254 }, { "content": "#[test]\n\nfn test_base_iter() {\n\n let sto: Storage = Storage::new(\"foo\", Arc::new(MemEngine::new().unwrap()));\n\n\n\n let mut ks = vec![];\n\n let mut vs = vec![];\n\n for i in 0..100 {\n\n let k = format!(\"K{:>03}\", i).as_bytes().to_vec();\n\n let v = format!(\"V{:?}\", i).as_bytes().to_vec();\n\n sto.set_raw(DBColumnFamily::Record, &k, &v).unwrap();\n\n ks.push(k);\n\n vs.push(v);\n\n }\n\n\n\n let cur = \"K000\".as_bytes().to_vec();\n\n let it = sto.get_iter(cur, false, true, DBColumnFamily::Record);\n\n let r: Vec<_> = it.collect();\n\n assert_eq!(0, r.len());\n\n\n\n let cur = \"K099\".as_bytes().to_vec();\n\n let it = sto.get_iter(cur, false, false, DBColumnFamily::Record);\n", "file_path": "components/epaxos/src/iters/test_iters.rs", "rank": 99, "score": 54098.500637849254 } ]
Rust
src/gui/raw_control.rs
rodrigocfd/winsafe
3e9e60cb12902f51e953bcedbecd911262ed795a
use std::ptr::NonNull; use std::sync::Arc; use crate::aliases::ErrResult; use crate::co; use crate::enums::IdMenu; use crate::gui::base::Base; use crate::gui::events::{EventsView, WindowEventsAll}; use crate::gui::privs::{multiply_dpi, paint_control_borders}; use crate::gui::raw_base::RawBase; use crate::gui::resizer::{Horz, Vert}; use crate::gui::traits::{AsWindow, Child, ParentEvents, UiThread, Window}; use crate::handles::{HBRUSH, HCURSOR, HICON}; use crate::handles::HWND; use crate::structs::{POINT, SIZE, WNDCLASSEX}; use crate::various::WString; struct Obj { base: RawBase, opts: WindowControlOpts, } impl Window for Obj { fn hwnd(&self) -> HWND { self.base.hwnd() } } #[derive(Clone)] pub(in crate::gui) struct RawControl(Arc<Obj>); impl Window for RawControl { fn hwnd(&self) -> HWND { self.0.base.hwnd() } } impl AsWindow for RawControl { fn as_window(&self) -> Arc<dyn Window> { self.0.clone() } } impl UiThread for RawControl { fn run_ui_thread<F>(&self, func: F) where F: FnOnce() -> ErrResult<()>, { self.0.base.run_ui_thread(func); } } impl ParentEvents for RawControl { fn on(&self) -> &WindowEventsAll { self.0.base.on() } } impl Child for RawControl { fn ctrl_id(&self) -> u16 { self.0.opts.ctrl_id } } impl RawControl { pub(in crate::gui) fn new( parent_base_ref: &Base, opts: WindowControlOpts) -> RawControl { let (horz, vert) = (opts.horz_resize, opts.vert_resize); let wnd = Self( Arc::new( Obj { base: RawBase::new(Some(parent_base_ref)), opts, }, ), ); wnd.default_message_handlers(parent_base_ref, horz, vert); wnd } pub(in crate::gui) fn base_ref(&self) -> &Base { self.0.base.base_ref() } fn default_message_handlers(&self, parent_base_ref: &Base, horz: Horz, vert: Vert) { self.base_ref().default_message_handlers(); parent_base_ref.privileged_events_ref().wm(parent_base_ref.create_or_initdlg(), { let self2 = self.clone(); let parent_base_ptr = NonNull::from(parent_base_ref); move |_| { let opts = &self2.0.opts; let mut wcx = WNDCLASSEX::default(); let mut 
class_name_buf = WString::default(); RawBase::fill_wndclassex(self2.base_ref().parent_hinstance()?, opts.class_style, opts.class_icon, opts.class_icon, opts.class_bg_brush, opts.class_cursor, &mut wcx, &mut class_name_buf)?; let atom = self2.0.base.register_class(&mut wcx)?; let mut wnd_pos = opts.position; let mut wnd_sz = opts.size; multiply_dpi(Some(&mut wnd_pos), Some(&mut wnd_sz))?; self2.0.base.create_window( atom, None, IdMenu::Id(opts.ctrl_id), wnd_pos, wnd_sz, opts.ex_style, opts.style, )?; unsafe { parent_base_ptr.as_ref().resizer_add( parent_base_ptr.as_ref(), self2.base_ref().hwnd_ref(), horz, vert)?; } Ok(0) } }); self.on().wm_nc_paint({ let self2 = self.clone(); move |p| { paint_control_borders(*self2.base_ref().hwnd_ref(), p)?; Ok(()) } }); } } pub struct WindowControlOpts { pub class_name: String, pub class_style: co::CS, pub class_icon: HICON, pub class_cursor: HCURSOR, pub class_bg_brush: HBRUSH, pub position: POINT, pub size: SIZE, pub style: co::WS, pub ex_style: co::WS_EX, pub ctrl_id: u16, pub horz_resize: Horz, pub vert_resize: Vert, } impl Default for WindowControlOpts { fn default() -> Self { Self { class_name: "".to_owned(), class_style: co::CS::DBLCLKS, class_icon: HICON::NULL, class_cursor: HCURSOR::NULL, class_bg_brush: HBRUSH::from_sys_color(co::COLOR::WINDOW), position: POINT { x: 0, y: 0 }, size: SIZE { cx: 0, cy: 0 }, style: co::WS::CHILD | co::WS::TABSTOP | co::WS::GROUP | co::WS::VISIBLE | co::WS::CLIPCHILDREN | co::WS::CLIPSIBLINGS, ex_style: co::WS_EX::LEFT, ctrl_id: 0, horz_resize: Horz::None, vert_resize: Vert::None, } } }
use std::ptr::NonNull; use std::sync::Arc; use crate::aliases::ErrResult; use crate::co; use crate::enums::IdMenu; use crate::gui::base::Base; use crate::gui::events::{EventsView, WindowEventsAll}; use crate::gui::privs::{multiply_dpi, paint_control_borders}; use crate::gui::raw_base::RawBase; use crate::gui::resizer::{Horz, Vert}; use crate::gui::traits::{AsWindow, Child, ParentEvents, UiThread, Window}; use crate::handles::{HBRUSH, HCURSOR, HICON}; use crate::handles::HWND; use crate::structs::{POINT, SIZE, WNDCLASSEX}; use crate::various::WString; struct Obj { base: RawBase, opts: WindowControlOpts, } impl Window for Obj { fn hwnd(&self) -> HWND { self.base.hwnd() } } #[derive(Clone)] pub(in crate::gui) struct RawControl(Arc<Obj>); impl Window for RawControl { fn hwnd(&self) -> HWND { self.0.base.hwnd() } } impl AsWindow for RawControl { fn as_window(&self) -> Arc<dyn Window> { self.0.clone() } } impl UiThread for RawControl { fn run_ui_thread<F>(&self, func: F) where F: FnOnce() -> ErrResult<()>, { self.0.base.run_ui_thread(func); } } impl ParentEvents for RawControl { fn on(&self) -> &WindowEventsAll { self.0.base.on() } } impl Child for RawControl { fn ctrl_id(&self) -> u16 { self.0.opts.ctrl_id } } impl RawControl { pub(in crate::gui) fn new( parent_base_ref: &Base, opts: WindowControlOpts) -> RawControl { let (horz, vert) = (opts.horz_resize, opts.vert_resize); let wnd = Self( Arc::new( Obj { base: RawBase::new(Some(parent_base_ref)), opts, }, ), ); wnd.default_message_handlers(parent_base_ref, horz, vert); wnd } pub(in crate::gui) fn base_ref(&self) -> &Base { self.0.base.base_ref() } fn default_message_handlers(&self, parent_base_ref: &Base, horz: Horz, vert: Vert) { self.base_ref().default_message_handlers(); parent_base_ref.privileged_events_ref().wm(parent_base_ref.create_or_initdlg(), { let self2 = self.clone(); let parent_base_ptr = NonNull::from(parent_base_ref); move |_| { let opts = &self2.0.opts; let mut wcx = WNDCLASSEX::default(); let mut 
class_name_buf = WString::default(); RawBase::fill_wndclassex(self2.base_ref().parent_hinstance()?, opts.class_style, opts.class_icon, opts.class_icon, opts.class_bg_brush, opts.class_cursor, &mut wcx, &mut class_name_buf)?; let atom = self2.0.base.register_class(&mut wcx)?; let mut wnd_pos = opts.position; let mut wnd_sz = opts.size; multiply_dpi(Some(&mut wnd_pos), Some(&mut wnd_sz))?; self2.0.base.create_window( atom, None, IdMenu::Id(opts.ctrl_id), wnd_pos, wnd_sz, opts.ex_style, opts.style, )?; unsafe { parent_base_ptr.as_ref().resizer_add( parent_base_ptr.as_ref(), self2.base_ref().hwnd_ref(), horz, vert)?; } Ok(0) } }); self.on().wm_nc_paint({ let self2 = self.clone(); move |p| { paint_control_borders(*self2.base_ref().hwnd_ref(), p)?; Ok(()) } }); } } pub struct WindowControlOpts { pub class_name: String, pub class_style: co::CS, pub class_icon: HICON, pub class_cursor: HCURSOR, pub class_bg_brush: HBRUSH, pub position: POINT, pub size: SIZE, pub style: co::WS, pub ex_style: co::WS_EX, pub ctrl_id: u16, pub horz_resize: Horz, pub vert_resize: Vert, } impl Default for WindowControlOpts { fn default() -> Self { Sel
}
f { class_name: "".to_owned(), class_style: co::CS::DBLCLKS, class_icon: HICON::NULL, class_cursor: HCURSOR::NULL, class_bg_brush: HBRUSH::from_sys_color(co::COLOR::WINDOW), position: POINT { x: 0, y: 0 }, size: SIZE { cx: 0, cy: 0 }, style: co::WS::CHILD | co::WS::TABSTOP | co::WS::GROUP | co::WS::VISIBLE | co::WS::CLIPCHILDREN | co::WS::CLIPSIBLINGS, ex_style: co::WS_EX::LEFT, ctrl_id: 0, horz_resize: Horz::None, vert_resize: Vert::None, } }
function_block-function_prefixed
[ { "content": "/// [`RegisterClassEx`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-registerclassexw)\n\n/// function.\n\npub fn RegisterClassEx(wcx: &WNDCLASSEX) -> WinResult<ATOM> {\n\n\tmatch unsafe { user32::RegisterClassExW(wcx as *const _ as _) } {\n\n\t\t0 => Err(GetLastError()),\n\n\t\tatom => Ok(ATOM(atom)),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 0, "score": 331877.86579519516 }, { "content": "/// [`EnumWindows`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-enumwindows)\n\n/// function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::{EnumWindows, HWND};\n\n///\n\n/// EnumWindows(|hwnd: HWND| -> bool {\n\n/// println!(\"HWND: {}\", hwnd);\n\n/// true\n\n/// })?;\n\n/// ```\n\npub fn EnumWindows<F>(func: F) -> WinResult<()>\n\n\twhere F: Fn(HWND) -> bool,\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tuser32::EnumWindows(\n\n\t\t\t\tenum_windows_proc::<F> as _,\n\n\t\t\t\t&func as *const _ as _,\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\nextern \"system\" fn enum_windows_proc<F>(hwnd: HWND, lparam: isize) -> BOOL\n\n\twhere F: Fn(HWND) -> bool,\n\n{\n\n\tlet func = unsafe { &*(lparam as *const F) };\n\n\tfunc(hwnd) as _\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 1, "score": 312939.96734034864 }, { "content": "/// [`GetMessagePos`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getmessagepos)\n\n/// function.\n\npub fn GetMessagePos() -> POINT {\n\n\tlet xy = unsafe { user32::GetMessagePos() };\n\n\tPOINT::new(LOWORD(xy) as _, HIWORD(xy) as _)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 2, "score": 246667.09501007455 }, { "content": "/// [`GetCommandLine`](https://docs.microsoft.com/en-us/windows/win32/api/processenv/nf-processenv-getcommandlinew)\n\n/// function.\n\n///\n\n/// For an example, see [`CommandLineToArgv`](crate::CommandLineToArgv).\n\npub fn GetCommandLine() -> String {\n\n\tWString::from_wchars_nullt(unsafe { 
kernel32::GetCommandLineW() }).to_string()\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 3, "score": 246301.42282397638 }, { "content": "/// [`GetEnvironmentStrings`](https://docs.microsoft.com/en-us/windows/win32/api/processenv/nf-processenv-getenvironmentstringsw)\n\n/// function.\n\n///\n\n/// Returns the parsed strings, and automatically frees the retrieved\n\n/// environment block with\n\n/// [`FreeEnvironmentStrings`](https://docs.microsoft.com/en-us/windows/win32/api/processenv/nf-processenv-freeenvironmentstringsw).\n\n///\n\n/// # Examples\n\n///\n\n/// Retrieving and printing the key/value pairs of all environment strings:\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::GetEnvironmentStrings;\n\n///\n\n/// let env_vars = GetEnvironmentStrings()?;\n\n/// for (k, v) in env_vars.iter() {\n\n/// println!(\"{} = {}\", k, v);\n\n/// }\n\n/// ```\n\npub fn GetEnvironmentStrings() -> WinResult<HashMap<String, String>> {\n\n\tunsafe { kernel32::GetEnvironmentStringsW().as_mut() }\n\n\t\t.map(|ptr| {\n\n\t\t\tlet vec_env_strs = parse_multi_z_str(ptr as *mut _ as _);\n\n\t\t\tunsafe { kernel32::FreeEnvironmentStringsW(ptr); }\n\n\n\n\t\t\tlet mut map = HashMap::with_capacity(vec_env_strs.len());\n\n\t\t\tfor env_str in vec_env_strs {\n\n\t\t\t\tlet pair: Vec<&str> = env_str.split(\"=\").collect();\n\n\t\t\t\tmap.insert(pair[0].to_owned(), pair[1].to_owned());\n\n\t\t\t}\n\n\t\t\tmap\n\n\t\t})\n\n\t\t.ok_or_else(|| GetLastError())\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 4, "score": 236484.77552822768 }, { "content": "/// [`GetMenuCheckMarkDimensions`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getmenucheckmarkdimensions)\n\n/// function.\n\npub fn GetMenuCheckMarkDimensions() -> SIZE {\n\n\tlet dims = unsafe { user32::GetMenuCheckMarkDimensions() };\n\n\tSIZE::new(LOWORD(dims as _) as _, HIWORD(dims as _) as _)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 5, "score": 236243.2852427564 }, { "content": "/// 
[`GetCursorPos`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getcursorpos)\n\n/// function.\n\npub fn GetCursorPos() -> WinResult<POINT> {\n\n\tlet mut pt = POINT::default();\n\n\tbool_to_winresult(\n\n\t\tunsafe { user32::GetCursorPos(&mut pt as *mut _ as _) },\n\n\t).map(|_| pt)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 6, "score": 230186.39392187982 }, { "content": "/// [`GetCurrentDirectory`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-getcurrentdirectory)\n\n/// function.\n\npub fn GetCurrentDirectory() -> WinResult<String> {\n\n\tlet mut buf = WString::new_alloc_buffer(MAX_PATH + 1);\n\n\tmatch unsafe {\n\n\t\tkernel32::GetCurrentDirectoryW(buf.buffer_size() as _, buf.as_mut_ptr())\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n\n\t\t_ => Ok(buf.to_string()),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 7, "score": 229848.00622787236 }, { "content": "/// [`GetSystemDirectory`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemdirectoryw)\n\n/// function.\n\npub fn GetSystemDirectory() -> WinResult<String> {\n\n\tlet mut buf = WString::new_alloc_buffer(MAX_PATH + 1);\n\n\tmatch unsafe {\n\n\t\tkernel32::GetSystemDirectoryW(buf.as_mut_ptr(), buf.buffer_size() as _)\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n\n\t\t_ => Ok(buf.to_string()),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 8, "score": 229848.00622787236 }, { "content": "/// [`GetUserName`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-getusernamew)\n\n/// function.\n\npub fn GetUserName() -> WinResult<String> {\n\n\tlet mut buf = WString::new_alloc_buffer(UNLEN + 1);\n\n\tlet mut sz = buf.buffer_size() as u32;\n\n\n\n\tmatch unsafe { advapi32::GetUserNameW(buf.as_mut_ptr(), &mut sz) } {\n\n\t\t0 => Err(GetLastError()),\n\n\t\t_ => Ok(buf.to_string()),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 9, "score": 229848.00622787236 }, { "content": "/// 
[`GetComputerName`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-getcomputernamew)\n\n/// function.\n\npub fn GetComputerName() -> WinResult<String> {\n\n\tlet mut buf = WString::new_alloc_buffer(MAX_COMPUTERNAME_LENGTH + 1);\n\n\tlet mut sz = buf.buffer_size() as u32;\n\n\n\n\tbool_to_winresult(\n\n\t\tunsafe { kernel32::GetComputerNameW(buf.as_mut_ptr(), &mut sz) },\n\n\t).map(|_| buf.to_string())\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 10, "score": 229848.00622787236 }, { "content": "/// [`GetTempPath`](https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-gettemppathw)\n\n/// function.\n\npub fn GetTempPath() -> WinResult<String> {\n\n\tlet mut buf = WString::new_alloc_buffer(MAX_PATH + 1);\n\n\tmatch unsafe {\n\n\t\tkernel32::GetTempPathW(buf.buffer_size() as _, buf.as_mut_ptr()) }\n\n\t{\n\n\t\t0 => Err(GetLastError()),\n\n\t\t_ => Ok(buf.to_string()),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 11, "score": 229848.00622787236 }, { "content": "/// [`GetLogicalDriveStrings`](https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-getlogicaldrivestringsw)\n\n/// function.\n\npub fn GetLogicalDriveStrings() -> WinResult<Vec<String>> {\n\n\tmatch unsafe {\n\n\t\tkernel32::GetLogicalDriveStringsW(0, std::ptr::null_mut())\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n\n\t\tlen => {\n\n\t\t\tlet mut buf = WString::new_alloc_buffer(len as usize + 1);\n\n\n\n\t\t\tmatch unsafe {\n\n\t\t\t\tkernel32::GetLogicalDriveStringsW(len, buf.as_mut_ptr())\n\n\t\t\t} {\n\n\t\t\t\t0 => Err(GetLastError()),\n\n\t\t\t\t_ => Ok(parse_multi_z_str(unsafe { buf.as_ptr() })),\n\n\t\t\t}\n\n\t\t},\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 12, "score": 228694.74071660452 }, { "content": "/// [`IsWindowsVersionOrGreater`](https://docs.microsoft.com/en-us/windows/win32/api/versionhelpers/nf-versionhelpers-iswindowsversionorgreater)\n\n/// function.\n\npub fn 
IsWindowsVersionOrGreater(\n\n\tmajor_version: u16, minor_version: u16,\n\n\tservice_pack_major: u16) -> WinResult<bool>\n\n{\n\n\tlet mut osvi = OSVERSIONINFOEX::default();\n\n\tlet cond_mask = VerSetConditionMask(\n\n\t\tVerSetConditionMask(\n\n\t\t\tVerSetConditionMask(0, co::VER_MASK::MAJORVERSION, co::VER_COND::GREATER_EQUAL),\n\n\t\t\tco::VER_MASK::MINORVERSION, co::VER_COND::GREATER_EQUAL,\n\n\t\t),\n\n\t\tco::VER_MASK::SERVICEPACKMAJOR, co::VER_COND::GREATER_EQUAL\n\n\t);\n\n\n\n\tosvi.dwMajorVersion = major_version as _;\n\n\tosvi.dwMinorVersion = minor_version as _;\n\n\tosvi.wServicePackMajor = service_pack_major;\n\n\n\n\tVerifyVersionInfo(\n\n\t\t&mut osvi,\n\n\t\tco::VER_MASK::MAJORVERSION | co::VER_MASK::MINORVERSION | co::VER_MASK::SERVICEPACKMAJOR,\n\n\t\tcond_mask,\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 13, "score": 228371.98844695717 }, { "content": "/// [`GetStartupInfo`](https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getstartupinfow)\n\n/// function.\n\npub fn GetStartupInfo(si: &mut STARTUPINFO) {\n\n\tunsafe { kernel32::GetStartupInfoW(si as *mut _ as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 14, "score": 224829.20156926225 }, { "content": "/// [`GetSystemTime`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemtime)\n\n/// function.\n\npub fn GetSystemTime(st: &mut SYSTEMTIME) {\n\n\tunsafe { kernel32::GetSystemTime(st as *mut _ as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 15, "score": 224829.20156926225 }, { "content": "/// [`ExpandEnvironmentStrings`](https://docs.microsoft.com/en-us/windows/win32/api/processenv/nf-processenv-expandenvironmentstringsw)\n\n/// function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::ExpandEnvironmentStrings;\n\n///\n\n/// let expanded = ExpandEnvironmentStrings(\n\n/// \"Os %OS%, home %HOMEPATH% and temp %TEMP%\",\n\n/// )?;\n\n///\n\n/// println!(\"{}\", 
expanded);\n\n/// ```\n\npub fn ExpandEnvironmentStrings(src: &str) -> WinResult<String> {\n\n\tlet wsrc = WString::from_str(src);\n\n\tlet len = unsafe {\n\n\t\tkernel32::ExpandEnvironmentStringsW(\n\n\t\t\twsrc.as_ptr(),\n\n\t\t\tstd::ptr::null_mut(),\n\n\t\t\t0,\n\n\t\t)\n\n\t};\n\n\n\n\tlet mut buf = WString::new_alloc_buffer(len as _);\n\n\tmatch unsafe {\n\n\t\tkernel32::ExpandEnvironmentStringsW(\n\n\t\t\twsrc.as_ptr(),\n\n\t\t\tbuf.as_mut_ptr(),\n\n\t\t\tlen,\n\n\t\t)\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n\n\t\t_ => Ok(buf.to_string()),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 16, "score": 224246.90596507993 }, { "content": "/// [`AdjustWindowRectEx`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-adjustwindowrectex)\n\n/// function.\n\npub fn AdjustWindowRectEx(\n\n\trc: &mut RECT, style: co::WS,\n\n\thas_menu: bool, ex_style: co::WS_EX) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tuser32::AdjustWindowRectEx(\n\n\t\t\t\trc as *mut _ as _,\n\n\t\t\t\tstyle.0,\n\n\t\t\t\thas_menu as _,\n\n\t\t\t\tex_style.0,\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 17, "score": 222617.0535452592 }, { "content": "/// [`GetSystemInfo`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsysteminfo)\n\n/// function.\n\npub fn GetSystemInfo(si: &mut SYSTEM_INFO) {\n\n\tunsafe { kernel32::GetSystemInfo(si as *mut _ as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 18, "score": 220077.276684949 }, { "content": "/// [`OutputDebugString`](https://docs.microsoft.com/en-us/windows/win32/api/debugapi/nf-debugapi-outputdebugstringw)\n\n/// function.\n\npub fn OutputDebugString(output_string: &str) {\n\n\tunsafe {\n\n\t\tkernel32::OutputDebugStringW(WString::from_str(output_string).as_ptr())\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 19, "score": 216783.23375137226 }, { "content": "/// 
[`GetNativeSystemInfo`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getnativesysteminfo)\n\n/// function.\n\npub fn GetNativeSystemInfo(si: &mut SYSTEM_INFO) {\n\n\tunsafe { kernel32::GetNativeSystemInfo(si as *mut _ as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 20, "score": 215602.20573772467 }, { "content": "/// [`GetSystemTimeAsFileTime`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemtimeasfiletime)\n\n/// function.\n\npub fn GetSystemTimeAsFileTime(ft: &mut FILETIME) {\n\n\tunsafe { kernel32::GetSystemTimeAsFileTime(ft as *mut _ as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 21, "score": 215602.17026479624 }, { "content": "/// [`MoveFile`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-movefilew)\n\n/// function.\n\npub fn MoveFile(existing_file: &str, new_file: &str) -> WinResult<()> {\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tkernel32::MoveFileW(\n\n\t\t\t\tWString::from_str(existing_file).as_ptr(),\n\n\t\t\t\tWString::from_str(new_file).as_ptr(),\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 22, "score": 213272.70964588108 }, { "content": "/// [`GetDialogBaseUnits`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getdialogbaseunits)\n\n/// function.\n\npub fn GetDialogBaseUnits() -> i32 {\n\n\tunsafe { user32::GetDialogBaseUnits() }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 23, "score": 212688.6183648404 }, { "content": "/// Any child window.\n\npub trait Child: Window {\n\n\t/// Returns the control ID.\n\n\tfn ctrl_id(&self) -> u16;\n\n}\n\n\n", "file_path": "src/gui/traits.rs", "rank": 24, "score": 211984.58199018482 }, { "content": "/// [`GetSystemTimePreciseAsFileTime`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemtimepreciseasfiletime)\n\n/// function.\n\npub fn GetSystemTimePreciseAsFileTime(ft: &mut FILETIME) {\n\n\tunsafe { 
kernel32::GetSystemTimePreciseAsFileTime(ft as *mut _ as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 25, "score": 211380.47435330861 }, { "content": "/// [`TrackMouseEvent`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-trackmouseevent)\n\n/// function.\n\npub fn TrackMouseEvent(tme: &mut TRACKMOUSEEVENT) -> WinResult<()> {\n\n\tbool_to_winresult(\n\n\t\tunsafe { user32::TrackMouseEvent(tme as *mut _ as _) },\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 26, "score": 211140.47157658852 }, { "content": "/// [`IsWindowsServer`](https://docs.microsoft.com/en-us/windows/win32/api/versionhelpers/nf-versionhelpers-iswindowsserver)\n\n/// function.\n\npub fn IsWindowsServer() -> WinResult<bool> {\n\n\tlet mut osvi = OSVERSIONINFOEX::default();\n\n\tosvi.wProductType = co::VER_NT::WORKSTATION;\n\n\tlet cond_mask = VerSetConditionMask(\n\n\t\t0, co::VER_MASK::PRODUCT_TYPE, co::VER_COND::EQUAL);\n\n\tVerifyVersionInfo(&mut osvi, co::VER_MASK::PRODUCT_TYPE, cond_mask)\n\n\t\t.map(|b| !b) // not workstation\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 27, "score": 208878.71235123219 }, { "content": "/// [`SHFileOperation`](https://docs.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-shfileoperationw)\n\n/// function.\n\npub fn SHFileOperation(file_op: &mut SHFILEOPSTRUCT) -> WinResult<()> {\n\n\tmatch unsafe {\n\n\t\tshell32::SHFileOperationW(file_op as *mut _ as _)\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n\n\t\t_ => Ok(()),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 28, "score": 206918.81059495432 }, { "content": "/// [`CoTaskMemFree`](https://docs.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-cotaskmemfree)\n\n/// function.\n\npub fn CoTaskMemFree<T>(pv: *mut T) {\n\n\tunsafe { ole32::CoTaskMemFree(pv as _) }\n\n}\n\n\n", "file_path": "src/com/funcs.rs", "rank": 29, "score": 206918.77456618636 }, { "content": "/// 
[`GlobalMemoryStatusEx`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-globalmemorystatusex)\n\n/// function.\n\npub fn GlobalMemoryStatusEx(msx: &mut MEMORYSTATUSEX) -> WinResult<()> {\n\n\tbool_to_winresult(\n\n\t\tunsafe { kernel32::GlobalMemoryStatusEx(msx as *mut _ as _) },\n\n\t)\n\n}\n\n\n\n/// [`HIBYTE`](https://docs.microsoft.com/en-us/previous-versions/windows/desktop/legacy/ms632656(v=vs.85))\n\n/// function. Originally a macro.\n\npub const fn HIBYTE(v: u16) -> u8 {\n\n\t(v >> 8 & 0xff) as _\n\n}\n\n\n\n/// Returns the high-order `u32` of an `u64`.\n\npub const fn HIDWORD(v: u64) -> u32 {\n\n\t(v >> 32 & 0xffff_ffff) as _\n\n}\n\n\n\n/// [`HIWORD`](https://docs.microsoft.com/en-us/previous-versions/windows/desktop/legacy/ms632657(v=vs.85))\n\n/// function. Originally a macro.\n\npub const fn HIWORD(v: u32) -> u16 {\n\n\t(v >> 16 & 0xffff) as _\n\n}\n\n\n\n/// [`HRESULT_FROM_WIN32`](https://docs.microsoft.com/en-us/windows/win32/api/winerror/nf-winerror-hresult_from_win32)\n\n/// function. 
Originally a macro.\n\npub const fn HRESULT_FROM_WIN32(hr: HRESULT) -> co::ERROR {\n\n\tco::ERROR((hr as u32) & 0xffff)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 30, "score": 206918.77456618636 }, { "content": "/// [`IsWindowsVistaOrGreater`](https://docs.microsoft.com/en-us/windows/win32/api/versionhelpers/nf-versionhelpers-iswindowsvistaorgreater)\n\n/// function.\n\npub fn IsWindowsVistaOrGreater() -> WinResult<bool> {\n\n\tIsWindowsVersionOrGreater(\n\n\t\tHIBYTE(co::WIN32::WINNT_VISTA.0) as _,\n\n\t\tLOBYTE(co::WIN32::WINNT_VISTA.0) as _,\n\n\t\t0,\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 31, "score": 203824.57208175652 }, { "content": "/// Replaces the extension by the given one.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::path;\n\n///\n\n/// let p = path::replace_extension(\n\n/// \"C:\\\\Temp\\\\something.txt\", \".sh\"); // C:\\Temp\\something.sh\n\n/// ```\n\npub fn replace_extension(full_path: &str, new_extension: &str) -> String {\n\n\tif let Some(last) = full_path.chars().last() {\n\n\t\tif last == '\\\\' { // full_path is a directory, do nothing\n\n\t\t\treturn rtrim_backslash(full_path).to_owned();\n\n\t\t}\n\n\t}\n\n\n\n\tlet new_has_dot = new_extension.chars().next() == Some('.');\n\n\tfull_path.rfind('.')\n\n\t\t.map_or_else(\n\n\t\t\t|| format!(\"{}{}{}\", // file name without extension, just append it\n\n\t\t\t\tfull_path,\n\n\t\t\t\tif new_has_dot { \"\" } else { \".\" },\n\n\t\t\t\tnew_extension,\n\n\t\t\t),\n\n\t\t\t|idx| format!(\"{}{}{}\",\n\n\t\t\t\t&full_path[0..idx],\n\n\t\t\t\tif new_has_dot { \"\" } else { \".\" },\n\n\t\t\t\tnew_extension,\n\n\t\t\t),\n\n\t\t)\n\n}\n\n\n", "file_path": "src/various/path.rs", "rank": 32, "score": 199133.00932340318 }, { "content": "/// Keeps the file name and replaces the path by the given one.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::path;\n\n///\n\n/// let p = path::replace_path( // C:\\another\\foo.txt\n\n/// 
\"C:\\\\Temp\\\\foo.txt\",\n\n/// \"C:\\\\another\",\n\n/// );\n\n/// ```\n\npub fn replace_path(full_path: &str, new_path: &str) -> String {\n\n\tlet file_name = get_file_name(full_path);\n\n\tformat!(\"{}{}{}\",\n\n\t\trtrim_backslash(new_path),\n\n\t\tif file_name.is_some() { \"\\\\\" } else { \"\" },\n\n\t\tfile_name.unwrap_or(\"\"))\n\n}\n\n\n", "file_path": "src/various/path.rs", "rank": 33, "score": 199132.72320472402 }, { "content": "/// [`SetClipboardData`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-setclipboarddata)\n\n/// function.\n\npub fn SetClipboardData(format: co::CF, hmem: *mut u8) -> WinResult<*mut u8> {\n\n\tunsafe { user32::SetClipboardData(format.0, hmem as _).as_mut() }\n\n\t\t.map(|hmem| hmem as *mut _ as _)\n\n\t\t.ok_or_else(|| GetLastError())\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 34, "score": 199055.99709650062 }, { "content": "/// [`CommandLineToArgv`](https://docs.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-commandlinetoargvw)\n\n/// function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::{CommandLineToArgv, GetCommandLine};\n\n///\n\n/// let args = CommandLineToArgv(&GetCommandLine())?;\n\n/// for arg in args.iter() {\n\n/// println!(\"{}\", arg);\n\n/// }\n\n/// ```\n\npub fn CommandLineToArgv(cmd_line: &str) -> WinResult<Vec<String>> {\n\n\tlet mut num_args = i32::default();\n\n\tlet lp_arr = unsafe {\n\n\t\tshell32::CommandLineToArgvW(\n\n\t\t\tWString::from_str(cmd_line).as_ptr(),\n\n\t\t\t&mut num_args,\n\n\t\t)\n\n\t};\n\n\tif lp_arr.is_null() {\n\n\t\treturn Err(GetLastError());\n\n\t}\n\n\n\n\tlet mut strs = Vec::with_capacity(num_args as _);\n\n\tfor lp in unsafe { std::slice::from_raw_parts(lp_arr, num_args as _) }.iter() {\n\n\t\tstrs.push(WString::from_wchars_nullt(*lp).to_string());\n\n\t}\n\n\n\n\t(HLOCAL { ptr: lp_arr as _ })\n\n\t\t.LocalFree()\n\n\t\t.map(|_| strs)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 35, "score": 
198922.9438730072 }, { "content": "/// [`ChooseColor`](https://docs.microsoft.com/en-us/previous-versions/windows/desktop/legacy/ms646912(v=vs.85))\n\n/// function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::{co, ChooseColor, CHOOSECOLOR};\n\n///\n\n/// let parent_hwnd: HWND; // initialized somewhere\n\n///\n\n/// let mut cc = CHOOSECOLOR::default();\n\n/// let mut custom_colors = [COLORREF::new(255, 255, 255); 16];\n\n///\n\n/// cc.hwndOwner = parent_hwnd;\n\n/// cc.Flags = co::CC::ANYCOLOR | co::CC::FULLOPEN | co::CC::RGBINIT;\n\n/// cc.rgbResult = COLORREF::new(255, 0, 0); // color initially chosen\n\n/// cc.set_lpCustColors(&mut custom_colors);\n\n///\n\n/// if ChooseColor(&mut cc)? {\n\n/// println!(\"The color: {} {} {}\",\n\n/// cc.rgbResult.GetRValue(),\n\n/// cc.rgbResult.GetGValue(),\n\n/// cc.rgbResult.GetBValue(),\n\n/// );\n\n/// }\n\n/// ```\n\npub fn ChooseColor(cc: &mut CHOOSECOLOR) -> Result<bool, co::CDERR> {\n\n\tmatch unsafe { comdlg32::ChooseColorW(cc as *mut _ as _) } {\n\n\t\t0 => match CommDlgExtendedError() {\n\n\t\t\tco::CDERR::NoValue => Ok(false),\n\n\t\t\terr => Err(err),\n\n\t\t},\n\n\t\t_ => Ok(true),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 36, "score": 196550.27748769132 }, { "content": "/// Replaces the file name by the given one.\n\npub fn replace_file_name(full_path: &str, new_file: &str) -> String {\n\n\tget_path(full_path)\n\n\t\t.map_or_else(\n\n\t\t\t|| new_file.to_owned(),\n\n\t\t\t|path| format!(\"{}\\\\{}\", path, new_file),\n\n\t\t)\n\n}\n\n\n", "file_path": "src/various/path.rs", "rank": 37, "score": 195343.45422311255 }, { "content": "#[cfg(not(debug_assertions))]\n\npub fn exe_path() -> WinResult<String> {\n\n\tOk(\n\n\t\tget_path(&HINSTANCE::NULL.GetModuleFileName()?)\n\n\t\t\t.unwrap().to_owned(),\n\n\t)\n\n}\n\n\n", "file_path": "src/various/path.rs", "rank": 38, "score": 190290.26563960762 }, { "content": "/// 
[`ReplaceFile`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-replacefilew)\n\n/// function.\n\npub fn ReplaceFile(\n\n\treplaced: &str, replacement: &str,\n\n\tbackup: Option<&str>, flags: co::REPLACEFILE) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tkernel32::ReplaceFileW(\n\n\t\t\t\tWString::from_str(replaced).as_ptr(),\n\n\t\t\t\tWString::from_str(replacement).as_ptr(),\n\n\t\t\t\tWString::from_opt_str(backup).as_ptr(),\n\n\t\t\t\tflags.0,\n\n\t\t\t\tstd::ptr::null_mut(),\n\n\t\t\t\tstd::ptr::null_mut(),\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 39, "score": 189182.48138853261 }, { "content": "/// [`GetMessage`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getmessagew)\n\n/// function.\n\npub fn GetMessage(\n\n\tmsg: &mut MSG, hwnd: Option<HWND>,\n\n\tmsg_filter_min: u32, msg_filter_max: u32) -> WinResult<bool>\n\n{\n\n\tmatch unsafe {\n\n\t\tuser32::GetMessageW(\n\n\t\t\tmsg as *mut _ as _,\n\n\t\t\thwnd.map_or(std::ptr::null_mut(), |h| h.ptr),\n\n\t\t\tmsg_filter_min, msg_filter_max,\n\n\t\t)\n\n\t} {\n\n\t\t-1 => Err(GetLastError()),\n\n\t\t0 => Ok(false),\n\n\t\t_ => Ok(true),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 40, "score": 189182.48138853261 }, { "content": "/// [`CopyFile`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-copyfilew)\n\n/// function.\n\npub fn CopyFile(\n\n\texisting_file: &str, new_file: &str,\n\n\tfail_if_exists: bool) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tkernel32::CopyFileW(\n\n\t\t\t\tWString::from_str(existing_file).as_ptr(),\n\n\t\t\t\tWString::from_str(new_file).as_ptr(),\n\n\t\t\t\tfail_if_exists as _,\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 41, "score": 189182.48138853261 }, { "content": "/// [`PeekMessage`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-peekmessagew)\n\n/// 
function.\n\npub fn PeekMessage(\n\n\tmsg: &mut MSG, hwnd: Option<HWND>,\n\n\tmsg_filter_min: u32, msg_filter_max: u32, remove_msg: co::PM) -> bool\n\n{\n\n\tunsafe {\n\n\t\tuser32::PeekMessageW(\n\n\t\t\tmsg as *mut _ as _,\n\n\t\t\thwnd.map_or(std::ptr::null_mut(), |h| h.ptr),\n\n\t\t\tmsg_filter_min,\n\n\t\t\tmsg_filter_max,\n\n\t\t\tremove_msg.0,\n\n\t\t) != 0\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 42, "score": 189182.48138853261 }, { "content": "/// [`AnyPopup`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-anypopup)\n\n/// function.\n\npub fn AnyPopup() -> bool {\n\n\tunsafe { user32::AnyPopup() != 0 }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 43, "score": 184656.12669252738 }, { "content": "/// [`TaskDialogIndirect`](https://docs.microsoft.com/en-us/windows/win32/api/commctrl/nf-commctrl-taskdialogindirect)\n\n/// function.\n\n///\n\n/// Returns:\n\n/// * the selected `co::DLGID` button;\n\n/// * if `pRadioButtons` of [`TASKDIALOGCONFIG`](crate::TASKDIALOGCONFIG) struct\n\n/// was set, the `u16` control ID of one of the specified radio buttons;\n\n/// otherwise zero.\n\n///\n\n/// If you don't need all customizations, consider the\n\n/// [`TaskDialog`](crate::HWND::TaskDialog) method.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::{co, gui, IconIdTdicon};\n\n/// use winsafe::{TASKDIALOG_BUTTON TASKDIALOGCONFIG, TaskDialogIndirect};\n\n/// use winsafe::WString;\n\n///\n\n/// let wnd: gui::WindowMain; // initialized somewhere\n\n///\n\n/// let mut tdc = TASKDIALOGCONFIG::default();\n\n/// tdc.hwndParent = wnd.hwnd();\n\n/// tdc.dwCommonButtons = co::TDCBF::YES | co::TDCBF::NO;\n\n/// tdc.set_hMainIcon(IconIdTdicon::Tdicon(co::TD_ICON::INFORMATION));\n\n///\n\n/// let mut title = WString::from_str(\"Title\");\n\n/// tdc.set_pszWindowTitle(Some(&mut title));\n\n///\n\n/// let mut header = WString::from_str(\"Header\");\n\n/// tdc.set_pszMainInstruction(Some(&mut header));\n\n///\n\n/// 
let mut body = WString::from_str(\"Body\");\n\n/// tdc.set_pszContent(Some(&mut body));\n\n///\n\n/// // A custom button to appear before Yes and No.\n\n/// let mut btn1 = TASKDIALOG_BUTTON::default();\n\n/// let mut btn1_text = WString::from_str(\"Hello\");\n\n/// btn1.set_pszButtonText(Some(&mut btn1_text));\n\n/// btn1.set_nButtonID(333); // this ID is returned if user clicks this button\n\n/// let btns_slice = &mut [btn1];\n\n/// tdc.set_pButtons(Some(btns_slice));\n\n///\n\n/// TaskDialogIndirect(&tdc, None)?;\n\n/// ```\n\npub fn TaskDialogIndirect(\n\n\ttask_config: &TASKDIALOGCONFIG,\n\n\tverification_flag_checked: Option<&mut bool>) -> WinResult<(co::DLGID, u16)>\n\n{\n\n\tlet mut pn_button = i32::default();\n\n\tlet mut pn_radio_button = i32::default();\n\n\tlet mut pf_bool: BOOL = 0;\n\n\n\n\thr_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tcomctl32::TaskDialogIndirect(\n\n\t\t\t\ttask_config as *const _ as _,\n\n\t\t\t\t&mut pn_button,\n\n\t\t\t\t&mut pn_radio_button,\n\n\t\t\t\tverification_flag_checked.as_ref()\n\n\t\t\t\t\t.map_or(std::ptr::null_mut(), |_| &mut pf_bool),\n\n\t\t\t)\n\n\t\t},\n\n\t)?;\n\n\n\n\tif let Some(pf) = verification_flag_checked {\n\n\t\t*pf = pf_bool != 0;\n\n\t}\n\n\tOk((co::DLGID(pn_button as _), pn_radio_button as _))\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 44, "score": 184604.829883517 }, { "content": "/// [`ChangeDisplaySettings`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-changedisplaysettingsw)\n\n/// function.\n\npub fn ChangeDisplaySettings(\n\n\tdev_mode: &mut DEVMODE,\n\n\tflags: co::CDS) -> Result<co::DISP_CHANGE, co::DISP_CHANGE>\n\n{\n\n\tlet ret = unsafe {\n\n\t\tuser32::ChangeDisplaySettingsW(dev_mode as *mut _ as _, flags.0)\n\n\t};\n\n\n\n\tif ret < 0 {\n\n\t\tErr(co::DISP_CHANGE(ret))\n\n\t} else {\n\n\t\tOk(co::DISP_CHANGE(ret))\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 45, "score": 184567.70123898445 }, { "content": "/// 
[`GetSystemTimes`](https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getsystemtimes)\n\n/// function.\n\npub fn GetSystemTimes(\n\n\tidle_time: &mut FILETIME,\n\n\tkernel_time: &mut FILETIME,\n\n\tuser_time: &mut FILETIME) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tkernel32::GetSystemTimes(\n\n\t\t\t\tidle_time as *mut _ as _,\n\n\t\t\t\tkernel_time as *mut _ as _,\n\n\t\t\t\tuser_time as *mut _ as _,\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 46, "score": 184567.70123898445 }, { "content": "/// [`AttachThreadInput`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-attachthreadinput)\n\n/// function.\n\npub fn AttachThreadInput(\n\n\tattach_id: u32, attach_to_id: u32, do_attach: bool) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tuser32::AttachThreadInput(attach_id, attach_to_id, do_attach as _)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 47, "score": 184567.70123898445 }, { "content": "/// [`InitCommonControls`](https://docs.microsoft.com/en-us/windows/win32/api/commctrl/nf-commctrl-initcommoncontrols)\n\n/// function.\n\npub fn InitCommonControls() {\n\n\tunsafe { comctl32::InitCommonControls() }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 48, "score": 184567.70123898445 }, { "content": "/// [`VerifyVersionInfo`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-verifyversioninfow)\n\n/// function.\n\npub fn VerifyVersionInfo(\n\n\tosvix: &mut OSVERSIONINFOEX,\n\n\ttype_mask: co::VER_MASK,\n\n\tcondition_mask: u64) -> WinResult<bool>\n\n{\n\n\tmatch unsafe {\n\n\t\tkernel32::VerifyVersionInfoW(\n\n\t\t\tosvix as *mut _ as _,\n\n\t\t\ttype_mask.0,\n\n\t\t\tcondition_mask,\n\n\t\t)\n\n\t} {\n\n\t\t0 => match GetLastError() {\n\n\t\t\tco::ERROR::OLD_WIN_VERSION => Ok(false),\n\n\t\t\terr => Err(err),\n\n\t\t},\n\n\t\t_ => Ok(true),\n\n\t}\n\n}\n\n\n", "file_path": 
"src/funcs.rs", "rank": 49, "score": 184567.70123898445 }, { "content": "/// [`Shell_NotifyIcon`](https://docs.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-shell_notifyiconw)\n\n/// function.\n\npub fn Shell_NotifyIcon(\n\n\tmessage: co::NIM, data: &mut NOTIFYICONDATA) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe { shell32::Shell_NotifyIconW(message.0, data as *mut _ as _) },\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 50, "score": 184567.6652102165 }, { "content": "/// [`CoUninitialize`](https://docs.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-couninitialize)\n\n/// function.\n\n///\n\n/// **Note:** Must be called **after** all COM interfaces have been released,\n\n/// otherwise you'll get a segmentation fault error with\n\n/// `STATUS_ACCESS_VIOLATION` code.\n\npub fn CoUninitialize() {\n\n\tunsafe { ole32::CoUninitialize() }\n\n}\n", "file_path": "src/com/funcs.rs", "rank": 51, "score": 184567.21385138133 }, { "content": "/// Converts a concrete window into an upcasted reference.\n\npub trait AsWindow: Window {\n\n\tfn as_window(&self) -> Arc<dyn Window>;\n\n}\n\n\n", "file_path": "src/gui/traits.rs", "rank": 52, "score": 180534.79132793975 }, { "content": "/// [`LockSetForegroundWindow`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-locksetforegroundwindow)\n\n/// function.\n\npub fn LockSetForegroundWindow(lock_code: co::LSFW) -> WinResult<()> {\n\n\tbool_to_winresult(\n\n\t\tunsafe { user32::LockSetForegroundWindow(lock_code.0) },\n\n\t)\n\n}\n\n\n\n/// Returns the low-order `u32` of an `u64`.\n\npub const fn LODWORD(v: u64) -> u32 {\n\n\t(v & 0xffff_ffff) as _\n\n}\n\n\n\n/// [`LOWORD`](https://docs.microsoft.com/en-us/previous-versions/windows/desktop/legacy/ms632659(v=vs.85))\n\n/// function. 
Originally a macro.\n\npub const fn LOWORD(v: u32) -> u16 {\n\n\t(v & 0xffff) as _\n\n}\n\n\n\n/// Function that implements\n\n/// [`MAKELONG`](https://docs.microsoft.com/en-us/previous-versions/windows/desktop/legacy/ms632660(v=vs.85)),\n\n/// [`MAKEWPARAM`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-makewparam),\n", "file_path": "src/funcs.rs", "rank": 53, "score": 180282.07890583007 }, { "content": "/// [`GetGUIThreadInfo`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getguithreadinfo)\n\n/// function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::{GetGUIThreadInfo, GUITHREADINFO, HWND};\n\n///\n\n/// let hwnd: HWND; // initialized somewhere\n\n///\n\n/// let mut gti = GUITHREADINFO::default();\n\n/// GetGUIThreadInfo(\n\n/// hwnd.GetWindowThreadProcessId(),\n\n/// &mut gti,\n\n/// )?;\n\n///\n\n/// println!(\"Caret rect: {}\", gti.rcCaret);\n\n/// ```\n\npub fn GetGUIThreadInfo(\n\n\tthread_id: u32, gti: &mut GUITHREADINFO) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe { user32::GetGUIThreadInfo(thread_id, gti as *mut _ as _) }\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 54, "score": 180281.7021392017 }, { "content": "/// [`SHGetFileInfo`](https://docs.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-shgetfileinfow)\n\n/// function.\n\n///\n\n/// **Note:** If you are returning an icon in the `hIcon` member of\n\n/// [`SHFILEINFO`](crate::SHFILEINFO), it must be paired with an\n\n/// [`HICON::DestroyIcon`](crate::HICON::DestroyIcon) call.\n\npub fn SHGetFileInfo(\n\n\tpath: &str, file_attrs: co::FILE_ATTRIBUTE,\n\n\tshfi: &mut SHFILEINFO, flags: co::SHGFI) -> WinResult<u32>\n\n{\n\n\tmatch unsafe {\n\n\t\tshell32::SHGetFileInfoW(\n\n\t\t\tWString::from_str(path).as_ptr(),\n\n\t\t\tfile_attrs.0,\n\n\t\t\tshfi as *mut _ as _,\n\n\t\t\tstd::mem::size_of::<SHFILEINFO>() as _,\n\n\t\t\tflags.0,\n\n\t\t)\n\n\t} {\n\n\t\t0 => 
Err(GetLastError()),\n\n\t\tn => Ok(n as _),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 55, "score": 180262.4570180685 }, { "content": "/// [`MultiByteToWideChar`](https://docs.microsoft.com/en-us/windows/win32/api/stringapiset/nf-stringapiset-multibytetowidechar)\n\n/// function.\n\n///\n\n/// The resulting `Vec<u16>` includes a terminating null.\n\npub fn MultiByteToWideChar(\n\n\tcode_page: co::CP, flags: co::MBC,\n\n\tmulti_byte_str: &[u8]) -> WinResult<Vec<u16>>\n\n{\n\n\tmatch unsafe {\n\n\t\tkernel32::MultiByteToWideChar(\n\n\t\t\tcode_page.0 as _,\n\n\t\t\tflags.0,\n\n\t\t\tmulti_byte_str.as_ptr(),\n\n\t\t\tmulti_byte_str.len() as _,\n\n\t\t\tstd::ptr::null_mut(),\n\n\t\t\t0,\n\n\t\t)\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n\n\t\tnum_bytes => {\n\n\t\t\tlet num_bytes = num_bytes as usize + 1; // add room for terminating null\n\n\t\t\tlet mut dest_buf: Vec<u16> = vec![0x0000; num_bytes as _];\n\n\n\n\t\t\tmatch unsafe {\n", "file_path": "src/funcs.rs", "rank": 56, "score": 180261.97018290538 }, { "content": "/// [`WideCharToMultiByte`](https://docs.microsoft.com/en-us/windows/win32/api/stringapiset/nf-stringapiset-widechartomultibyte)\n\n/// function.\n\n///\n\n/// The resulting `Vec<u16>` includes a terminating null.\n\npub fn WideCharToMultiByte(\n\n\tcode_page: co::CP, flags: co::WC,\n\n\twide_char_str: &[u16], default_char: Option<u8>,\n\n\tused_default_char: Option<&mut bool>) -> WinResult<Vec<u8>> {\n\n\n\n\tlet mut default_char_buf = default_char.unwrap_or_default();\n\n\n\n\tmatch unsafe {\n\n\t\tkernel32::WideCharToMultiByte(\n\n\t\t\tcode_page.0 as _,\n\n\t\t\tflags.0,\n\n\t\t\twide_char_str.as_ptr(),\n\n\t\t\twide_char_str.len() as _,\n\n\t\t\tstd::ptr::null_mut(),\n\n\t\t\t0,\n\n\t\t\t&mut default_char_buf,\n\n\t\t\tstd::ptr::null_mut(),\n\n\t\t)\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n", "file_path": "src/funcs.rs", "rank": 57, "score": 180261.97018290538 }, { "content": "/// 
[`EnumDisplaySettingsEx`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-enumdisplaysettingsexw)\n\n/// function\n\npub fn EnumDisplaySettingsEx(\n\n\tdevice_name: Option<&str>,\n\n\tmode_num: co::ENUM_SETTINGS,\n\n\tdev_mode: &mut DEVMODE,\n\n\tflags: co::EDS) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tuser32::EnumDisplaySettingsExW(\n\n\t\t\t\tdevice_name.map_or(std::ptr::null(), |lp| WString::from_str(lp).as_ptr()),\n\n\t\t\t\tmode_num.0,\n\n\t\t\t\tdev_mode as *mut _ as _,\n\n\t\t\t\tflags.0\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 58, "score": 180257.28851545192 }, { "content": "/// [`SystemTimeToFileTime`](https://docs.microsoft.com/en-us/windows/win32/api/timezoneapi/nf-timezoneapi-systemtimetofiletime)\n\n/// function.\n\npub fn SystemTimeToFileTime(\n\n\tst: &SYSTEMTIME, ft: &mut FILETIME) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tkernel32::SystemTimeToFileTime(\n\n\t\t\t\tst as *const _ as _,\n\n\t\t\t\tft as *mut _ as _,\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 59, "score": 180257.28851545192 }, { "content": "/// [`FileTimeToSystemTime`](https://docs.microsoft.com/en-us/windows/win32/api/timezoneapi/nf-timezoneapi-filetimetosystemtime)\n\n/// function.\n\npub fn FileTimeToSystemTime(\n\n\tfile_time: &FILETIME, system_time: &mut SYSTEMTIME) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tkernel32::FileTimeToSystemTime(\n\n\t\t\t\tfile_time as *const _ as _,\n\n\t\t\t\tsystem_time as *mut _ as _,\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 60, "score": 180257.28851545192 }, { "content": "/// [`VerSetConditionMask`](https://docs.microsoft.com/en-us/windows/win32/api/winnt/nf-winnt-versetconditionmask)\n\n/// function.\n\npub fn VerSetConditionMask(\n\n\tcondition_mask: u64, type_mask: co::VER_MASK, condition: co::VER_COND) -> u64\n\n{\n\n\tunsafe 
{\n\n\t\tkernel32::VerSetConditionMask(condition_mask, type_mask.0, condition.0)\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 61, "score": 180257.28851545192 }, { "content": "/// [`SoundSentry`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-soundsentry)\n\n/// function.\n\npub fn SoundSentry() -> bool {\n\n\tunsafe { user32::SoundSentry() != 0 }\n\n}\n\n\n\n/// [`SystemParametersInfo`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-systemparametersinfow)\n\n/// function.\n\n///\n\n/// **Note:** The `pvParam` type varies according to `uiAction`. If you set it\n\n/// wrong, you're likely to cause a buffer overrun.\n\npub unsafe fn SystemParametersInfo<T>(\n\n\taction: co::SPI,\n\n\tui_param: u32,\n\n\tpv_param: &mut T,\n\n\twin_ini: co::SPIF) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tuser32::SystemParametersInfoW(\n\n\t\t\taction.0,\n\n\t\t\tui_param,\n\n\t\t\tpv_param as *mut _ as _,\n\n\t\t\twin_ini.0,\n\n\t\t),\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 62, "score": 180041.34596038514 }, { "content": "/// [`IsWow64Message`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-iswow64message)\n\n/// function.\n\npub fn IsWow64Message() -> bool {\n\n\treturn unsafe { user32::IsWow64Message() != 0}\n\n}\n\n\n\n/// [`LOBYTE`](https://docs.microsoft.com/en-us/previous-versions/windows/desktop/legacy/ms632658(v=vs.85))\n\n/// function. 
Originally a macro.\n\npub const fn LOBYTE(v: u16) -> u8 {\n\n\t(v & 0xff) as _\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 63, "score": 180041.34596038514 }, { "content": "/// [`GetFileVersionInfoSize`](https://docs.microsoft.com/en-us/windows/win32/api/winver/nf-winver-getfileversioninfosizew)\n\n/// function.\n\npub fn GetFileVersionInfoSize(file_name: &str) -> WinResult<u32> {\n\n\tlet mut dw_handle = u32::default();\n\n\tmatch unsafe {\n\n\t\tversion::GetFileVersionInfoSizeW(\n\n\t\t\tWString::from_str(file_name).as_ptr(),\n\n\t\t\t&mut dw_handle,\n\n\t\t)\n\n\t} {\n\n\t\t0 => Err(GetLastError()),\n\n\t\tsz => Ok(sz)\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 64, "score": 176562.28551961438 }, { "content": "/// [`Sleep`](https://docs.microsoft.com/en-us/windows/win32/api/synchapi/nf-synchapi-sleep)\n\n/// function.\n\npub fn Sleep(milliseconds: u32) {\n\n\tunsafe { kernel32::Sleep(milliseconds) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 65, "score": 176216.84121946478 }, { "content": "/// [`CloseClipboard`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-closeclipboard)\n\n/// function.\n\npub fn CloseClipboard() -> WinResult<()> {\n\n\tbool_to_winresult(unsafe { user32::CloseClipboard() })\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 66, "score": 175730.96926562054 }, { "content": "/// [`EmptyClipboard`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-emptyclipboard)\n\n/// function.\n\npub fn EmptyClipboard() -> WinResult<()> {\n\n\tbool_to_winresult(unsafe { user32::EmptyClipboard() })\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 67, "score": 175730.96926562054 }, { "content": "/// [`ReleaseCapture`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-releasecapture)\n\n/// function.\n\npub fn ReleaseCapture() -> WinResult<()> {\n\n\tbool_to_winresult(unsafe { user32::ReleaseCapture() })\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 68, "score": 
175730.96926562054 }, { "content": "/// [`WaitMessage`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-waitmessage)\n\n/// function.\n\npub fn WaitMessage() -> WinResult<()> {\n\n\tbool_to_winresult(unsafe { user32::WaitMessage() })\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 69, "score": 175730.96926562054 }, { "content": "/// [`EndMenu`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-endmenu)\n\n/// function.\n\npub fn EndMenu() -> WinResult<()> {\n\n\tbool_to_winresult(unsafe { user32::EndMenu() })\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 70, "score": 175730.96926562054 }, { "content": "/// [`GetTickCount64`](https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-gettickcount64)\n\n/// function.\n\npub fn GetTickCount64() -> u64 {\n\n\tunsafe { kernel32::GetTickCount64() }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 71, "score": 175730.93266784542 }, { "content": "/// [`SystemTimeToTzSpecificLocalTime`](https://docs.microsoft.com/en-us/windows/win32/api/timezoneapi/nf-timezoneapi-systemtimetotzspecificlocaltime)\n\n/// function.\n\npub fn SystemTimeToTzSpecificLocalTime(\n\n\ttime_zone: Option<&TIME_ZONE_INFORMATION>,\n\n\tuniversal_time: &SYSTEMTIME,\n\n\tlocal_time: &mut SYSTEMTIME) -> WinResult<()>\n\n{\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tkernel32::SystemTimeToTzSpecificLocalTime(\n\n\t\t\t\ttime_zone.map_or(std::ptr::null(), |lp| lp as *const _ as _),\n\n\t\t\t\tuniversal_time as *const _ as _,\n\n\t\t\t\tlocal_time as *mut _ as _,\n\n\t\t\t)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 72, "score": 172436.57024919256 }, { "content": "/// [`GetDoubleClickTime`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getdoubleclicktime)\n\n/// function.\n\npub fn GetDoubleClickTime() -> u32 {\n\n\tunsafe { user32::GetDoubleClickTime() }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 73, "score": 171695.73620261435 }, { 
"content": "/// [`GetLargePageMinimum`](https://docs.microsoft.com/en-us/windows/win32/api/memoryapi/nf-memoryapi-getlargepageminimum)\n\n/// function.\n\npub fn GetLargePageMinimum() -> u64 {\n\n\tunsafe { kernel32::GetLargePageMinimum() }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 74, "score": 171695.73620261435 }, { "content": "/// [`GetCurrentProcessId`](https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getcurrentprocessid)\n\n/// function.\n\npub fn GetCurrentProcessId() -> u32 {\n\n\tunsafe { kernel32::GetCurrentProcessId() }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 75, "score": 171695.73620261435 }, { "content": "/// [`GetCurrentThreadId`](https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getcurrentthreadid)\n\n/// function.\n\npub fn GetCurrentThreadId() -> u32 {\n\n\tunsafe { kernel32::GetCurrentThreadId() }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 76, "score": 171695.73620261435 }, { "content": "/// [`SetProcessDPIAware`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-setprocessdpiaware)\n\n/// function.\n\npub fn SetProcessDPIAware() -> WinResult<()> {\n\n\tbool_to_winresult(unsafe { user32::SetProcessDPIAware() })\n\n}\n\n\n\n/// [`SHAddToRecentDocs`](https://docs.microsoft.com/en-us/windows/win32/api/shlobj_core/nf-shlobj_core-shaddtorecentdocs)\n\n/// function.\n\n///\n\n/// **Note:** The `pv` type varies according to `uFlags`. 
If you set it wrong,\n\n/// you're likely to cause a buffer overrun.\n\npub unsafe fn SHAddToRecentDocs<T>(flags: co::SHARD, pv: &T) {\n\n\tshell32::SHAddToRecentDocs(flags.0, pv as *const _ as _);\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 77, "score": 167910.2487756 }, { "content": "/// [`IsWindows10OrGreater`](https://docs.microsoft.com/en-us/windows/win32/api/versionhelpers/nf-versionhelpers-iswindows10orgreater)\n\n/// function.\n\npub fn IsWindows10OrGreater() -> WinResult<bool> {\n\n\tIsWindowsVersionOrGreater(\n\n\t\tHIBYTE(co::WIN32::WINNT_WINTHRESHOLD.0) as _,\n\n\t\tLOBYTE(co::WIN32::WINNT_WINTHRESHOLD.0) as _,\n\n\t\t0,\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 78, "score": 167871.26690786786 }, { "content": "/// [`IsWindows8OrGreater`](https://docs.microsoft.com/en-us/windows/win32/api/versionhelpers/nf-versionhelpers-iswindows8orgreater)\n\n/// function.\n\npub fn IsWindows8OrGreater() -> WinResult<bool> {\n\n\tIsWindowsVersionOrGreater(\n\n\t\tHIBYTE(co::WIN32::WINNT_WIN8.0) as _,\n\n\t\tLOBYTE(co::WIN32::WINNT_WIN8.0) as _,\n\n\t\t0,\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 79, "score": 167871.26690786786 }, { "content": "/// [`IsWindows7OrGreater`](https://docs.microsoft.com/en-us/windows/win32/api/versionhelpers/nf-versionhelpers-iswindows7orgreater)\n\n/// function.\n\npub fn IsWindows7OrGreater() -> WinResult<bool> {\n\n\tIsWindowsVersionOrGreater(\n\n\t\tHIBYTE(co::WIN32::WINNT_WIN7.0) as _,\n\n\t\tLOBYTE(co::WIN32::WINNT_WIN7.0) as _,\n\n\t\t0,\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 80, "score": 167871.26690786786 }, { "content": "/// [`GetLastError`](https://docs.microsoft.com/en-us/windows/win32/api/errhandlingapi/nf-errhandlingapi-getlasterror)\n\n/// function.\n\n///\n\n/// This function is automatically called every time a\n\n/// [`WinResult`](crate::WinResult) evaluates to `Err`, so it's unlikely that\n\n/// you ever need to call it.\n\npub fn GetLastError() -> co::ERROR 
{\n\n\tco::ERROR(unsafe { kernel32::GetLastError() })\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 81, "score": 167870.7140493775 }, { "content": "struct Obj { // actual fields of BaseNativeControl\n\n\thwnd: HWND,\n\n\tparent_ptr: NonNull<Base>,\n\n\tsubclass_events: WindowEvents, // for control subclassing\n\n}\n\n\n\nimpl Window for BaseNativeControl {\n\n\tfn hwnd(&self) -> HWND {\n\n\t\tself.0.hwnd\n\n\t}\n\n}\n\n\n\nimpl BaseNativeControl {\n\n\tpub(in crate::gui) fn new(parent_base_ref: &Base) -> BaseNativeControl {\n\n\t\tSelf(\n\n\t\t\tVeryUnsafeCell::new(\n\n\t\t\t\tObj {\n\n\t\t\t\t\thwnd: HWND::NULL,\n\n\t\t\t\t\tparent_ptr: NonNull::from(parent_base_ref),\n\n\t\t\t\t\tsubclass_events: WindowEvents::new(),\n", "file_path": "src/gui/native_controls/base_native_control.rs", "rank": 82, "score": 166962.25139229064 }, { "content": "/// [`DispatchMessage`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-dispatchmessagew)\n\n/// function.\n\npub fn DispatchMessage(msg: &MSG) -> isize {\n\n\tunsafe { user32::DispatchMessageW(msg as *const _ as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 83, "score": 164597.07975254388 }, { "content": "/// [`TranslateMessage`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-translatemessage)\n\n/// function.\n\npub fn TranslateMessage(msg: &MSG) -> bool {\n\n\tunsafe { user32::TranslateMessage(msg as *const _ as _) != 0 }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 84, "score": 164597.07975254388 }, { "content": "/// [`ShowCursor`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-showcursor)\n\n/// function.\n\npub fn ShowCursor(show: bool) -> i32 {\n\n\tunsafe { user32::ShowCursor(show as _) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 85, "score": 164597.07975254388 }, { "content": "/// [`QueryPerformanceCounter`](https://docs.microsoft.com/en-us/windows/win32/api/profileapi/nf-profileapi-queryperformancecounter)\n\n/// 
function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::{QueryPerformanceCounter, QueryPerformanceFrequency};\n\n///\n\n/// let freq = QueryPerformanceFrequency()?;\n\n/// let start = QueryPerformanceCounter()?;\n\n///\n\n/// // perform some operation...\n\n///\n\n/// let duration_ms =\n\n/// ((QueryPerformanceCounter()? - t0) as f64 / freq as f64) * 1000.0;\n\n///\n\n/// println!(\"Operation lasted {:.2} ms\", duration_ms);\n\n/// ```\n\npub fn QueryPerformanceCounter() -> WinResult<i64> {\n\n\tlet mut perf_count = i64::default();\n\n\tbool_to_winresult(\n\n\t\tunsafe { kernel32::QueryPerformanceCounter(&mut perf_count) },\n\n\t).map(|_| perf_count)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 86, "score": 164094.48743015854 }, { "content": "/// [`QueryPerformanceFrequency`](https://docs.microsoft.com/en-us/windows/win32/api/profileapi/nf-profileapi-queryperformancecounter)\n\n/// function.\n\npub fn QueryPerformanceFrequency() -> WinResult<i64> {\n\n\tlet mut freq = i64::default();\n\n\tbool_to_winresult(\n\n\t\tunsafe { kernel32::QueryPerformanceFrequency(&mut freq) },\n\n\t).map(|_| freq)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 87, "score": 164085.74288307838 }, { "content": "/// [`IsWindows8Point1OrGreater`](https://docs.microsoft.com/en-us/windows/win32/api/versionhelpers/nf-versionhelpers-iswindows8point1orgreater)\n\n/// function.\n\npub fn IsWindows8Point1OrGreater() -> WinResult<bool> {\n\n\tIsWindowsVersionOrGreater(\n\n\t\tHIBYTE(co::WIN32::WINNT_WINBLUE.0) as _,\n\n\t\tLOBYTE(co::WIN32::WINNT_WINBLUE.0) as _,\n\n\t\t0,\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 88, "score": 164085.74288307838 }, { "content": "/// [`PostQuitMessage`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-postquitmessage)\n\n/// function.\n\npub fn PostQuitMessage(exit_code: i32) {\n\n\tunsafe { user32::PostQuitMessage(exit_code) }\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 89, "score": 
164085.74288307838 }, { "content": "/// [`IsNativeVhdBoot`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-isnativevhdboot)\n\n/// function.\n\npub fn IsNativeVhdBoot() -> WinResult<bool> {\n\n\tlet mut is_native: BOOL = 0;\n\n\tmatch unsafe { kernel32::IsNativeVhdBoot(&mut is_native) } {\n\n\t\t0 => Err(GetLastError()),\n\n\t\t_ => Ok(is_native != 0),\n\n\t}\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 90, "score": 164085.74288307838 }, { "content": "/// [`GetClipCursor`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getclipcursor)\n\n/// function.\n\npub fn GetClipCursor() -> WinResult<RECT> {\n\n\tlet mut rc = RECT::default();\n\n\tbool_to_winresult(unsafe { user32::GetClipCursor(&mut rc as *mut _ as _) })\n\n\t\t.map(|_| rc)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 91, "score": 164085.74288307838 }, { "content": "/// [`CommDlgExtendedError`](https://docs.microsoft.com/en-us/windows/win32/api/commdlg/nf-commdlg-commdlgextendederror)\n\n/// function.\n\npub fn CommDlgExtendedError() -> co::CDERR {\n\n\tco::CDERR(unsafe { comdlg32::CommDlgExtendedError() })\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 92, "score": 164085.70685431044 }, { "content": "/// Returns an iterator over each part of the path.\n\npub fn iter(full_path: &str) -> impl Iterator<Item = &str> {\n\n\tPathIterator { path: full_path }\n\n}\n\n\n", "file_path": "src/various/path.rs", "rank": 93, "score": 163556.767174028 }, { "content": "struct Obj { // actual fields of WindowEvents\n\n\tmsgs: FuncStore< // ordinary WM messages\n\n\t\tco::WM,\n\n\t\tBox<dyn Fn(WndMsg) -> ErrResult<Option<isize>>>, // return value may be meaningful\n\n\t>,\n\n}\n\n\n\nimpl WindowEvents {\n\n\tpub(in crate::gui) fn new() -> Self {\n\n\t\tSelf(\n\n\t\t\tVeryUnsafeCell::new(\n\n\t\t\t\tObj {\n\n\t\t\t\t\tmsgs: FuncStore::new(),\n\n\t\t\t\t},\n\n\t\t\t),\n\n\t\t)\n\n\t}\n\n\n\n\tpub(in crate::gui) fn is_empty(&self) -> bool 
{\n\n\t\tself.0.msgs.is_empty()\n", "file_path": "src/gui/events/events_wm.rs", "rank": 94, "score": 163150.7666703047 }, { "content": "/// [`CoCreateInstance`](https://docs.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-cocreateinstance)\n\n/// function.\n\n///\n\n/// Returns an [`IUnknown`](crate::IUnknown)-derived COM object.\n\n///\n\n/// # Examples\n\n///\n\n/// Instantiating an [`ITaskbarList`](crate::shell::ITaskbarList) object:\n\n///\n\n/// ```rust,ignore\n\n/// use winsafe::prelude::*;\n\n/// use winsafe::{co, CoCreateInstance, shell};\n\n///\n\n/// let obj = CoCreateInstance::<shell::ITaskbarList>(\n\n/// &shell::clsid::TaskbarList,\n\n/// None,\n\n/// co::CLSCTX::INPROC_SERVER,\n\n/// )?;\n\n/// ```\n\npub fn CoCreateInstance<T: ComInterface>(\n\n\tclsid: &CLSID,\n\n\tiunk_outer: Option<&mut IUnknown>,\n\n\tcls_context: co::CLSCTX) -> WinResult<T>\n\n{\n\n\tlet mut ppv = ComPtr::null();\n\n\tlet mut ppv_outer = ComPtr::null();\n\n\n\n\thr_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tole32::CoCreateInstance(\n\n\t\t\t\tclsid as *const _ as _,\n\n\t\t\t\tiunk_outer.as_ref()\n\n\t\t\t\t\t.map_or(std::ptr::null_mut(), |_| &mut ppv_outer as *mut _ as _),\n\n\t\t\t\tcls_context.0,\n\n\t\t\t\t&T::IID as *const _ as _,\n\n\t\t\t\t&mut ppv as *mut _ as _,\n\n\t\t\t)\n\n\t\t},\n\n\t).map(|_| {\n\n\t\tif let Some(iunk_outer) = iunk_outer {\n\n\t\t\t*iunk_outer = IUnknown::from(ppv_outer); // create outer Unknown if due\n\n\t\t}\n\n\t\tT::from(ppv) // return new Unknown-derived object\n\n\t})\n\n}\n\n\n", "file_path": "src/com/funcs.rs", "rank": 95, "score": 160542.789717928 }, { "content": "/// [`DecryptFile`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-decryptfilew)\n\n/// function.\n\npub fn DecryptFile(file_name: &str) -> WinResult<()> {\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tadvapi32::DecryptFileW(WString::from_str(file_name).as_ptr(), 0)\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 96, 
"score": 157253.30074781383 }, { "content": "/// [`DeleteFile`](https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-deletefilew)\n\n/// function.\n\npub fn DeleteFile(file_name: &str) -> WinResult<()> {\n\n\tbool_to_winresult(\n\n\t\tunsafe { kernel32::DeleteFileW(WString::from_str(file_name).as_ptr()) },\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 97, "score": 157253.30074781383 }, { "content": "/// [`EncryptFile`](https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-encryptfilew)\n\n/// function.\n\npub fn EncryptFile(file_name: &str) -> WinResult<()> {\n\n\tbool_to_winresult(\n\n\t\tunsafe {\n\n\t\t\tadvapi32::EncryptFileW(WString::from_str(file_name).as_ptr())\n\n\t\t},\n\n\t)\n\n}\n\n\n", "file_path": "src/funcs.rs", "rank": 98, "score": 157253.30074781383 }, { "content": "\t///\n\n\t/// Defaults to `Vert::None`.\n\n\tpub vert_resize: Vert,\n\n}\n\n\n\nimpl Default for ProgressBarOpts {\n\n\tfn default() -> Self {\n\n\t\tSelf {\n\n\t\t\tposition: POINT::new(0, 0),\n\n\t\t\tsize: SIZE::new(120, 23),\n\n\t\t\tprogress_bar_style: co::PBS::SMOOTH,\n\n\t\t\twindow_style: co::WS::CHILD | co::WS::VISIBLE,\n\n\t\t\twindow_ex_style: co::WS_EX::LEFT,\n\n\t\t\tctrl_id: 0,\n\n\t\t\thorz_resize: Horz::None,\n\n\t\t\tvert_resize: Vert::None,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl ProgressBarOpts {\n\n\tfn define_ctrl_id(mut self) -> Self {\n\n\t\tif self.ctrl_id == 0 {\n\n\t\t\tself.ctrl_id = auto_ctrl_id();\n\n\t\t}\n\n\t\tself\n\n\t}\n\n}\n", "file_path": "src/gui/native_controls/progress_bar.rs", "rank": 99, "score": 75.44749510519473 } ]
Rust
src/main.rs
almusil/portable-ecg
c39cf6aa9cf1aaee1167d2bc05cd0c895f7522c6
#![no_main] #![no_std] use lib as _; use cortex_m::singleton; use heapless::consts::U64; use heapless::spsc::{Queue, SingleCore}; use lib::display::Display; use lib::hw::{ get_calibration, init_clock, init_lcd, Adc, AdcConfig, BeatCounter, BeatTimer, FrameTimer, HwLcd, IliError, LcdInterface, }; use lib::sampler::Sampler; use lib::{BOTTOM_SCROLL_OFFSET, TOP_SCROLL_OFFSET}; use rtic::app; use stm32g0xx_hal::delay::DelayExt; use stm32g0xx_hal::dma::DmaExt; use stm32g0xx_hal::dmamux::DmaMuxIndex; use stm32g0xx_hal::gpio::{GpioExt, Speed}; use stm32g0xx_hal::time::U32Ext; #[app(device = stm32g0xx_hal::stm32, peripherals = true)] const APP: () = { struct Resources { display: Display<'static, U64, HwLcd, IliError>, sampler: Sampler<'static, U64>, frame_timer: FrameTimer, adc: Adc, beat_timer: BeatTimer, beat_counter: BeatCounter, } #[init] fn init(cx: init::Context) -> init::LateResources { let core: rtic::export::Peripherals = cx.core; let device: stm32g0xx_hal::stm32::Peripherals = cx.device; let queue: &'static mut Queue<_, _, _, _> = singleton!(: Queue<u16, U64, u8, SingleCore> = unsafe {Queue::u8_sc()}).unwrap(); let dma_buffer: &'static mut [u16; 4] = singleton!(: [u16; 4] = [0; 4]).unwrap(); let (producer, consumer) = queue.split(); let mut rcc = init_clock(device.RCC); let mut delay = core.SYST.delay(&mut rcc); let gpioa = device.GPIOA.split(&mut rcc); let gpiob = device.GPIOB.split(&mut rcc); let interface = LcdInterface::new( gpiob.pb0.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb1.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb2.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb3.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb4.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb5.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb6.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb7.into_push_pull_output().set_speed(Speed::VeryHigh), 
gpiob.pb8.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb9.into_push_pull_output().set_speed(Speed::VeryHigh), ); let lcd = init_lcd( interface, gpioa.pa4.into_push_pull_output(), gpioa.pa5.into_push_pull_output(), (TOP_SCROLL_OFFSET, BOTTOM_SCROLL_OFFSET), &mut delay, ) .unwrap(); let display = Display::new(lcd, consumer).unwrap(); let frame_timer = FrameTimer::new(device.TIM6, 30.hz(), &mut rcc); let dma = device.DMA.split(&mut rcc, device.DMAMUX); let mut ch1 = dma.ch1; ch1.mux().select_peripheral(DmaMuxIndex::ADC); let adc = Adc::new( device.ADC, device.TIM1, dma_buffer, AdcConfig::new(gpioa.pa0, ch1, 500.hz()), &mut rcc, &mut delay, ); let sampler = Sampler::new(dma_buffer, producer, get_calibration(), 4095); let beat_timer = BeatTimer::new(device.TIM7, 10_000.ms(), &mut rcc); let beat_counter = BeatCounter::new(device.TIM3, gpioa.pa6, &mut rcc); init::LateResources { display, sampler, frame_timer, adc, beat_timer, beat_counter, } } #[idle(resources = [frame_timer, adc, beat_counter, beat_timer])] fn idle(mut cx: idle::Context) -> ! 
{ cx.resources .beat_counter .lock(|counter: &mut BeatCounter| counter.start()); cx.resources.beat_timer.lock(|timer: &mut BeatTimer| { timer.start(); }); cx.resources.frame_timer.lock(|timer: &mut FrameTimer| { timer.start(); }); cx.resources.adc.lock(|adc: &mut Adc| { adc.start(); }); loop { cortex_m::asm::nop(); } } #[task(binds = DMA_CHANNEL1, priority = 2, resources = [adc, sampler])] fn dma(cx: dma::Context) { let adc: &mut Adc = cx.resources.adc; let sampler: &mut Sampler<'_, _> = cx.resources.sampler; adc.unpend(); sampler.sample::<IliError>().unwrap(); } #[task(binds = TIM6, priority = 1, resources = [display, frame_timer])] fn tim6(cx: tim6::Context) { let frame_timer: &mut FrameTimer = cx.resources.frame_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; frame_timer.unpend(); display.frame().unwrap(); } #[task(binds = TIM7, priority = 1, resources = [beat_counter, beat_timer, display])] fn tim7(cx: tim7::Context) { let counter: &mut BeatCounter = cx.resources.beat_counter; let timer: &mut BeatTimer = cx.resources.beat_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; timer.unpend(); display.update_bpm(counter.read() * 6).unwrap(); counter.reset(); } };
#![no_main] #![no_std] use lib as _; use cortex_m::singleton; use heapless::consts::U64; use heapless::spsc::{Queue, SingleCore}; use lib::display::Display; use lib::hw::{ get_calibration, init_clock, init_lcd, Adc, AdcConfig, BeatCounter, BeatTimer, FrameTimer, HwLcd, IliError, LcdInterface, }; use lib::sampler::Sampler; use lib::{BOTTOM_SCROLL_OFFSET, TOP_SCROLL_OFFSET}; use rtic::app; use stm32g0xx_hal::delay::DelayExt; use stm32g0xx_hal::dma::DmaExt; use stm32g0xx_hal::dmamux::DmaMuxIndex; use stm32g0xx_hal::gpio::{GpioExt, Speed}; use stm32g0xx_hal::time::U32Ext; #[app(device = stm32g0xx_hal::stm32, peripherals = true)] const APP: () = { struct Resources { display: Display<'static, U64, HwLcd, IliError>, sampler: Sampler<'static, U64>, frame_timer: FrameTimer, adc: Adc, beat_timer: BeatTimer, beat_counter: BeatCounter, } #[init] fn init(cx: init::Context) -> init::LateResources { let core: rtic::export::Peripherals = cx.core; let device: stm32g0xx_hal::stm32::Peripherals = cx.device; let queue: &'static mut Queue<_, _, _, _> = singleton!(: Queue<u16, U64, u8, SingleCore> = unsafe {Queue::u8_sc()}).unwrap(); let
_counter; let timer: &mut BeatTimer = cx.resources.beat_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; timer.unpend(); display.update_bpm(counter.read() * 6).unwrap(); counter.reset(); } };
dma_buffer: &'static mut [u16; 4] = singleton!(: [u16; 4] = [0; 4]).unwrap(); let (producer, consumer) = queue.split(); let mut rcc = init_clock(device.RCC); let mut delay = core.SYST.delay(&mut rcc); let gpioa = device.GPIOA.split(&mut rcc); let gpiob = device.GPIOB.split(&mut rcc); let interface = LcdInterface::new( gpiob.pb0.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb1.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb2.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb3.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb4.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb5.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb6.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb7.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb8.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb9.into_push_pull_output().set_speed(Speed::VeryHigh), ); let lcd = init_lcd( interface, gpioa.pa4.into_push_pull_output(), gpioa.pa5.into_push_pull_output(), (TOP_SCROLL_OFFSET, BOTTOM_SCROLL_OFFSET), &mut delay, ) .unwrap(); let display = Display::new(lcd, consumer).unwrap(); let frame_timer = FrameTimer::new(device.TIM6, 30.hz(), &mut rcc); let dma = device.DMA.split(&mut rcc, device.DMAMUX); let mut ch1 = dma.ch1; ch1.mux().select_peripheral(DmaMuxIndex::ADC); let adc = Adc::new( device.ADC, device.TIM1, dma_buffer, AdcConfig::new(gpioa.pa0, ch1, 500.hz()), &mut rcc, &mut delay, ); let sampler = Sampler::new(dma_buffer, producer, get_calibration(), 4095); let beat_timer = BeatTimer::new(device.TIM7, 10_000.ms(), &mut rcc); let beat_counter = BeatCounter::new(device.TIM3, gpioa.pa6, &mut rcc); init::LateResources { display, sampler, frame_timer, adc, beat_timer, beat_counter, } } #[idle(resources = [frame_timer, adc, beat_counter, beat_timer])] fn idle(mut cx: idle::Context) -> ! 
{ cx.resources .beat_counter .lock(|counter: &mut BeatCounter| counter.start()); cx.resources.beat_timer.lock(|timer: &mut BeatTimer| { timer.start(); }); cx.resources.frame_timer.lock(|timer: &mut FrameTimer| { timer.start(); }); cx.resources.adc.lock(|adc: &mut Adc| { adc.start(); }); loop { cortex_m::asm::nop(); } } #[task(binds = DMA_CHANNEL1, priority = 2, resources = [adc, sampler])] fn dma(cx: dma::Context) { let adc: &mut Adc = cx.resources.adc; let sampler: &mut Sampler<'_, _> = cx.resources.sampler; adc.unpend(); sampler.sample::<IliError>().unwrap(); } #[task(binds = TIM6, priority = 1, resources = [display, frame_timer])] fn tim6(cx: tim6::Context) { let frame_timer: &mut FrameTimer = cx.resources.frame_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; frame_timer.unpend(); display.frame().unwrap(); } #[task(binds = TIM7, priority = 1, resources = [beat_counter, beat_timer, display])] fn tim7(cx: tim7::Context) { let counter: &mut BeatCounter = cx.resources.beat
random
[ { "content": "pub fn init_lcd(\n\n interface: LcdInterface,\n\n lcd_rst: LcdRst,\n\n lcd_rd: LcdRD,\n\n scroller_offset: (u16, u16),\n\n delay: &mut Delay<SYST>,\n\n) -> Result<HwLcd, IliError> {\n\n let mut lcd_rd = lcd_rd;\n\n lcd_rd.set_high().unwrap();\n\n IliLcd::new(interface, lcd_rst, scroller_offset, delay)\n\n}\n\n\n", "file_path": "lib/hw/helper.rs", "rank": 0, "score": 84727.1648885833 }, { "content": "struct Dimension;\n\n\n\nimpl Dimension {\n\n const WIDTH: i32 = 480;\n\n const HEIGHT: i32 = 320;\n\n}\n\n\n\npub(crate) struct Offset;\n\n\n\nimpl Offset {\n\n const BOTTOM: i32 = 10;\n\n const TOP: i32 = 10;\n\n pub(crate) const LEFT: i32 = 10;\n\n pub(crate) const RIGHT: i32 = 50;\n\n}\n\n\n", "file_path": "lib/display.rs", "rank": 1, "score": 76294.0718782434 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Data {\n\n pub(crate) y: u16,\n\n pub(crate) height: u16,\n\n}\n\n\n\nimpl Data {\n\n fn new(y: u16, height: u16) -> Self {\n\n Data { y, height }\n\n }\n\n}\n\n\n\nimpl From<(u16, u16)> for Data {\n\n fn from(samples: (u16, u16)) -> Self {\n\n let (y, height) = if samples.0 < samples.1 {\n\n (samples.0, samples.1 - samples.0)\n\n } else {\n\n (samples.1, samples.0 - samples.1)\n\n };\n\n Data::new(y, height)\n\n }\n\n}\n\n\n", "file_path": "lib/display.rs", "rank": 2, "score": 76294.0718782434 }, { "content": "struct Frame;\n\n\n\nimpl Frame {\n\n const BORDER_WIDTH: i32 = 2;\n\n\n\n const TOP_LEFT: Point = Point::new(Offset::LEFT, Offset::TOP);\n\n const BOTTOM_RIGHT: Point = Point::new(\n\n Dimension::WIDTH - Offset::RIGHT - 1,\n\n Dimension::HEIGHT - Offset::BOTTOM - 1,\n\n );\n\n\n\n const WIDTH: i32 = Dimension::WIDTH - Offset::LEFT - Offset::RIGHT;\n\n const HEIGHT: i32 = Dimension::HEIGHT - Offset::TOP - Offset::BOTTOM;\n\n}\n\n\n", "file_path": "lib/display.rs", "rank": 3, "score": 76294.0718782434 }, { "content": "struct Color;\n\n\n\nimpl Color {\n\n const BACKGROUND: Rgb565 = Rgb565::BLACK;\n\n const FRAME_BORDER: Rgb565 = 
Rgb565::WHITE;\n\n const DATA: Rgb565 = Rgb565::YELLOW;\n\n const BPM_TEXT: Rgb565 = Rgb565::RED;\n\n}\n\n\n", "file_path": "lib/display.rs", "rank": 4, "score": 76294.0718782434 }, { "content": "struct InnerAdc<I> {\n\n adc: ADC,\n\n _input: I,\n\n}\n\n\n\n// FIXME Move this in some fashionable way upstream\n\n\n\nimpl<I> InnerAdc<I>\n\nwhere\n\n I: AdcChannel<HalAdc, ID = u8>,\n\n{\n\n pub fn new<D: DelayUs<u8>>(pac_adc: ADC, input: I, rcc: &mut Rcc, delay: &mut D) -> Self {\n\n InnerAdc::<I>::enable_clock_and_reset(rcc);\n\n let mut adc = InnerAdc {\n\n adc: pac_adc,\n\n _input: input,\n\n };\n\n adc.disable();\n\n adc.enable_vreg(delay);\n\n adc.calibrate();\n", "file_path": "lib/hw/adc.rs", "rank": 5, "score": 75483.49758347542 }, { "content": "struct DataColumn;\n\n\n\nimpl DataColumn {\n\n const TEXT_WIDTH: i32 = 12 * 3;\n\n const TEXT_HEIGHT: i32 = 16;\n\n const TEXT_SPACING: i32 = 5;\n\n const TEXT_BPM_POSITION: Point = Point::new(\n\n Frame::BOTTOM_RIGHT.x\n\n + Frame::BORDER_WIDTH\n\n + (Offset::RIGHT - Frame::BORDER_WIDTH - DataColumn::TEXT_WIDTH) / 2,\n\n Frame::TOP_LEFT.y + DataColumn::TEXT_SPACING,\n\n );\n\n const TEXT_BPM_VAL_POSITION: Point = Point::new(\n\n DataColumn::TEXT_BPM_POSITION.x,\n\n DataColumn::TEXT_BPM_POSITION.y + DataColumn::TEXT_HEIGHT + DataColumn::TEXT_SPACING,\n\n );\n\n}\n\n\n", "file_path": "lib/display.rs", "rank": 6, "score": 73196.00841242519 }, { "content": "struct Dma<C> {\n\n channel: C,\n\n}\n\n\n\nimpl<C> Dma<C>\n\nwhere\n\n C: DmaChannel,\n\n{\n\n pub fn new(channel: C, peripheral_addr: u32, memory_addr: u32, len: u16) -> Self {\n\n let mut dma = Dma { channel };\n\n dma.configure(peripheral_addr, memory_addr, len);\n\n dma\n\n }\n\n\n\n pub fn start(&mut self) {\n\n self.channel.clear_event(Event::HalfTransfer);\n\n self.channel.listen(Event::HalfTransfer);\n\n self.channel.enable();\n\n }\n\n\n", "file_path": "lib/hw/adc.rs", "rank": 7, "score": 68602.64423279409 }, { "content": "pub fn init_clock(pac_rcc: RCC) -> 
Rcc {\n\n // ((16 MHz / 4) * 32) / 2 = 64 MHz\n\n let pll_config = PllConfig::with_hsi(4, 32, 2);\n\n pac_rcc.freeze(Config::pll().pll_cfg(pll_config))\n\n}\n\n\n", "file_path": "lib/hw/helper.rs", "rank": 8, "score": 68089.2682410272 }, { "content": "/// Terminates the application and makes `probe-run` exit with exit-code = 0\n\npub fn exit() -> ! {\n\n loop {\n\n cortex_m::asm::bkpt();\n\n }\n\n}\n", "file_path": "lib/lib.rs", "rank": 9, "score": 54368.94421857885 }, { "content": "struct UnusedPin;\n\n\n\nimpl TimerPin<TIM1> for UnusedPin {\n\n type Channel = Channel4;\n\n\n\n fn setup(&self) {\n\n // Do nothing\n\n }\n\n}\n\n\n\npub struct SampleTimer {\n\n _timer: Pwm<TIM1>,\n\n trig: PwmPin<TIM1, Channel4>,\n\n}\n\n\n\nimpl SampleTimer {\n\n pub fn new(pac_timer: TIM1, freq: Hertz, rcc: &mut Rcc) -> Self {\n\n let timer = pac_timer.pwm(freq, rcc);\n\n let trig = timer.bind_pin(UnusedPin);\n\n SampleTimer {\n", "file_path": "lib/hw/timers.rs", "rank": 10, "score": 49944.38067391742 }, { "content": "pub fn get_calibration() -> u16 {\n\n Calibration.vref_int.read()\n\n}\n", "file_path": "lib/hw/helper.rs", "rank": 11, "score": 44270.653532477714 }, { "content": "fn map(to_map: u32, in_min: u32, in_max: u32, out_min: u32, out_max: u32) -> u32 {\n\n (to_map - in_min) * (out_max - out_min) / (in_max - in_min) + out_min\n\n}\n", "file_path": "lib/display.rs", "rank": 12, "score": 44260.68866460178 }, { "content": "use heapless::spsc::{Producer, SingleCore};\n\nuse heapless::ArrayLength;\n\n\n\nuse crate::error::Error;\n\nuse crate::error::Result;\n\nuse crate::Buffer;\n\n\n\npub struct Sampler<'a, LEN>\n\nwhere\n\n LEN: ArrayLength<u16>,\n\n{\n\n producer: Producer<'a, u16, LEN, u8, SingleCore>,\n\n buffer: &'static Buffer,\n\n first_half: bool,\n\n calibration: u32,\n\n full_scale: u16,\n\n}\n\n\n\nimpl<'a, LEN> Sampler<'a, LEN>\n\nwhere\n", "file_path": "lib/sampler.rs", "rank": 13, "score": 38631.413581297835 }, { "content": " LEN: ArrayLength<u16>,\n\n{\n\n pub 
fn new(\n\n buffer: &'static Buffer,\n\n producer: Producer<'a, u16, LEN, u8, SingleCore>,\n\n vref_calibration: u16,\n\n full_scale: u16,\n\n ) -> Self {\n\n // 3V * 1000 to prevent floating math\n\n let calibration = vref_calibration as u32 * 3000;\n\n Sampler {\n\n producer,\n\n buffer,\n\n calibration,\n\n full_scale,\n\n first_half: true,\n\n }\n\n }\n\n\n\n pub fn sample<LCDER>(&mut self) -> Result<(), LCDER> {\n", "file_path": "lib/sampler.rs", "rank": 14, "score": 38630.17065450074 }, { "content": " let (vref, input) = self.get_raw_data();\n\n self.first_half ^= true;\n\n let sample = self.convert(vref, input);\n\n self.producer.enqueue(sample).map_err(|_| Error::Queue)\n\n }\n\n\n\n fn get_raw_data(&self) -> (u16, u16) {\n\n if self.first_half {\n\n (self.buffer[1], self.buffer[0])\n\n } else {\n\n (self.buffer[3], self.buffer[2])\n\n }\n\n }\n\n\n\n fn convert(&self, measured_vref: u16, measured_input: u16) -> u16 {\n\n let v_ref = self.calibration / measured_vref as u32;\n\n let sample = (v_ref * measured_input as u32) / self.full_scale as u32;\n\n sample as u16\n\n }\n\n}\n", "file_path": "lib/sampler.rs", "rank": 15, "score": 38624.40144166705 }, { "content": " LCD: Lcd<Error = LCDER>,\n\n{\n\n current_data: Queue<Data, U512, u16, SingleCore>,\n\n buffer: Consumer<'a, u16, LEN, u8, SingleCore>,\n\n horizontal_position: u16,\n\n last_sample: u16,\n\n last_bpm: u16,\n\n lcd: LCD,\n\n}\n\n\n\nimpl<'a, LEN, LCD, LCDER> Display<'a, LEN, LCD, LCDER>\n\nwhere\n\n LEN: ArrayLength<u16>,\n\n LCD: Lcd<Error = LCDER>,\n\n{\n\n pub fn new(lcd: LCD, buffer: Consumer<'a, u16, LEN, u8, SingleCore>) -> Result<Self, LCDER> {\n\n let mut display = Display {\n\n current_data: unsafe { Queue::u16_sc() },\n\n buffer,\n\n horizontal_position: (Frame::WIDTH - 1) as u16,\n", "file_path": "lib/display.rs", "rank": 16, "score": 37632.895287568026 }, { "content": "use core::fmt::Write;\n\nuse embedded_graphics::fonts::{Font12x16, Text};\n\nuse 
embedded_graphics::pixelcolor::{Rgb565, RgbColor};\n\nuse embedded_graphics::prelude::{Point, Primitive};\n\nuse embedded_graphics::primitives::Rectangle;\n\nuse embedded_graphics::style::{PrimitiveStyle, PrimitiveStyleBuilder, TextStyle};\n\nuse heapless::consts::{U512, U8};\n\nuse heapless::spsc::Queue;\n\nuse heapless::spsc::{Consumer, SingleCore};\n\nuse heapless::{ArrayLength, String};\n\n\n\nuse crate::error::{Error, Result};\n\nuse crate::hw::Lcd;\n\n\n\nconst SAMPLE_MAX: usize = 3450;\n\nconst SAMPLE_MIN: usize = 0;\n\n\n\npub struct Display<'a, LEN, LCD, LCDER>\n\nwhere\n\n LEN: ArrayLength<u16>,\n", "file_path": "lib/display.rs", "rank": 17, "score": 37631.202979899805 }, { "content": " last_sample: Display::<'a, LEN, LCD, LCDER>::map_sample(0),\n\n last_bpm: 0,\n\n lcd,\n\n };\n\n display.init()?;\n\n Ok(display)\n\n }\n\n\n\n pub fn frame(&mut self) -> Result<(), LCDER> {\n\n let len = self.buffer.len();\n\n for _ in 0..len {\n\n let sample = self.buffer.dequeue().ok_or(Error::Queue)?;\n\n // Scroll\n\n self.scroll()?;\n\n // Remove old data\n\n let data_to_remove = self.current_data.dequeue().ok_or(Error::Queue)?;\n\n self.draw_single(&data_to_remove, Color::BACKGROUND)?;\n\n // Draw current data\n\n let mapped_sample = Display::<'a, LEN, LCD, LCDER>::map_sample(sample);\n\n let data_to_add = (self.last_sample, mapped_sample).into();\n", "file_path": "lib/display.rs", "rank": 18, "score": 37629.75664410087 }, { "content": " }\n\n\n\n fn init_data(&mut self) -> Result<(), LCDER> {\n\n let zero = Display::<'a, LEN, LCD, LCDER>::map_sample(0);\n\n let data = (zero, zero).into();\n\n for _ in 0..Frame::WIDTH {\n\n self.draw_single(&data, Color::DATA)?;\n\n self.scroll()?;\n\n self.current_data.enqueue(data).map_err(|_| Error::Queue)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn map_sample(sample: u16) -> u16 {\n\n map(\n\n sample as u32,\n\n SAMPLE_MIN as u32,\n\n SAMPLE_MAX as u32,\n\n 0,\n\n Frame::HEIGHT as u32 - 1,\n\n ) as u16\n\n }\n\n}\n\n\n", "file_path": 
"lib/display.rs", "rank": 19, "score": 37628.67646244925 }, { "content": " if self.horizontal_position >= Frame::WIDTH as u16 {\n\n self.horizontal_position = 0;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn init(&mut self) -> Result<(), LCDER> {\n\n self.lcd.clear(Color::BACKGROUND).map_err(Error::Lcd)?;\n\n self.init_frame()?;\n\n self.init_data_column()?;\n\n self.init_data()?;\n\n Ok(())\n\n }\n\n\n\n fn init_frame(&mut self) -> Result<(), LCDER> {\n\n let top_left = Point::new(\n\n Frame::TOP_LEFT.x - Frame::BORDER_WIDTH,\n\n Frame::TOP_LEFT.y - Frame::BORDER_WIDTH,\n\n );\n\n let bottom_right = Point::new(\n", "file_path": "lib/display.rs", "rank": 20, "score": 37627.99032158532 }, { "content": " fn draw_bpm_value(&mut self, bpm: u16, color: Rgb565) -> Result<(), LCDER> {\n\n let mut buffer = String::<U8>::new();\n\n write!(&mut buffer, \"{:>3}\", bpm).map_err(|_| Error::BufferWrite)?;\n\n let bpm_val = Text::new(&buffer, DataColumn::TEXT_BPM_VAL_POSITION)\n\n .into_styled(TextStyle::new(Font12x16, color));\n\n self.lcd.draw(&bpm_val).map_err(Error::Lcd)?;\n\n Ok(())\n\n }\n\n\n\n fn draw_single(&mut self, data: &Data, color: Rgb565) -> Result<(), LCDER> {\n\n let x = (Frame::TOP_LEFT.x as u16 + self.horizontal_position) as i32;\n\n let y = (Frame::BOTTOM_RIGHT.y as u16 - data.y) as i32;\n\n let rect = Rectangle::new(Point::new(x, y - data.height as i32), Point::new(x, y))\n\n .into_styled(PrimitiveStyle::with_fill(color));\n\n self.lcd.draw(&rect).map_err(Error::Lcd)\n\n }\n\n\n\n fn scroll(&mut self) -> Result<(), LCDER> {\n\n self.lcd.scroll(1).map_err(Error::Lcd)?;\n\n self.horizontal_position += 1;\n", "file_path": "lib/display.rs", "rank": 21, "score": 37626.255365549914 }, { "content": " self.draw_single(&data_to_add, Color::DATA)?;\n\n self.current_data\n\n .enqueue(data_to_add)\n\n .map_err(|_| Error::Queue)?;\n\n // Save mapped as last\n\n self.last_sample = mapped_sample;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn update_bpm(&mut self, bpm: u16) -> Result<(), 
LCDER> {\n\n if bpm != self.last_bpm {\n\n self.draw_bpm_value(self.last_bpm, Color::BACKGROUND)?;\n\n self.draw_bpm_value(bpm, Color::BPM_TEXT)?;\n\n self.last_bpm = bpm;\n\n }\n\n Ok(())\n\n }\n\n\n", "file_path": "lib/display.rs", "rank": 22, "score": 37625.925338746376 }, { "content": " Frame::BOTTOM_RIGHT.x + Frame::BORDER_WIDTH,\n\n Frame::BOTTOM_RIGHT.y + Frame::BORDER_WIDTH,\n\n );\n\n let border = Rectangle::new(top_left, bottom_right).into_styled(\n\n PrimitiveStyleBuilder::new()\n\n .stroke_width(Frame::BORDER_WIDTH as u32)\n\n .stroke_color(Color::FRAME_BORDER)\n\n .fill_color(Color::BACKGROUND)\n\n .build(),\n\n );\n\n self.lcd.draw(&border).map_err(Error::Lcd)?;\n\n Ok(())\n\n }\n\n\n\n fn init_data_column(&mut self) -> Result<(), LCDER> {\n\n let bpm = Text::new(\"BPM\", DataColumn::TEXT_BPM_POSITION)\n\n .into_styled(TextStyle::new(Font12x16, Color::BPM_TEXT));\n\n self.draw_bpm_value(self.last_bpm, Color::BPM_TEXT)?;\n\n self.lcd.draw(&bpm).map_err(Error::Lcd)?;\n\n Ok(())\n", "file_path": "lib/display.rs", "rank": 23, "score": 37625.247500192134 }, { "content": " adc.enable();\n\n adc.configure();\n\n adc\n\n }\n\n\n\n pub fn start(&mut self) {\n\n self.adc.isr.write(|w| {\n\n w.eoc().set_bit();\n\n w.eos().set_bit()\n\n });\n\n self.adc.cr.modify(|_, w| w.adstart().set_bit());\n\n }\n\n\n\n pub fn get_dma_address() -> u32 {\n\n unsafe { &(*ADC::ptr()).dr as *const _ as u32 }\n\n }\n\n\n\n fn configure(&mut self) {\n\n self.adc.cfgr1.write(|w| unsafe {\n\n // External trigger rising edge\n", "file_path": "lib/hw/adc.rs", "rank": 24, "score": 35720.500957019496 }, { "content": "use core::ops::Deref;\n\nuse stm32g0xx_hal::analog::adc::{Adc as HalAdc, VRef};\n\nuse stm32g0xx_hal::delay::Delay;\n\nuse stm32g0xx_hal::dma::{Channel as DmaChannel, Direction, Event, Priority, WordSize};\n\nuse stm32g0xx_hal::hal::adc::Channel as AdcChannel;\n\nuse stm32g0xx_hal::hal::blocking::delay::DelayUs;\n\nuse stm32g0xx_hal::rcc::Rcc;\n\nuse 
stm32g0xx_hal::stm32g0::stm32g070::{ADC, RCC, SYST, TIM1};\n\nuse stm32g0xx_hal::time::Hertz;\n\nuse volatile_register::RO;\n\n\n\nuse crate::hw::timers::SampleTimer;\n\nuse crate::Buffer;\n\n\n\npub struct AdcConfig<I, C> {\n\n input: I,\n\n dma_channel: C,\n\n frequency: Hertz,\n\n}\n\n\n", "file_path": "lib/hw/adc.rs", "rank": 25, "score": 35720.107795212105 }, { "content": " }\n\n\n\n fn enable_clock_and_reset(_: &mut Rcc) {\n\n let rcc = unsafe { &(*RCC::ptr()) };\n\n rcc.apbenr2.modify(|_, w| w.adcen().set_bit());\n\n rcc.apbrstr2.modify(|_, w| w.adcrst().set_bit());\n\n rcc.apbrstr2.modify(|_, w| w.adcrst().clear_bit());\n\n }\n\n\n\n fn enable_vreg<D: DelayUs<u8>>(&mut self, delay: &mut D) {\n\n self.adc.cr.modify(|_, w| w.advregen().set_bit());\n\n // Max starting time declared by stm32g070 datasheet is 20 us\n\n delay.delay_us(20);\n\n }\n\n\n\n fn enable(&mut self) {\n\n self.adc.isr.write(|w| w.adrdy().set_bit());\n\n self.adc.cr.modify(|_, w| w.aden().set_bit());\n\n while self.adc.isr.read().adrdy().bit_is_clear() {}\n\n }\n", "file_path": "lib/hw/adc.rs", "rank": 26, "score": 35719.94875733554 }, { "content": "impl<I, C> Adc<I, C>\n\nwhere\n\n I: AdcChannel<HalAdc, ID = u8>,\n\n C: DmaChannel,\n\n{\n\n pub fn new(\n\n pac_adc: ADC,\n\n pac_timer: TIM1,\n\n buffer: &mut Buffer,\n\n config: AdcConfig<I, C>,\n\n rcc: &mut Rcc,\n\n delay: &mut Delay<SYST>,\n\n ) -> Self {\n\n let adc = InnerAdc::new(pac_adc, config.input, rcc, delay);\n\n let memory_addr = buffer.as_ptr() as u32;\n\n let dma = Dma::new(\n\n config.dma_channel,\n\n InnerAdc::<I>::get_dma_address(),\n\n memory_addr,\n\n buffer.len() as u16,\n", "file_path": "lib/hw/adc.rs", "rank": 27, "score": 35719.505267343324 }, { "content": " );\n\n let trig = SampleTimer::new(pac_timer, config.frequency, rcc);\n\n Adc { adc, dma, trig }\n\n }\n\n\n\n pub fn start(&mut self) {\n\n self.adc.start();\n\n self.dma.start();\n\n self.trig.start();\n\n }\n\n\n\n pub fn unpend(&mut self) {\n\n 
self.dma.unpend();\n\n }\n\n}\n\n\n", "file_path": "lib/hw/adc.rs", "rank": 28, "score": 35718.32050940011 }, { "content": "\n\n fn disable(&mut self) {\n\n let cr = self.adc.cr.read();\n\n if cr.aden().bit_is_clear() {\n\n return;\n\n }\n\n if cr.adstart().bit_is_set() {\n\n self.adc.cr.modify(|_, w| w.adstp().set_bit());\n\n }\n\n self.adc.cr.modify(|_, w| w.addis().set_bit());\n\n while self.adc.cr.read().aden().bit_is_set() {}\n\n self.adc.isr.write(|w| w.adrdy().set_bit());\n\n }\n\n\n\n fn calibrate(&mut self) {\n\n self.adc.cr.modify(|_, w| w.adcal().set_bit());\n\n while self.adc.isr.read().eocal().bit_is_clear() {}\n\n self.adc.isr.write(|w| w.eocal().set_bit());\n\n }\n\n}\n", "file_path": "lib/hw/adc.rs", "rank": 29, "score": 35718.05614829051 }, { "content": "impl<I, C> AdcConfig<I, C>\n\nwhere\n\n I: AdcChannel<HalAdc, ID = u8>,\n\n C: DmaChannel,\n\n{\n\n pub fn new(input: I, dma_channel: C, frequency: Hertz) -> Self {\n\n AdcConfig {\n\n input,\n\n dma_channel,\n\n frequency,\n\n }\n\n }\n\n}\n\n\n\npub struct Adc<I, C> {\n\n adc: InnerAdc<I>,\n\n dma: Dma<C>,\n\n trig: SampleTimer,\n\n}\n\n\n", "file_path": "lib/hw/adc.rs", "rank": 30, "score": 35718.01907107095 }, { "content": " w.exten().bits(0b01);\n\n // External trigger 1\n\n w.extsel().bits(0b001);\n\n // Right alignment\n\n w.align().clear_bit();\n\n // 12-bit resolution\n\n w.res().bits(0b00);\n\n // Circular DMA\n\n w.dmacfg().set_bit();\n\n // Enable DMA requests\n\n w.dmaen().set_bit()\n\n });\n\n // Enable Vref\n\n self.adc.ccr.write(|w| w.vrefen().set_bit());\n\n // 160.5 cycles for the best precision\n\n self.adc.smpr.write(|w| unsafe { w.smp1().bits(0b111) });\n\n // Select input channel and Vref\n\n self.adc\n\n .chselr()\n\n .write(|w| unsafe { w.chsel().bits(1 << VRef::channel() | 1 << I::channel()) });\n", "file_path": "lib/hw/adc.rs", "rank": 31, "score": 35717.449780622745 }, { "content": " pub fn unpend(&mut self) {\n\n self.channel.clear_event(Event::HalfTransfer);\n\n 
}\n\n\n\n fn configure(&mut self, peripheral_addr: u32, memory_addr: u32, len: u16) {\n\n self.channel.set_priority_level(Priority::VeryHigh);\n\n self.channel.set_word_size::<u16>(WordSize::BITS16);\n\n self.channel.set_direction(Direction::FromPeripheral);\n\n self.channel.set_peripheral_address(peripheral_addr, false);\n\n self.channel.set_memory_address(memory_addr, true);\n\n self.channel.set_transfer_length(len);\n\n self.channel.set_circular_mode(true);\n\n }\n\n}\n\n\n", "file_path": "lib/hw/adc.rs", "rank": 32, "score": 35716.92005979847 }, { "content": "\n\npub struct Calibration;\n\n\n\nimpl Calibration {\n\n #[inline(always)]\n\n pub fn ptr() -> *const CalibrationRegBlock {\n\n 0x1fff_75aa as *const _\n\n }\n\n}\n\n\n\nimpl Deref for Calibration {\n\n type Target = CalibrationRegBlock;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n unsafe { &*Self::ptr() }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct CalibrationRegBlock {\n\n pub vref_int: RO<u16>,\n\n}\n", "file_path": "lib/hw/adc.rs", "rank": 33, "score": 35715.02370594147 }, { "content": "#![no_std]\n\n\n\nuse core::sync::atomic::{AtomicUsize, Ordering};\n\n\n\nuse defmt_rtt as _; // global logger\n\nuse panic_probe as _;\n\n\n\npub mod display;\n\npub mod error;\n\npub mod hw;\n\npub mod sampler;\n\n\n\npub const TOP_SCROLL_OFFSET: u16 = display::Offset::LEFT as u16;\n\npub const BOTTOM_SCROLL_OFFSET: u16 = display::Offset::RIGHT as u16;\n\n\n\npub type Buffer = [u16; 4];\n\n\n\nstatic COUNT: AtomicUsize = AtomicUsize::new(0);\n\ndefmt::timestamp!(\"{=usize}\", {\n\n let n = COUNT.load(Ordering::Relaxed);\n\n COUNT.store(n + 1, Ordering::Relaxed);\n\n n\n\n});\n\n\n\n/// Terminates the application and makes `probe-run` exit with exit-code = 0\n", "file_path": "lib/lib.rs", "rank": 34, "score": 17065.460778193075 }, { "content": "pub type Result<T, LCDER> = core::result::Result<T, Error<LCDER>>;\n\n\n\n#[derive(Debug)]\n\npub enum Error<LCD> {\n\n // Hw LCD error\n\n Lcd(LCD),\n\n // Queue error\n\n 
Queue,\n\n // Buffer\n\n BufferWrite,\n\n}\n", "file_path": "lib/error.rs", "rank": 35, "score": 13548.678809393896 }, { "content": "use cortex_m::peripheral::SYST;\n\nuse display_interface_parallel_gpio::PGPIO8BitInterface;\n\nuse stm32g0xx_hal::delay::Delay;\n\nuse stm32g0xx_hal::dma::C1;\n\nuse stm32g0xx_hal::gpio::gpioa::{PA0, PA4, PA5, PA6};\n\nuse stm32g0xx_hal::gpio::gpiob::{PB0, PB1, PB2, PB3, PB4, PB5, PB6, PB7, PB8, PB9};\n\nuse stm32g0xx_hal::gpio::{Analog, DefaultMode, Output, PushPull};\n\nuse stm32g0xx_hal::prelude::OutputPin;\n\nuse stm32g0xx_hal::rcc::{Config, PllConfig, Rcc, RccExt};\n\nuse stm32g0xx_hal::stm32g0::stm32g070::RCC;\n\n\n\nuse crate::hw::adc::{Adc as HwAdc, Calibration};\n\nuse crate::hw::lcd::{IliError, IliLcd};\n\nuse crate::hw::timers::BeatCounterTimer;\n\n\n", "file_path": "lib/hw/helper.rs", "rank": 36, "score": 12796.716848431599 }, { "content": "use core::convert::Infallible;\n\nuse display_interface_parallel_gpio::WriteOnlyDataCommand;\n\nuse embedded_graphics::drawable::Drawable;\n\nuse embedded_graphics::pixelcolor::Rgb565;\n\nuse embedded_graphics::DrawTarget;\n\nuse ili9341::{DisplaySize320x480, Error, Ili9341, Orientation, Scroller};\n\nuse stm32g0xx_hal::hal::blocking::delay::DelayMs;\n\nuse stm32g0xx_hal::hal::digital::v2::OutputPin;\n\n\n\nuse crate::hw::Lcd;\n\n\n\n#[derive(Debug)]\n\npub struct IliError(pub Error<Infallible>);\n\n\n\npub struct IliLcd<I, R> {\n\n ili: Ili9341<I, R>,\n\n scroller: Scroller,\n\n}\n\n\n\nimpl<I, R> IliLcd<I, R>\n", "file_path": "lib/hw/lcd.rs", "rank": 37, "score": 12796.70985879635 }, { "content": "use embedded_graphics::pixelcolor::Rgb565;\n\nuse embedded_graphics::prelude::Drawable;\n\n\n\nmod adc;\n\nmod helper;\n\nmod lcd;\n\nmod timers;\n\n\n\npub use adc::AdcConfig;\n\npub use helper::*;\n\npub use lcd::IliError;\n\npub use timers::{BeatTimer, FrameTimer};\n\n\n", "file_path": "lib/hw/mod.rs", "rank": 38, "score": 12796.690682712666 }, { "content": " }\n\n\n\n pub fn read(&self) 
-> u16 {\n\n self.timer.cnt.read().cnt_l().bits()\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.timer.cnt.reset();\n\n }\n\n\n\n fn configure(&mut self) {\n\n // Divide input clock by 4 for tDTS\n\n self.timer.cr1.write(|w| unsafe { w.ckd().bits(0b10) });\n\n // Select input Tx -> CH1 Input\n\n self.timer\n\n .tisel\n\n .write(|w| unsafe { w.ti1sel().bits(0b0000) });\n\n self.timer.ccmr1_input().write(|w| unsafe {\n\n // IC1 -> T1\n\n w.cc1s().bits(0b01);\n", "file_path": "lib/hw/timers.rs", "rank": 39, "score": 12795.242856766761 }, { "content": "use stm32g0xx_hal::hal::timer::CountDown;\n\nuse stm32g0xx_hal::hal::PwmPin as PwmPinTrait;\n\nuse stm32g0xx_hal::rcc::Rcc;\n\nuse stm32g0xx_hal::stm32g0::stm32g070::{RCC, TIM1, TIM3, TIM6, TIM7};\n\nuse stm32g0xx_hal::time::{Hertz, MicroSecond};\n\nuse stm32g0xx_hal::timer::pins::TimerPin;\n\nuse stm32g0xx_hal::timer::pwm::{Pwm, PwmExt, PwmPin};\n\nuse stm32g0xx_hal::timer::{Channel1, Channel4, Timer, TimerExt};\n\n\n\npub struct FrameTimer {\n\n timer: Timer<TIM6>,\n\n freq: Hertz,\n\n}\n\n\n\nimpl FrameTimer {\n\n pub fn new(pac_tim: TIM6, freq: Hertz, rcc: &mut Rcc) -> Self {\n\n FrameTimer {\n\n timer: pac_tim.timer(rcc),\n\n freq,\n\n }\n", "file_path": "lib/hw/timers.rs", "rank": 40, "score": 12794.62795150061 }, { "content": "where\n\n I: WriteOnlyDataCommand,\n\n R: OutputPin<Error = Infallible>,\n\n{\n\n pub fn new<D>(\n\n interface: I,\n\n reset: R,\n\n scoller_offset: (u16, u16),\n\n delay: &mut D,\n\n ) -> Result<Self, IliError>\n\n where\n\n D: DelayMs<u16>,\n\n {\n\n let mut ili = Ili9341::new(\n\n interface,\n\n reset,\n\n delay,\n\n Orientation::Landscape,\n\n DisplaySize320x480,\n\n )\n", "file_path": "lib/hw/lcd.rs", "rank": 41, "score": 12794.512683413794 }, { "content": " self.timer\n\n .arr\n\n .write(|w| unsafe { w.arr_l().bits(u16::max_value()) });\n\n\n\n // Trigger update event to load the registers\n\n self.timer.cr1.modify(|_, w| w.urs().set_bit());\n\n self.timer.egr.write(|w| 
w.ug().set_bit());\n\n self.timer.cr1.modify(|_, w| w.urs().clear_bit());\n\n }\n\n\n\n fn enable_clock_and_reset(_: &mut Rcc) {\n\n let rcc = unsafe { &(*RCC::ptr()) };\n\n rcc.apbenr1.modify(|_, w| w.tim3en().set_bit());\n\n rcc.apbrstr1.modify(|_, w| w.tim3rst().set_bit());\n\n rcc.apbrstr1.modify(|_, w| w.tim3rst().clear_bit());\n\n }\n\n}\n\n\n", "file_path": "lib/hw/timers.rs", "rank": 42, "score": 12793.993542642513 }, { "content": " fn draw<D: Drawable<Rgb565>>(&mut self, drawable: D) -> Result<(), Self::Error> {\n\n drawable.draw(&mut self.ili).map_err(IliError)\n\n }\n\n\n\n fn scroll(&mut self, num_of_lines: u16) -> Result<(), Self::Error> {\n\n self.ili\n\n .scroll_vertically(&mut self.scroller, num_of_lines)\n\n .map_err(IliError)\n\n }\n\n}\n", "file_path": "lib/hw/lcd.rs", "rank": 43, "score": 12793.530171634398 }, { "content": " }\n\n\n\n pub fn start(&mut self) {\n\n self.timer.clear_irq();\n\n self.timer.listen();\n\n self.timer.start(self.freq);\n\n }\n\n\n\n pub fn unpend(&mut self) {\n\n self.timer.clear_irq();\n\n }\n\n}\n\n\n\npub struct BeatTimer {\n\n timer: Timer<TIM7>,\n\n timeout: MicroSecond,\n\n}\n\n\n\nimpl BeatTimer {\n\n pub fn new(pac_tim: TIM7, timeout: MicroSecond, rcc: &mut Rcc) -> Self {\n", "file_path": "lib/hw/timers.rs", "rank": 44, "score": 12793.258597110409 }, { "content": "}\n\n\n\nimpl<I> BeatCounterTimer<I>\n\nwhere\n\n I: TimerPin<TIM3, Channel = Channel1>,\n\n{\n\n pub fn new(pac_timer: TIM3, input: I, rcc: &mut Rcc) -> Self {\n\n BeatCounterTimer::<I>::enable_clock_and_reset(rcc);\n\n input.setup();\n\n\n\n let mut counter = BeatCounterTimer {\n\n timer: pac_timer,\n\n _input: input,\n\n };\n\n counter.configure();\n\n counter\n\n }\n\n\n\n pub fn start(&mut self) {\n\n self.timer.cr1.modify(|_, w| w.cen().set_bit());\n", "file_path": "lib/hw/timers.rs", "rank": 45, "score": 12793.127002136132 }, { "content": " BeatTimer {\n\n timer: pac_tim.timer(rcc),\n\n timeout,\n\n }\n\n }\n\n\n\n pub fn start(&mut self) {\n\n 
self.timer.clear_irq();\n\n self.timer.listen();\n\n self.timer.start(self.timeout);\n\n }\n\n\n\n pub fn unpend(&mut self) {\n\n self.timer.clear_irq();\n\n }\n\n}\n\n\n\npub struct BeatCounterTimer<I> {\n\n timer: TIM3,\n\n _input: I,\n", "file_path": "lib/hw/timers.rs", "rank": 46, "score": 12793.091014444351 }, { "content": " // Filter tDTS * 4, 8 samples\n\n w.ic1f().bits(0b1111);\n\n // No prescaler\n\n w.ic1psc().bits(0b00)\n\n });\n\n self.timer.ccer.write(|w| {\n\n // Non-inverted rising edge\n\n w.cc1p().clear_bit();\n\n w.cc1np().clear_bit()\n\n });\n\n self.timer.smcr.write(|w| unsafe {\n\n // External clock 1\n\n w.sms().bits(0b111);\n\n // T1FP1 as trigger\n\n w.ts().bits(0b101)\n\n });\n\n\n\n // Set prescaler as 0\n\n self.timer.psc.write(|w| unsafe { w.psc().bits(0) });\n\n // Set ARR to max value\n", "file_path": "lib/hw/timers.rs", "rank": 47, "score": 12793.059967429725 }, { "content": " _timer: timer,\n\n trig,\n\n }\n\n }\n\n\n\n pub fn start(&mut self) {\n\n self.trig.set_duty(self.trig.get_max_duty() / 2);\n\n self.trig.enable();\n\n }\n\n}\n", "file_path": "lib/hw/timers.rs", "rank": 48, "score": 12792.954677041653 }, { "content": " .map_err(IliError)?;\n\n let scroller = ili\n\n .configure_vertical_scroll(scoller_offset.0, scoller_offset.1)\n\n .map_err(IliError)?;\n\n\n\n Ok(IliLcd { ili, scroller })\n\n }\n\n}\n\n\n\nimpl<I, R> Lcd for IliLcd<I, R>\n\nwhere\n\n I: WriteOnlyDataCommand,\n\n R: OutputPin<Error = Infallible>,\n\n{\n\n type Error = IliError;\n\n\n\n fn clear(&mut self, color: Rgb565) -> Result<(), Self::Error> {\n\n self.ili.clear(color).map_err(IliError)\n\n }\n\n\n", "file_path": "lib/hw/lcd.rs", "rank": 49, "score": 12792.17082617183 }, { "content": "pub trait Lcd {\n\n type Error;\n\n fn clear(&mut self, color: Rgb565) -> Result<(), Self::Error>;\n\n fn draw<D: Drawable<Rgb565>>(&mut self, drawable: D) -> Result<(), Self::Error>;\n\n fn scroll(&mut self, num_of_lines: u16) -> Result<(), Self::Error>;\n\n}\n", 
"file_path": "lib/hw/mod.rs", "rank": 50, "score": 11510.65857495346 }, { "content": "// ADC DMA channel\n\ntype DmaChannel = C1;\n", "file_path": "lib/hw/helper.rs", "rank": 51, "score": 10964.725151601795 }, { "content": "// PA0 - ADC ECG input channel\n\ntype InputChannel = PA0<Analog>;\n", "file_path": "lib/hw/helper.rs", "rank": 52, "score": 10465.883791840173 }, { "content": "// PA6 - ECG beat counter input\n\ntype CounterInput = PA6<DefaultMode>;\n\n\n\n// RESERVED for future use\n\n// PA1 - Comparator threshold - ADC input\n\n// PA2 - USART2_TX\n\n// PA3 - USART2_RX\n\n\n\n// PA4 - LCD_RST (Reset)\n\npub type LcdRst = PA4<Output<PushPull>>;\n\n// PA5 - LCD_RD (Read signal)\n\npub type LcdRD = PA5<Output<PushPull>>;\n\n\n\npub type Adc = HwAdc<InputChannel, DmaChannel>;\n\npub type BeatCounter = BeatCounterTimer<CounterInput>;\n\npub type LcdInterface =\n\n PGPIO8BitInterface<LcdD0, LcdD1, LcdD2, LcdD3, LcdD4, LcdD5, LcdD6, LcdD7, LcdDC, LcdWR>;\n\npub type HwLcd = IliLcd<LcdInterface, LcdRst>;\n\n\n", "file_path": "lib/hw/helper.rs", "rank": 53, "score": 10007.97556718864 }, { "content": "// PB5 - LCD_D5\n\ntype LcdD5 = PB5<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 54, "score": 9590.63250481841 }, { "content": "// PB4 - LCD_D4\n\ntype LcdD4 = PB4<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 55, "score": 9590.63250481841 }, { "content": "// PB3 - LCD_D3\n\ntype LcdD3 = PB3<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 56, "score": 9590.63250481841 }, { "content": "// PB1 - LCD_D1\n\ntype LcdD1 = PB1<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 57, "score": 9590.63250481841 }, { "content": "// PB0 - LCD_D0\n\ntype LcdD0 = PB0<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 58, "score": 9590.63250481841 }, { "content": "// PB9 - LCD_WR (Write signal)\n\ntype LcdWR = PB9<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 59, "score": 9590.63250481841 }, { 
"content": "// PB7 - LCD_D7\n\ntype LcdD7 = PB7<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 60, "score": 9590.63250481841 }, { "content": "// PB2 - LCD_D2\n\ntype LcdD2 = PB2<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 61, "score": 9590.63250481841 }, { "content": "// PB8 - LCD_DC (Command[Low]/Data[High])\n\ntype LcdDC = PB8<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 62, "score": 9590.63250481841 }, { "content": "// PB6 - LCD_D6\n\ntype LcdD6 = PB6<Output<PushPull>>;\n", "file_path": "lib/hw/helper.rs", "rank": 63, "score": 9590.63250481841 } ]
Rust
2021/src/day17.rs
shrugalic/advent_of_code
8d18a3dbdcf847a667ab553f5441676003b9362a
use std::cmp::max; use std::ops::{AddAssign, RangeInclusive}; const INPUT: &str = include_str!("../input/day17.txt"); pub(crate) fn day17_part1() -> isize { Probe::from(INPUT).highest_point() } pub(crate) fn day17_part2() -> usize { Probe::from(INPUT).trajectory_count() } #[derive(Debug, PartialEq)] struct Probe { target_area: RangeInclusive<Pair>, } impl Probe { fn highest_point(&self) -> isize { self.target_trajectory().0 } fn trajectory_count(&self) -> usize { self.target_trajectory().1 } fn target_trajectory(&self) -> (isize, usize) { let mut max_ys = vec![]; let mut target_velocity_count = 0; let y_velocity_range = self.y_velocity_range(); for x in self.x_velocity_range() { for y in y_velocity_range.clone() { if let Some(max_y) = self.simulate_trajectory(x, y) { max_ys.push(max_y); target_velocity_count += 1; } } } (*max_ys.iter().max().unwrap(), target_velocity_count) } fn simulate_trajectory(&self, x: isize, y: isize) -> Option<isize> { let mut position = Pair::new(0, 0); let mut velocity = Pair::new(x, y); let mut max_y = isize::MIN; while !(position.is_past(&self.target_area) || velocity.cannot_reach(&self.target_area, &position)) { max_y = max(max_y, position.y); position += velocity; velocity.x = max(velocity.x - 1, 0); velocity.y -= 1; if position.is_within(&self.target_area) { return Some(max_y); } } None } fn x_velocity_range(&self) -> RangeInclusive<isize> { let mut min_x = 1; while Probe::reachable(min_x) < self.target_area.start().x { min_x += 1; } let max_x = self.target_area.end().x; min_x..=max_x } fn reachable(x: isize) -> isize { x * (x + 1) / 2 } fn y_velocity_range(&self) -> RangeInclusive<isize> { let min_y = self.target_area.start().y; let max_y = self.target_area.start().y.abs(); min_y..=max_y } } impl From<&str> for Probe { fn from(input: &str) -> Self { let range_from = |range: &str| -> RangeInclusive<isize> { let (start, end) = range.split_once("..").unwrap(); start.parse().unwrap()..=end.parse().unwrap() }; let (x, y) = input .trim() 
.trim_start_matches("target area: x=") .split_once(", y=") .map(|(x, y)| (range_from(x), range_from(y))) .unwrap(); Probe { target_area: Pair::new(*x.start(), *y.start())..=Pair::new(*x.end(), *y.end()), } } } #[derive(Debug, PartialEq, Copy, Clone)] struct Pair { x: isize, y: isize, } impl Pair { fn new(x: isize, y: isize) -> Self { Self { x, y } } fn is_past(&self, target_area: &RangeInclusive<Pair>) -> bool { self.x > target_area.end().x || self.y < target_area.start().y } fn is_within(&self, target_area: &RangeInclusive<Pair>) -> bool { (target_area.start().x..=target_area.end().x).contains(&self.x) && (target_area.start().y..=target_area.end().y).contains(&self.y) } fn cannot_reach(&self, target_area: &RangeInclusive<Pair>, position: &Pair) -> bool { self.x == 0 && position.x < target_area.start().x } } impl AddAssign for Pair { fn add_assign(&mut self, rhs: Self) { self.x += rhs.x; self.y += rhs.y; } } #[cfg(test)] mod tests { use super::*; const EXAMPLE: &str = "target area: x=20..30, y=-10..-5"; #[test] fn part1_example() { assert_eq!(45, Probe::from(EXAMPLE).highest_point()); } #[test] fn part1() { assert_eq!(5565, day17_part1()); } #[test] fn part2_example() { assert_eq!(112, Probe::from(EXAMPLE).trajectory_count()); } #[test] fn part2() { assert_eq!(2118, day17_part2()); } }
use std::cmp::max; use std::ops::{AddAssign, RangeInclusive}; const INPUT: &str = include_str!("../input/day17.txt"); pub(crate) fn day17_part1() -> isize { Probe::from(INPUT).highest_point() } pub(crate) fn day17_part2() -> usize { Probe::from(INPUT).trajectory_count() } #[derive(Debug, PartialEq)] struct Probe { target_area: RangeInclusive<Pair>, } impl Probe { fn highest_point(&self) -> isize { self.target_trajectory().0 } fn trajectory_count(&self) -> usize { self.target_trajectory().1 } fn target_trajectory(&self) -> (isize, usize) { let mut max_ys = vec![]; let mut target_velocity_count = 0; let y_velocity_range = self.y_velocity_range(); for x in self.x_velocity_range() { for y in y_velocity_range.clone() {
} } (*max_ys.iter().max().unwrap(), target_velocity_count) } fn simulate_trajectory(&self, x: isize, y: isize) -> Option<isize> { let mut position = Pair::new(0, 0); let mut velocity = Pair::new(x, y); let mut max_y = isize::MIN; while !(position.is_past(&self.target_area) || velocity.cannot_reach(&self.target_area, &position)) { max_y = max(max_y, position.y); position += velocity; velocity.x = max(velocity.x - 1, 0); velocity.y -= 1; if position.is_within(&self.target_area) { return Some(max_y); } } None } fn x_velocity_range(&self) -> RangeInclusive<isize> { let mut min_x = 1; while Probe::reachable(min_x) < self.target_area.start().x { min_x += 1; } let max_x = self.target_area.end().x; min_x..=max_x } fn reachable(x: isize) -> isize { x * (x + 1) / 2 } fn y_velocity_range(&self) -> RangeInclusive<isize> { let min_y = self.target_area.start().y; let max_y = self.target_area.start().y.abs(); min_y..=max_y } } impl From<&str> for Probe { fn from(input: &str) -> Self { let range_from = |range: &str| -> RangeInclusive<isize> { let (start, end) = range.split_once("..").unwrap(); start.parse().unwrap()..=end.parse().unwrap() }; let (x, y) = input .trim() .trim_start_matches("target area: x=") .split_once(", y=") .map(|(x, y)| (range_from(x), range_from(y))) .unwrap(); Probe { target_area: Pair::new(*x.start(), *y.start())..=Pair::new(*x.end(), *y.end()), } } } #[derive(Debug, PartialEq, Copy, Clone)] struct Pair { x: isize, y: isize, } impl Pair { fn new(x: isize, y: isize) -> Self { Self { x, y } } fn is_past(&self, target_area: &RangeInclusive<Pair>) -> bool { self.x > target_area.end().x || self.y < target_area.start().y } fn is_within(&self, target_area: &RangeInclusive<Pair>) -> bool { (target_area.start().x..=target_area.end().x).contains(&self.x) && (target_area.start().y..=target_area.end().y).contains(&self.y) } fn cannot_reach(&self, target_area: &RangeInclusive<Pair>, position: &Pair) -> bool { self.x == 0 && position.x < target_area.start().x } } impl 
AddAssign for Pair { fn add_assign(&mut self, rhs: Self) { self.x += rhs.x; self.y += rhs.y; } } #[cfg(test)] mod tests { use super::*; const EXAMPLE: &str = "target area: x=20..30, y=-10..-5"; #[test] fn part1_example() { assert_eq!(45, Probe::from(EXAMPLE).highest_point()); } #[test] fn part1() { assert_eq!(5565, day17_part1()); } #[test] fn part2_example() { assert_eq!(112, Probe::from(EXAMPLE).trajectory_count()); } #[test] fn part2() { assert_eq!(2118, day17_part2()); } }
if let Some(max_y) = self.simulate_trajectory(x, y) { max_ys.push(max_y); target_velocity_count += 1; }
if_condition
[ { "content": "fn parse_family(input: Vec<&str>) -> Vec<Vec<isize>> {\n\n // Vector of family members. This is only needed to get a unique index for each location\n\n let mut family: Vec<_> = vec![];\n\n // Happiness from each family member to all other family members (by index)\n\n let mut happiness: Vec<Vec<isize>> = vec![];\n\n\n\n for line in input {\n\n let parts: Vec<_> = line.split(|c| c == ' ' || c == '.').collect();\n\n\n\n // Example: Alice would gain 54 happiness units by sitting next to Bob.\n\n let center = parts[0].to_string();\n\n let sign = parts[3].parse::<isize>().unwrap() * if parts[2] == \"gain\" { 1 } else { -1 };\n\n let neighbor = parts[10].to_string();\n\n\n\n let mut get_family_mumber_id = |member| {\n\n if let Some(idx) = family.iter().position(|m| m == &member) {\n\n idx\n\n } else {\n\n family.push(member);\n\n happiness.iter_mut().for_each(|h| h.push(0));\n", "file_path": "2015/src/day13.rs", "rank": 0, "score": 366106.2547303121 }, { "content": "fn parse_distances_from(input: Vec<&str>) -> Vec<Vec<usize>> {\n\n // Vector of location names. 
This is only needed to get a unique index for each location\n\n let mut locations: Vec<String> = vec![];\n\n // Distances from each location to all other locations (by index)\n\n let mut distances: Vec<Vec<usize>> = vec![];\n\n\n\n input.iter().for_each(|line| {\n\n let mut get_index_for_location = |loc| {\n\n if let Some(idx) = locations.iter().position(|l| l == &loc) {\n\n idx\n\n } else {\n\n locations.push(loc);\n\n distances.iter_mut().for_each(|d| d.push(usize::MAX));\n\n distances.push(vec![0; locations.len()]);\n\n locations.len() - 1\n\n }\n\n };\n\n // Example: London to Dublin = 464\n\n let split: Vec<_> = line.split_ascii_whitespace().collect();\n\n let (loc1, loc2, distance) = (split[0], split[2], split[4].parse().unwrap());\n", "file_path": "2015/src/day09.rs", "rank": 1, "score": 365656.9485482758 }, { "content": "fn parse_containers(input: Vec<&str>) -> Vec<usize> {\n\n input\n\n .into_iter()\n\n .map(|line| line.parse().unwrap())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE: &str = \"\\\n\n20\n\n15\n\n10\n\n5\n\n5\";\n\n\n\n #[test]\n", "file_path": "2015/src/day17.rs", "rank": 2, "score": 365152.6729214514 }, { "content": "fn parse_supplies(input: &str) -> Vec<Vec<usize>> {\n\n input\n\n .trim()\n\n .split(\"\\n\\n\")\n\n .filter_map(|s| s.lines().map(|s| s.parse().ok()).collect())\n\n .collect()\n\n}\n\n\n", "file_path": "2022/src/day01.rs", "rank": 3, "score": 365152.6729214514 }, { "content": "fn route_lengths(input: Vec<&str>) -> Vec<usize> {\n\n let distances = parse_distances_from(input);\n\n generate_permutations_of_n_indices(distances.len())\n\n .into_iter()\n\n .map(|order| {\n\n order\n\n .windows(2)\n\n .into_iter()\n\n .map(|d| distances[d[0]][d[1]])\n\n .sum()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2015/src/day09.rs", "rank": 4, "score": 365152.6729214514 }, { "content": "fn parse(input: &str) -> Vec<isize> {\n\n input\n\n .trim()\n\n .split(',')\n\n 
.filter_map(|n| n.parse().ok())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"16,1,2,0,4,2,7,1,2,14\";\n\n\n\n #[test]\n\n fn part1_example() {\n\n let positions = parse(EXAMPLE);\n\n assert_eq!(37, minimal_fuel_to_align(positions, false));\n\n }\n\n\n", "file_path": "2021/src/day07.rs", "rank": 5, "score": 361347.9582044218 }, { "content": "fn parse(input: &str) -> Vec<usize> {\n\n input\n\n .trim()\n\n .lines()\n\n .filter_map(|s| s.parse().ok())\n\n .collect()\n\n}\n\n\n", "file_path": "2021/src/day01.rs", "rank": 6, "score": 360852.8508521026 }, { "content": "fn parse(input: &str) -> Vec<usize> {\n\n input\n\n .trim()\n\n .split(',')\n\n .filter_map(|n| n.parse().ok())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"3,4,3,1,2\";\n\n\n\n #[test]\n\n fn part1_example() {\n\n assert_eq!(26, multiply(parse(EXAMPLE), 18));\n\n assert_eq!(5934, multiply(parse(EXAMPLE), 80));\n\n }\n\n\n", "file_path": "2021/src/day06.rs", "rank": 7, "score": 360852.8508521027 }, { "content": "fn sum_of_numbers(input: Vec<&str>) -> isize {\n\n input\n\n .iter()\n\n .map(|line| {\n\n line.split(|c| [',', '[', ']', '{', '}', ':'].contains(&c))\n\n .filter_map(|s| s.parse::<isize>().ok())\n\n .sum::<isize>()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "2015/src/day12.rs", "rank": 8, "score": 354225.2055865847 }, { "content": "fn number_of_constellations(input: Vec<&str>) -> usize {\n\n let mut points: Vec<Point> = input.into_iter().map(Point::from).collect();\n\n\n\n let mut cons: Vec<Constellation> = vec![];\n\n while let Some(point) = points.pop() {\n\n let close_cons = point.remove_close_constellations(&mut cons);\n\n let mut new_con = Constellation::from(point);\n\n new_con.add_cons(close_cons);\n\n cons.push(new_con);\n\n }\n\n cons.len()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n #[test]\n\n fn part1_example1() {\n", 
"file_path": "2018/src/day25.rs", "rank": 9, "score": 353746.36871956114 }, { "content": "fn diagnostic_checksum(input: Vec<&str>) -> usize {\n\n let mut touring_machine = TouringMachine::from(input);\n\n touring_machine.check_sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE: &str = \"\\\n\nBegin in state A.\n\nPerform a diagnostic checksum after 6 steps.\n\n\n\nIn state A:\n\n If the current value is 0:\n\n - Write the value 1.\n\n - Move one slot to the right.\n\n - Continue with state B.\n\n If the current value is 1:\n", "file_path": "2017/src/day25.rs", "rank": 10, "score": 353746.36871956114 }, { "content": "fn distance_to_origin(input: Vec<&str>) -> usize {\n\n curr_and_max_distances_to_origin(input).0\n\n}\n\n\n", "file_path": "2017/src/day11.rs", "rank": 11, "score": 353746.36871956114 }, { "content": "fn trip_severity(input: Vec<&str>) -> usize {\n\n let mut firewalls = parse_firewalls(input);\n\n\n\n let mut severity = 0;\n\n for depth in 0..firewalls.len() {\n\n // Move packet along the top layer, and sum up severity if it's scanned\n\n // by the scanner of the optional firewall at the current depth\n\n if let Some(firewall) = firewalls.get(depth).unwrap() {\n\n if firewall.is_scanning_top() {\n\n severity += depth * firewall.range;\n\n }\n\n }\n\n firewalls\n\n .iter_mut()\n\n .filter_map(|f| f.as_mut())\n\n .for_each(|firewall| {\n\n if firewall.is_scanning_down {\n\n firewall.scanner += 1;\n\n if firewall.scanner == firewall.range - 1 {\n\n firewall.is_scanning_down = false;\n", "file_path": "2017/src/day13.rs", "rank": 12, "score": 353746.36871956114 }, { "content": "fn count_escaping_overhead(input: Vec<&str>) -> usize {\n\n let (orig_unescaped_count, escaped_count) = input\n\n .iter()\n\n .map(|s| get_escaped_counts(s))\n\n .reduce(|a, b| (a.0 + b.0, a.1 + b.1))\n\n .unwrap();\n\n escaped_count - orig_unescaped_count\n\n}\n\n\n", "file_path": "2015/src/day08.rs", "rank": 13, "score": 
347092.09982168314 }, { "content": "fn count_programs_in_group0(input: Vec<&str>) -> usize {\n\n count_group0_members_or_total_groups(input, true)\n\n}\n\n\n", "file_path": "2017/src/day12.rs", "rank": 14, "score": 347092.0998216831 }, { "content": "fn total_ribbon_needed(input: Vec<&str>) -> usize {\n\n input.iter().map(|s| Box::from(s).ribbon_needed()).sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1_examples() {\n\n assert_eq!(58, Box::from(\"2x3x4\").wrapping_paper_needed());\n\n assert_eq!(43, Box::from(\"1x1x10\").wrapping_paper_needed());\n\n }\n\n\n\n #[test]\n\n fn part1() {\n\n assert_eq!(1588178, day02_part1());\n\n }\n\n\n\n #[test]\n", "file_path": "2015/src/day02.rs", "rank": 15, "score": 347092.09982168314 }, { "content": "fn count_total_groups(input: Vec<&str>) -> usize {\n\n count_group0_members_or_total_groups(input, false)\n\n}\n\n\n", "file_path": "2017/src/day12.rs", "rank": 16, "score": 347092.09982168314 }, { "content": "fn longest_route_length(input: Vec<&str>) -> usize {\n\n route_lengths(input).into_iter().max().unwrap()\n\n}\n", "file_path": "2015/src/day09.rs", "rank": 17, "score": 347092.0998216831 }, { "content": "fn delay_to_get_through_safely(input: Vec<&str>) -> usize {\n\n // We only care about firewalls when they're scanning the top layer, where packets travel\n\n // These are the periods where their scanner is at the top\n\n let firewall_periods: Vec<Option<usize>> = parse_firewalls(input)\n\n .into_iter()\n\n .map(|fw| fw.map(|firewall| (firewall.range - 1) * 2))\n\n .collect();\n\n\n\n let mut packet_depths: VecDeque<Option<usize>> = VecDeque::new();\n\n for starting_delay in 0..usize::MAX {\n\n // Add a new packet, which also moves the traveling packets along\n\n packet_depths.push_front(Some(starting_delay));\n\n\n\n // Check if a packet made it to the other side\n\n if let Some(Some(winners_delay)) = packet_depths.get(firewall_periods.len()) {\n\n return *winners_delay;\n\n 
}\n\n\n\n // Remove any packets scanned by a firewall\n\n for (depth, delay) in packet_depths\n", "file_path": "2017/src/day13.rs", "rank": 18, "score": 347092.0998216831 }, { "content": "fn count_mul_instructions(input: Vec<&str>) -> usize {\n\n let instr = input.into_iter().map(Instr::from).collect::<Vec<_>>();\n\n let mut program = Program::new(0, &instr);\n\n let mut mul_count = 0;\n\n loop {\n\n let instr = program.instruction();\n\n if matches!(instr, Some(Instr::Mul(_, _))) {\n\n mul_count += 1;\n\n }\n\n match program.step() {\n\n Running => {}\n\n SentOutput(_) => (),\n\n AwaitingInput | Terminated => {\n\n return mul_count;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2017/src/day23.rs", "rank": 19, "score": 347092.09982168314 }, { "content": "fn count_unescaping_overhead(input: Vec<&str>) -> usize {\n\n let (orig_escaped_count, unescaped_count) = input\n\n .iter()\n\n .map(|s| get_unescaped_counts(s))\n\n .reduce(|a, b| (a.0 + b.0, a.1 + b.1))\n\n .unwrap();\n\n orig_escaped_count - unescaped_count\n\n}\n\n\n", "file_path": "2015/src/day08.rs", "rank": 20, "score": 347092.09982168314 }, { "content": "fn max_distance_to_origin(input: Vec<&str>) -> usize {\n\n curr_and_max_distances_to_origin(input).1\n\n}\n\n\n", "file_path": "2017/src/day11.rs", "rank": 21, "score": 347092.09982168314 }, { "content": "fn shortest_route_length(input: Vec<&str>) -> usize {\n\n route_lengths(input).into_iter().min().unwrap()\n\n}\n", "file_path": "2015/src/day09.rs", "rank": 22, "score": 347092.09982168314 }, { "content": "fn pixels_after_n_iterations(input: Vec<&str>, n: usize) -> usize {\n\n let rules: Vec<Rule> = input.iter().map(Rule::from).collect();\n\n let pattern = Pattern::from(STARTING_PATTERN);\n\n let mut cache: HashMap<(Pattern, usize), usize> = HashMap::new();\n\n iterate(n, &rules, pattern, &mut cache)\n\n}\n\n\n", "file_path": "2017/src/day21.rs", "rank": 23, "score": 346299.8573339368 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n 
input.lines().collect()\n\n}\n\n\n\nmod day01;\n\nuse crate::day01::{day1_part1, day1_part2};\n", "file_path": "2017/src/main.rs", "rank": 24, "score": 342479.96086981345 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.lines().collect()\n\n}\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\nuse crate::day01::day1_part2;\n\nmod day01;\n", "file_path": "2019/src/main.rs", "rank": 25, "score": 342479.96086981345 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.trim().lines().collect()\n\n}\n\n\n\nmod day01;\n\nuse crate::day01::{day01_part1, day01_part2};\n", "file_path": "2016/src/main.rs", "rank": 26, "score": 342479.96086981345 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.lines().collect()\n\n}\n\n\n\nuse crate::day01::{day1_part1, day1_part2};\n\nmod day01;\n", "file_path": "2018/src/main.rs", "rank": 27, "score": 342479.96086981345 }, { "content": "fn parse(input: &str) -> Vec<&str> {\n\n input.trim().lines().collect()\n\n}\n\n\n\nmod day01;\n\nmod permutation;\n\nuse crate::day01::{day01_part1, day01_part2};\n", "file_path": "2015/src/main.rs", "rank": 28, "score": 342479.96086981345 }, { "content": "fn sum_of_numbers_without_red(input: Vec<&str>) -> isize {\n\n input.iter().map(|s| sum_without_red(s)).sum()\n\n}\n\n\n", "file_path": "2015/src/day12.rs", "rank": 29, "score": 341297.5172001405 }, { "content": "fn curr_and_max_distances_to_origin(input: Vec<&str>) -> (usize, usize) {\n\n let steps: Vec<Dir> = input[0].split(',').map(Dir::from).collect();\n\n let mut max_dist = 0;\n\n\n\n let mut pos = Hex::default();\n\n for dir in steps {\n\n pos.move_in(dir);\n\n max_dist = max_dist.max(pos.distance_to_origin());\n\n }\n\n\n\n (pos.distance_to_origin(), max_dist)\n\n}\n\n\n\npub(crate) fn day11_part2() -> usize {\n\n max_distance_to_origin(parse(INPUT))\n\n}\n\n\n", "file_path": "2017/src/day11.rs", "rank": 30, "score": 341195.0745745023 }, { "content": "fn sum_of_valid_sector_ids(input: Vec<&str>) -> usize 
{\n\n input\n\n .into_iter()\n\n .filter_map(extract_valid_room)\n\n .map(|(_, id)| id)\n\n .sum()\n\n}\n\n\n", "file_path": "2016/src/day04.rs", "rank": 31, "score": 340848.21101810416 }, { "content": "fn count_possible_triangle_rows(input: Vec<&str>) -> usize {\n\n input\n\n .iter()\n\n .map(|line| {\n\n line.split_ascii_whitespace()\n\n .map(|n| n.parse().unwrap())\n\n .collect::<Vec<_>>()\n\n })\n\n .filter(|s| is_triangle(s[0], s[1], s[2]))\n\n .count()\n\n}\n\n\n", "file_path": "2016/src/day03.rs", "rank": 32, "score": 340848.21101810416 }, { "content": "fn find_matching_memory_part1(input: Vec<&str>) -> usize {\n\n let analysis_result = analysis_result();\n\n for line in input {\n\n let (sue_number, memories) = extract_memories(line);\n\n if analysis_result.part1_matches(memories) {\n\n return sue_number;\n\n }\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "2015/src/day16.rs", "rank": 33, "score": 340848.2110181041 }, { "content": "fn total_wrapping_paper_needed(input: Vec<&str>) -> usize {\n\n input\n\n .iter()\n\n .map(|s| Box::from(s).wrapping_paper_needed())\n\n .sum()\n\n}\n\n\n", "file_path": "2015/src/day02.rs", "rank": 34, "score": 340848.21101810416 }, { "content": "fn find_matching_memory_part2(input: Vec<&str>) -> usize {\n\n let analysis_result = analysis_result();\n\n for line in input {\n\n let (sue_number, memories) = extract_memories(line);\n\n if analysis_result.part2_matches(memories) {\n\n return sue_number;\n\n }\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "2015/src/day16.rs", "rank": 35, "score": 340848.2110181042 }, { "content": "fn number_of_particles_remaining_after_collisions(input: Vec<&str>) -> usize {\n\n let mut particles = parse_input(input);\n\n while particles.iter().any(Particle::is_decelerating)\n\n // This second condition is only for the part 2 example ;)\n\n || particles.len() == 4\n\n {\n\n let mut count_by_position = HashMap::new();\n\n particles.iter_mut().for_each(|particle| {\n\n particle.tick();\n\n 
*count_by_position.entry(particle.pos).or_insert(0) += 1;\n\n });\n\n for particle_pos in count_by_position\n\n .iter()\n\n .filter(|(_, count)| **count > 1)\n\n .map(|(pos, _)| pos)\n\n {\n\n while let Some(idx) = particles\n\n .iter()\n\n .position(|particle| particle.pos == *particle_pos)\n\n {\n\n particles.swap_remove(idx);\n\n }\n\n }\n\n }\n\n particles.len()\n\n}\n\n\n", "file_path": "2017/src/day20.rs", "rank": 36, "score": 340848.21101810416 }, { "content": "fn size_of_largest_finite_area(input: Vec<&str>) -> usize {\n\n let coords: Vec<_> = input.iter().map(Loc::from).collect();\n\n let (min, max) = (Loc::min(&coords), Loc::max(&coords));\n\n // println!(\"min = {}, max = {}\", min, max);\n\n\n\n // Calculate the manhattan distances from all locations within the min/max rectangle\n\n // to the closest coordinate. It will be None if it's equally close to multiple locations.\n\n let closest_coord_idx_by_loc = indices_of_closest_coordinate(&coords, &min, &max);\n\n // Remove coordinate indices of infinite areas\n\n let finite_area_coord_indices =\n\n remove_infinite_areas(coords, min, max, &closest_coord_idx_by_loc);\n\n // println!(\"Finite area coord indices = {:?}\", finite_area_coord_indices);\n\n\n\n let mut count_by_index: HashMap<Index, usize> = HashMap::new();\n\n closest_coord_idx_by_loc.iter().for_each(|(_, idx)| {\n\n if let Some(idx) = idx {\n\n if finite_area_coord_indices.contains(idx) {\n\n *count_by_index.entry(*idx).or_insert(0) += 1;\n\n }\n\n }\n\n });\n\n // println!(\"{:?}\", count_by_index);\n\n *count_by_index\n\n .iter()\n\n .max_by_key(|(_idx, count)| *count)\n\n .unwrap()\n\n .1\n\n}\n\n\n", "file_path": "2018/src/day06.rs", "rank": 37, "score": 340848.2110181041 }, { "content": "fn count_possible_triangle_columns(input: Vec<&str>) -> usize {\n\n let input: Vec<Vec<usize>> = input\n\n .iter()\n\n .map(|line| {\n\n line.split_ascii_whitespace()\n\n .map(|n| n.parse().unwrap())\n\n .collect()\n\n })\n\n .collect();\n\n input\n\n 
.windows(3)\n\n .step_by(3)\n\n .map(|n| {\n\n // println!(\"{:?}\", n);\n\n (0..3)\n\n .into_iter()\n\n .filter(move |col| is_triangle(n[0][*col], n[1][*col], n[2][*col]))\n\n .count()\n\n })\n\n .sum()\n", "file_path": "2016/src/day03.rs", "rank": 38, "score": 340848.21101810416 }, { "content": "fn max_distance_after(duration: usize, input: Vec<&str>) -> usize {\n\n parse_reindeer(input)\n\n .into_iter()\n\n .map(|r| r.distance_after(duration))\n\n .max()\n\n .unwrap()\n\n}\n\n\n", "file_path": "2015/src/day14.rs", "rank": 39, "score": 340183.41128733533 }, { "content": "fn max_points_after(duration: usize, input: Vec<&str>) -> usize {\n\n let reindeer = parse_reindeer(input);\n\n let mut time = 0;\n\n let mut distances = vec![0; reindeer.len()];\n\n let mut points = vec![0; reindeer.len()];\n\n while time < duration {\n\n for (idx, reindeer) in reindeer.iter().enumerate() {\n\n if time % reindeer.cycle_duration() < reindeer.fly_duration {\n\n distances[idx] += reindeer.speed;\n\n }\n\n }\n\n let max_distance = distances.iter().max().unwrap();\n\n distances\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, dist)| dist == &max_distance)\n\n .for_each(|(idx, _)| points[idx] += 1);\n\n time += 1;\n\n }\n\n *points.iter().max().unwrap()\n\n}\n\n\n", "file_path": "2015/src/day14.rs", "rank": 40, "score": 340183.4112873353 }, { "content": "fn parse_input(input: Vec<&str>) -> Vec<Vec<char>> {\n\n input.iter().map(|line| line.chars().collect()).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE: &str = \" | \\\n\n \\n | +--+ \\\n\n \\n A | C \\\n\n \\n F---|----E|--+ \\\n\n \\n | | | D \\\n\n \\n +B-+ +--+ \\\n\n \\n \";\n\n\n\n #[test]\n\n fn part1_example() {\n\n assert_eq!(\n", "file_path": "2017/src/day19.rs", "rank": 41, "score": 340065.54181322234 }, { "content": "fn parse_and_settle(input: &str) -> (Vec<Brick>, usize) {\n\n let bricks = parse(input);\n\n settle(bricks)\n\n}\n\n\n", "file_path": 
"2023/src/day22.rs", "rank": 42, "score": 339563.242047036 }, { "content": "fn parse_input(input: Vec<&str>) -> Vec<Component> {\n\n input\n\n .iter()\n\n .map(|s| {\n\n let (l, r) = s.split_once('/').unwrap();\n\n (l.parse().unwrap(), r.parse().unwrap())\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2017/src/day24.rs", "rank": 43, "score": 338182.38698442484 }, { "content": "fn parse_input(input: Vec<&str>) -> Vec<Particle> {\n\n input\n\n .iter()\n\n .map(|line| Particle::parse(line).unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "2017/src/day20.rs", "rank": 44, "score": 338182.3869844249 }, { "content": "fn find_high_score_fix_calories(input: Vec<&str>) -> isize {\n\n find_high_score(input, true)\n\n}\n\n\n\nconst TOTAL_AMOUNT: isize = 100;\n", "file_path": "2015/src/day15.rs", "rank": 45, "score": 335413.74054813595 }, { "content": "fn find_high_score_ignore_calories(input: Vec<&str>) -> isize {\n\n find_high_score(input, false)\n\n}\n\n\n", "file_path": "2015/src/day15.rs", "rank": 46, "score": 335413.74054813595 }, { "content": "fn process_int_code_with_input(v: &mut Vec<isize>, input: isize) -> Option<isize> {\n\n let mut idx = 0;\n\n let mut output = None;\n\n while idx < v.len() {\n\n let s = to_5_digit_string_padded_with_leading_zeroes(v[idx]);\n\n let code = to_num(&s[(s.len() - 2)..s.len()]);\n\n let op = Op::from_code(code);\n\n let modes = extract_modes(&s);\n\n // let pre = format!(\"{:?}: {:?}\", s, op);\n\n match op {\n\n Add | Multiply | LessThan | Equals => {\n\n let p1 = param_value(v, idx + 1, &modes[0]);\n\n let p2 = param_value(v, idx + 2, &modes[1]);\n\n let res = match op {\n\n Add => p1 + p2,\n\n Multiply => p1 * p2,\n\n LessThan => eval(p1 < p2),\n\n Equals => eval(p1 == p2),\n\n _ => unreachable!(),\n\n };\n", "file_path": "2019/src/day05.rs", "rank": 47, "score": 335252.1460130362 }, { "content": "fn number_of_times_program_1_sent_a_value(input: Vec<&str>) -> usize {\n\n let instr = 
input.into_iter().map(Instr::from).collect::<Vec<_>>();\n\n let mut programs = [Program::new(0, &instr), Program::new(1, &instr)];\n\n let mut send_count = [0, 0];\n\n let mut is_blocked = [false, false];\n\n while !(is_blocked[0] && is_blocked[1]) {\n\n for id in 0..=1 {\n\n match programs[id].step() {\n\n Running => {}\n\n SentOutput(value) => {\n\n send_count[id] += 1;\n\n programs[(id + 1) % 2].receive(value);\n\n }\n\n AwaitingInput | Terminated => {\n\n is_blocked[id] = true;\n\n }\n\n }\n\n }\n\n }\n\n send_count[1]\n", "file_path": "2017/src/day18.rs", "rank": 48, "score": 334977.87466181617 }, { "content": "fn index_of_particle_staying_closest_to_origin(input: Vec<&str>) -> usize {\n\n let particles = parse_input(input);\n\n // The particle that will stay closest to the origin is the one with the smallest acceleration\n\n particles\n\n .iter()\n\n .enumerate()\n\n .min_by_key(|(_, p)| p.acc.abs())\n\n .unwrap()\n\n .0\n\n}\n\n\n", "file_path": "2017/src/day20.rs", "rank": 49, "score": 334977.87466181617 }, { "content": "fn to_vec(input: &str) -> Vec<Entry> {\n\n input.split(' ').map(|s| s.parse().unwrap()).collect()\n\n}\n\n\n", "file_path": "2018/src/day08.rs", "rank": 50, "score": 330740.07428742026 }, { "content": "fn parse(input: &str) -> Vec<Vec<char>> {\n\n input.trim().lines().map(|s| s.chars().collect()).collect()\n\n}\n\n\n\nuse ChunkType::*;\n", "file_path": "2021/src/day10.rs", "rank": 51, "score": 329750.4619563146 }, { "content": "fn discs_from(input: Vec<&str>) -> Vec<Disc> {\n\n input.into_iter().map(Disc::from).collect()\n\n}\n\n\n", "file_path": "2016/src/day15.rs", "rank": 52, "score": 329750.4619563146 }, { "content": "fn parse(input: &str) -> Vec<Vec<u8>> {\n\n input\n\n .trim()\n\n .lines()\n\n .map(|s| s.bytes().collect::<Vec<_>>())\n\n .collect()\n\n}\n\n\n", "file_path": "2022/src/day03.rs", "rank": 53, "score": 329750.4619563146 }, { "content": "fn infections_after_bursts_part2(input: Vec<&str>, burst_count: usize) -> usize {\n\n 
infections_after_bursts(input, burst_count, Part::Two)\n\n}\n\n\n", "file_path": "2017/src/day22.rs", "rank": 54, "score": 328955.0684712877 }, { "content": "fn infections_after_bursts_part1(input: Vec<&str>, burst_count: usize) -> usize {\n\n infections_after_bursts(input, burst_count, Part::One)\n\n}\n\n\n", "file_path": "2017/src/day22.rs", "rank": 55, "score": 328955.0684712877 }, { "content": "fn parse_input(input: &[&str]) -> (Vec<Sample>, Vec<Instruction>) {\n\n // samples are divided by single empty lines from each other\n\n let parts = input.split(|line| line.is_empty());\n\n // after the samples there are 3 empty lines, so the parts from above\n\n // will be contain two empty slices between samples and program\n\n let samples: Vec<Sample> = parts\n\n .clone()\n\n .take_while(|slice| !slice.is_empty())\n\n .map(|sample| parse_sample(sample))\n\n .collect();\n\n let instructions: &[&str] = parts.skip_while(|slice| !slice.is_empty()).nth(2).unwrap();\n\n let program = parse_program(instructions);\n\n (samples, program)\n\n}\n\n\n", "file_path": "2018/src/day16.rs", "rank": 56, "score": 326805.2297037762 }, { "content": "fn shortest_path(input: Vec<&str>, part: Part) -> usize {\n\n let maze = Maze::from(input);\n\n maze.length_of_shortest_path_from_start_to_end(part)\n\n}\n\n\n", "file_path": "2019/src/day20.rs", "rank": 57, "score": 326535.6816166745 }, { "content": "fn parse_input(input: &str) -> Vec<bool> {\n\n input.chars().map(|c| c == '^').collect()\n\n}\n\n\n", "file_path": "2016/src/day18.rs", "rank": 58, "score": 325281.30004883406 }, { "content": "fn parse_reindeer(input: Vec<&str>) -> Vec<Reindeer> {\n\n let reindeer: Vec<_> = input.iter().map(Reindeer::from).collect();\n\n reindeer\n\n}\n\n\n", "file_path": "2015/src/day14.rs", "rank": 59, "score": 324590.68871977454 }, { "content": "fn parse_ingredients(input: Vec<&str>) -> Vec<Ingredient> {\n\n input.into_iter().map(Ingredient::from).collect()\n\n}\n\n\n", "file_path": "2015/src/day15.rs", 
"rank": 60, "score": 324590.68871977454 }, { "content": "fn parse_weights(input: Vec<&str>) -> Vec<Weight> {\n\n input.into_iter().map(|l| l.parse().unwrap()).collect()\n\n}\n\n\n", "file_path": "2015/src/day24.rs", "rank": 61, "score": 324590.68871977454 }, { "content": "fn parse_instructions(input: Vec<&str>) -> Vec<Instruction> {\n\n input.into_iter().map(Instruction::from).collect()\n\n}\n\n\n", "file_path": "2015/src/day23.rs", "rank": 62, "score": 324590.68871977454 }, { "content": "fn parse_operations(input: Vec<&str>) -> Vec<Op> {\n\n input.into_iter().map(Op::from).collect()\n\n}\n\n\n", "file_path": "2016/src/day21.rs", "rank": 63, "score": 324590.68871977454 }, { "content": "fn count_used_cells(input: &str) -> usize {\n\n let grid = generate_grid(input);\n\n grid.iter()\n\n .map(|row| row.iter().filter(|&s| s == &State::Used).count())\n\n .sum()\n\n}\n\n\n", "file_path": "2017/src/day14.rs", "rank": 64, "score": 323508.9439927166 }, { "content": "fn shuffle_deck(size: usize, input: Vec<&str>) -> FullDeck {\n\n let mut deck: Vec<_> = (0..size).into_iter().collect();\n\n let techniques = parse_shuffle_techniques(input);\n\n deck.shuffle_with(&techniques);\n\n deck\n\n}\n\n\n", "file_path": "2019/src/day22.rs", "rank": 65, "score": 320665.3452603865 }, { "content": "fn parse_rules(input: Vec<&str>) -> Vec<IpRange> {\n\n input\n\n .into_iter()\n\n .map(|s| {\n\n let (from, to) = s.split_once('-').unwrap();\n\n from.parse().unwrap()..=to.parse().unwrap()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2016/src/day20.rs", "rank": 66, "score": 319725.45182769693 }, { "content": "fn split_into_guard_shifts(input: &[&str]) -> Vec<Vec<String>> {\n\n let mut sorted = input.to_vec();\n\n sorted.sort();\n\n let joined = sorted.join(\"\\n\");\n\n joined\n\n .split(\"Guard #\")\n\n .skip(1)\n\n .map(|shift| shift.split('\\n').map(str::to_string).collect())\n\n .collect()\n\n}\n\n\n", "file_path": "2018/src/day04.rs", "rank": 67, "score": 319725.45182769693 }, { 
"content": "fn parse(input: &str) -> impl Iterator<Item = &str> {\n\n input.trim().split(',')\n\n}\n\n\n", "file_path": "2023/src/day15.rs", "rank": 68, "score": 319238.5741625251 }, { "content": "fn parse_connections(input: Vec<&str>) -> HashMap<usize, HashSet<usize>> {\n\n let mut connections = HashMap::new();\n\n for line in input {\n\n let (source, destinations) = line.split_once(\" <-> \").unwrap();\n\n let source: usize = source.parse().unwrap();\n\n let destinations: HashSet<usize> = destinations\n\n .split(\", \")\n\n .map(|n| n.parse().unwrap())\n\n .collect();\n\n connections.insert(source, destinations);\n\n }\n\n connections\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n const EXAMPLE1: &str = \"\\\n", "file_path": "2017/src/day12.rs", "rank": 69, "score": 317644.89327542717 }, { "content": "fn parse(input: &str) -> Vec<Command> {\n\n input.trim().lines().map(Command::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day09.rs", "rank": 70, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Direction> {\n\n input\n\n .trim()\n\n .chars()\n\n .map(|c| if c == '<' { Left } else { Right })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \">>><<><>><<<>><>>><<<>>><<<><<<>><>><<>>\";\n\n\n\n #[test]\n\n fn part1_example() {\n\n let directions = parse(EXAMPLE);\n\n assert_eq!(3_068, tower_height(directions, P1_ROUNDS));\n\n }\n\n #[test]\n", "file_path": "2022/src/day17.rs", "rank": 71, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Command> {\n\n input.trim().lines().map(Command::from).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"\\\n\nforward 5\n\ndown 5\n\nforward 8\n\nup 3\n\ndown 8\n\nforward 2\";\n\n\n\n #[test]\n\n fn example1() {\n\n let commands = parse(EXAMPLE);\n\n assert_eq!(150, follow_part1_commands(commands));\n", "file_path": 
"2021/src/day02.rs", "rank": 72, "score": 317534.34052235505 }, { "content": "fn to_points(input: &[&str]) -> Vec<Point> {\n\n input\n\n .iter()\n\n .map(|line| Point::parse(line).unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "2018/src/day10.rs", "rank": 73, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Brick> {\n\n input.trim().lines().map(Brick::from).collect()\n\n}\n\n\n\nimpl Brick {\n\n #[cfg(test)]\n\n fn single(x: Coord, y: Coord, z: Coord) -> Self {\n\n Brick {\n\n top: Position { x, y, z },\n\n bottom: Position { x, y, z },\n\n }\n\n }\n\n #[cfg(test)]\n\n fn new(x1: Coord, y1: Coord, z1: Coord, x2: Coord, y2: Coord, z2: Coord) -> Self {\n\n if x1 == x2 && (y1 == y2 || z1 == z2) || y1 == y2 && z1 == z2 {\n\n let p1 = Position::new(x1, y1, z1);\n\n let p2 = Position::new(x2, y2, z2);\n\n let (top, bottom) = match p1.z > p2.z {\n\n true => (p1, p2),\n\n false => (p2, p1),\n", "file_path": "2023/src/day22.rs", "rank": 74, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Race> {\n\n let (times, distances) = input.trim().split_once('\\n').expect(\"2 lines\");\n\n let times: Vec<_> = times\n\n .strip_prefix(\"Time:\")\n\n .expect(\"'Time:' prefix\")\n\n .trim()\n\n .split_ascii_whitespace()\n\n .filter_map(|n| n.parse().ok())\n\n .collect();\n\n let distances: Vec<_> = distances\n\n .strip_prefix(\"Distance:\")\n\n .expect(\"'Distance:' prefix\")\n\n .trim()\n\n .split_ascii_whitespace()\n\n .filter_map(|n| n.parse().ok())\n\n .collect();\n\n\n\n times\n\n .into_iter()\n\n .zip(distances)\n\n .map(|(total_time, distance)| Race {\n\n total_time,\n\n distance,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2023/src/day06.rs", "rank": 75, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Line> {\n\n input.trim().lines().map(Line::from).collect()\n\n}\n\n\n", "file_path": "2021/src/day05.rs", "rank": 76, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Voxel> 
{\n\n input.trim().lines().map(Voxel::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day18.rs", "rank": 77, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Monkey> {\n\n input.trim().split(\"\\n\\n\").map(Monkey::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day11.rs", "rank": 78, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Op> {\n\n input.trim().lines().map(Op::from).collect()\n\n}\n\n\n", "file_path": "2022/src/day10.rs", "rank": 79, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<char> {\n\n input.trim().chars().collect()\n\n}\n\n\n", "file_path": "2022/src/day06.rs", "rank": 80, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Signal> {\n\n input\n\n .trim()\n\n .lines()\n\n .map(|s| {\n\n let (inputs, outputs) = s.split_once(\" | \").unwrap();\n\n let inputs = inputs\n\n .split_ascii_whitespace()\n\n .map(|s| s.chars().collect())\n\n .collect();\n\n let outputs = outputs\n\n .split_ascii_whitespace()\n\n .map(|s| s.chars().collect())\n\n .collect();\n\n (inputs, outputs)\n\n })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "2021/src/day08.rs", "rank": 81, "score": 317534.34052235505 }, { "content": "fn parse(input: &str) -> Vec<Instructions> {\n\n input.trim().lines().map(Instructions::from).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"\\\n\nA Y\n\nB X\n\nC Z\";\n\n\n\n #[test]\n\n fn example1() {\n\n let rounds = parse(EXAMPLE);\n\n assert_eq!(15, follow_part1_rounds(rounds));\n\n }\n\n\n\n #[test]\n", "file_path": "2022/src/day02.rs", "rank": 82, "score": 317534.34052235505 }, { "content": "fn count_regions_of_used_cells(input: &str) -> usize {\n\n let mut grid = generate_grid(input);\n\n let mut locations_to_visit = locations_to_visit(&mut grid);\n\n\n\n let mut unique_region_count = 0;\n\n while let Some((x, y)) = locations_to_visit.pop() {\n\n if grid[y][x] == State::Labeled 
{\n\n continue;\n\n }\n\n unique_region_count += 1;\n\n let mut this_region = vec![(x, y)];\n\n while let Some((x, y)) = this_region.pop() {\n\n grid[y][x] = State::Labeled;\n\n safe_adjacent_neighbors(x, y)\n\n .into_iter()\n\n .filter(|(x, y)| grid[*y][*x] == State::Used)\n\n .for_each(|(x, y)| this_region.push((x, y)));\n\n }\n\n }\n\n unique_region_count\n\n}\n\n\n", "file_path": "2017/src/day14.rs", "rank": 83, "score": 316840.41144118615 }, { "content": "fn follow_path_and_return_letters_and_step_count(input: Vec<&str>) -> (String, usize) {\n\n let grid = parse_input(input);\n\n let mut y = 0;\n\n let mut x = grid[y].iter().position(|c| c == &'|').unwrap();\n\n let mut dir = Dir::South;\n\n let mut collected = vec![];\n\n let mut step_count = 0;\n\n let tile_at = |x, y| grid.get(y).and_then(|line: &Vec<char>| line.get(x));\n\n while let Some(tile) = tile_at(x, y) {\n\n // println!(\"({}, {}) {} {}\", x, y, dir, tile);\n\n match tile {\n\n '+' => match dir {\n\n Dir::South | Dir::North => {\n\n if let Some(' ' | '|') = tile_at(x + 1, y) {\n\n x -= 1;\n\n dir = Dir::West\n\n } else {\n\n x += 1;\n\n dir = Dir::East\n\n }\n", "file_path": "2017/src/day19.rs", "rank": 84, "score": 316702.48001766845 }, { "content": "fn find_high_score(input: Vec<&str>, fix_calories: bool) -> isize {\n\n let ingredients = parse_ingredients(input);\n\n let mut score = 0;\n\n for i0 in 0..=TOTAL_AMOUNT {\n\n if ingredients.len() == 2 {\n\n let i1 = TOTAL_AMOUNT - i0;\n\n let amounts = vec![i0, i1];\n\n score = score.max(calc_score(&ingredients, &amounts, fix_calories));\n\n } else {\n\n for i1 in 0..=(TOTAL_AMOUNT - i0) {\n\n for i2 in 0..=(TOTAL_AMOUNT - i0 - i1) {\n\n let i3 = TOTAL_AMOUNT - i0 - i1 - i2;\n\n let amounts = vec![i0, i1, i2, i3];\n\n score = score.max(calc_score(&ingredients, &amounts, fix_calories));\n\n }\n\n }\n\n }\n\n }\n\n score\n\n}\n\n\n\nconst TARGET_CALORIES: isize = 500;\n", "file_path": "2015/src/day15.rs", "rank": 85, "score": 315559.218611642 }, { 
"content": "fn part1_hash_checksum(max_idx: u8, input: Vec<&str>) -> usize {\n\n let lengths = input[0].split(',').map(|i| i.parse().unwrap()).collect();\n\n let mut hasher = Hasher::new(max_idx, lengths);\n\n hasher.do_hash_cycle();\n\n hasher.rotate_to_start_idx();\n\n hasher.ring[0] as usize * hasher.ring[1] as usize\n\n}\n\n\n", "file_path": "2017/src/day10.rs", "rank": 86, "score": 315136.0122844686 }, { "content": "fn generate_boolvec_of_length(input: &str, min_len: usize) -> Vec<bool> {\n\n let mut output = input.to_boolvec();\n\n while output.len() < min_len {\n\n output = lengthen_boolvec(output);\n\n }\n\n output\n\n}\n", "file_path": "2016/src/day16.rs", "rank": 87, "score": 315136.0122844686 }, { "content": "fn parse_shuffle_techniques(input: Vec<&str>) -> Vec<ShuffleTechnique> {\n\n input.into_iter().map(ShuffleTechnique::from).collect()\n\n}\n\n\n", "file_path": "2019/src/day22.rs", "rank": 88, "score": 315129.72151066153 }, { "content": "fn parse_firewalls(input: Vec<&str>) -> Vec<Option<Firewall>> {\n\n let layer_count = parse_firewall(input.last().unwrap()).0 + 1;\n\n let mut firewalls = vec![None; layer_count];\n\n input\n\n .iter()\n\n .map(|s| parse_firewall(s))\n\n .for_each(|(depth, firewall)| firewalls[depth] = Some(firewall));\n\n\n\n firewalls\n\n}\n\n\n", "file_path": "2017/src/day13.rs", "rank": 89, "score": 313085.4155559415 }, { "content": "fn ways_to_fill_containers(input: Vec<&str>, total: usize, part: Part) -> usize {\n\n let mut containers = parse_containers(input);\n\n\n\n // Optimization: The containers are sorted by largest first, so that\n\n // aborting when sum > total happens with as few elements as possible.\n\n containers.sort_unstable_by(|a, b| a.cmp(b).reverse());\n\n\n\n let combination_count = 2usize.pow(containers.len() as u32);\n\n let matches: Vec<_> = (0..combination_count)\n\n .into_iter()\n\n .filter_map(|combination_bitmask| {\n\n let mut sum = 0;\n\n let mut count = 0;\n\n for i in (0..containers.len())\n\n 
.into_iter()\n\n .filter(|i| (combination_bitmask >> i & 1) == 1)\n\n {\n\n count += 1;\n\n sum += containers[i];\n\n if sum > total {\n", "file_path": "2015/src/day17.rs", "rank": 90, "score": 312054.3290450131 }, { "content": "fn infections_after_bursts(input: Vec<&str>, burst_count: usize, part: Part) -> usize {\n\n let mut grid = parse_input(&input);\n\n let mut curr_pos = Loc::new((input[0].len() as isize) / 2, (input.len() / 2) as isize);\n\n let mut infections_caused = 0;\n\n let mut dir = Dir::Up;\n\n for _ in 0..burst_count {\n\n let is_infected = grid.entry(curr_pos).or_insert(NodeState::Clean);\n\n match is_infected {\n\n NodeState::Clean => {\n\n if part == Part::One {\n\n infections_caused += 1;\n\n }\n\n dir.turn_left();\n\n }\n\n NodeState::Weakened => {\n\n if part == Part::Two {\n\n infections_caused += 1;\n\n }\n\n }\n\n NodeState::Infected => {\n", "file_path": "2017/src/day22.rs", "rank": 91, "score": 312054.3290450131 }, { "content": "fn parse_games(input: &str) -> Vec<Game> {\n\n input.trim().lines().map(Game::from).collect()\n\n}\n\n\n", "file_path": "2023/src/day02.rs", "rank": 92, "score": 311851.41467927146 }, { "content": "fn part2_hash(input: Vec<&str>) -> String {\n\n day10_part2_hash(input[0])\n\n}\n\n\n\npub(crate) fn day10_part2_hash(input: &str) -> String {\n\n let mut lengths: Vec<u8> = input.chars().map(|c| c as u8).collect();\n\n lengths.extend_from_slice(&[17, 31, 73, 47, 23]);\n\n let mut hasher = Hasher::new(255, lengths);\n\n (0..64).into_iter().for_each(|_| {\n\n hasher.do_hash_cycle();\n\n });\n\n hasher.sparse_hash()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parse;\n\n\n\n #[test]\n", "file_path": "2017/src/day10.rs", "rank": 93, "score": 311851.4146792715 }, { "content": "fn parse_blueprints(input: &str) -> Vec<Blueprint> {\n\n input.lines().map(Blueprint::from).collect()\n\n}\n\n\n\nimpl Display for ResourceType {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n 
write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Geode => \"Geode\",\n\n Obsidian => \"Obsidian\",\n\n Clay => \"Clay\",\n\n Ore => \"Ore\",\n\n }\n\n )\n\n }\n\n}\n\nimpl From<&str> for ResourceType {\n\n fn from(s: &str) -> Self {\n", "file_path": "2022/src/day19.rs", "rank": 94, "score": 311851.41467927146 }, { "content": "fn parse_packets(input: &str) -> Vec<Packet> {\n\n input\n\n .lines()\n\n .filter(|line| !line.is_empty())\n\n .map(Packet::from)\n\n .collect()\n\n}\n\n\n", "file_path": "2022/src/day13.rs", "rank": 95, "score": 311851.4146792715 }, { "content": "fn parse(input: &str) -> Vec<SnafuNumber> {\n\n input.trim().lines().map(SnafuNumber::from).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"\\\n\n1=-0-2\n\n12111\n\n2=0=\n\n21\n\n2=01\n\n111\n\n20012\n\n112\n\n1=-1=\n\n1-12\n\n12\n", "file_path": "2022/src/day25.rs", "rank": 96, "score": 311851.4146792715 }, { "content": "fn hex_to_bits(input: &str) -> Vec<BitDigit> {\n\n input\n\n .trim()\n\n .chars()\n\n .map(|c| c.to_digit(16).unwrap() as u8)\n\n .flat_map(|n| format!(\"{:04b}\", n).chars().collect::<Vec<_>>())\n\n .map(|c| c.to_digit(10).unwrap() as BitDigit)\n\n .collect()\n\n}\n\n\n", "file_path": "2021/src/day16.rs", "rank": 97, "score": 306530.1156200062 }, { "content": "fn strength_of_strongest_bridge(input: Vec<&str>) -> Strength {\n\n let mut components = parse_input(input);\n\n *strengths_and_lengths(0, 0, &0, &mut components[..])\n\n .iter()\n\n .map(|(strength, _length)| strength)\n\n .max()\n\n .unwrap()\n\n}\n\n\n", "file_path": "2017/src/day24.rs", "rank": 98, "score": 306530.1156200062 }, { "content": "fn strength_of_longest_bridge(input: Vec<&str>) -> Strength {\n\n let mut components = parse_input(input);\n\n strengths_and_lengths(0, 0, &0, &mut components[..])\n\n .iter()\n\n .max_by(|a, b| match a.1.cmp(&b.1) {\n\n len @ (Ordering::Less | Ordering::Greater) => len,\n\n Ordering::Equal => a.0.cmp(&b.0),\n\n })\n\n .unwrap()\n\n 
.0\n\n}\n\n\n", "file_path": "2017/src/day24.rs", "rank": 99, "score": 306530.1156200062 } ]
Rust
crates/content-tree/src/leaf.rs
josephg/diamond-types
5e32135c760310964172adce5c4668034fe8bf35
use std::mem::take; use std::ptr::NonNull; use rle::Searchable; use super::*; impl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub(crate) unsafe fn new(next: Option<NonNull<Self>>) -> Self { Self::new_with_parent(ParentPtr::Root(NonNull::dangling()), next) } pub(crate) fn new_with_parent(parent: ParentPtr<E, I, IE, LE>, next: Option<NonNull<Self>>) -> Self { Self { parent, data: [E::default(); LE], num_entries: 0, _pin: PhantomPinned, next, } } pub fn find_offset<F>(&self, mut offset: usize, stick_end: bool, entry_to_num: F) -> Option<(usize, usize)> where F: Fn(E) -> usize { for i in 0..self.len_entries() { let entry: E = self.data[i]; let entry_len = entry_to_num(entry); if offset < entry_len || (stick_end && entry_len == offset) { return Some((i, offset)); } else { offset -= entry_len } } if offset == 0 { Some((self.len_entries(), 0)) } else { None } } pub fn next_leaf(&self) -> Option<NonNull<Self>> { self.next } pub fn prev_leaf(&self) -> Option<NonNull<Self>> { self.adjacent_leaf_by_traversal(false) } pub(crate) fn adjacent_leaf_by_traversal(&self, direction_forward: bool) -> Option<NonNull<Self>> { let mut parent = self.parent; let mut node_ptr = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); loop { match parent { ParentPtr::Root(_) => { return None; }, ParentPtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let idx = node_ref.find_child(node_ptr).unwrap(); let next_idx: Option<usize> = if direction_forward { let next_idx = idx + 1; if (next_idx < IE) && node_ref.children[next_idx].is_some() { Some(next_idx) } else { None } } else if idx > 0 { Some(idx - 1) } else { None }; if let Some(next_idx) = next_idx { node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; break; } else { node_ptr = NodePtr::Internal(unsafe { NonNull::new_unchecked(node_ref as *const _ as *mut _) }); parent = node_ref.parent; } } } } loop { match node_ptr { 
NodePtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let next_idx = if direction_forward { 0 } else { let num_children = node_ref.count_children(); assert!(num_children > 0); num_children - 1 }; node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; }, NodePtr::Leaf(n) => { return Some(n); } } } } pub fn len_entries(&self) -> usize { self.num_entries as usize } pub fn as_slice(&self) -> &[E] { &self.data[0..self.num_entries as usize] } pub fn update_parent_count(&mut self, amt: I::Update) { if amt == I::Update::default() { return; } let mut child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self) }); let mut parent = self.parent; loop { match parent { ParentPtr::Root(mut r) => { unsafe { I::update_offset_by_marker(&mut r.as_mut().count, &amt); } break; }, ParentPtr::Internal(mut n) => { let idx = unsafe { n.as_mut() }.find_child(child).unwrap(); let c = &mut unsafe { n.as_mut() }.metrics[idx]; I::update_offset_by_marker(c, &amt); child = NodePtr::Internal(n); parent = unsafe { n.as_mut() }.parent; }, }; } } pub fn flush_metric_update(&mut self, marker: &mut I::Update) { let amt = take(marker); self.update_parent_count(amt); } pub fn has_root_as_parent(&self) -> bool { self.parent.is_root() } pub fn count_items(&self) -> I::Value { if I::CAN_COUNT_ITEMS { match self.parent { ParentPtr::Root(root) => { unsafe { root.as_ref() }.count } ParentPtr::Internal(node) => { let child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); let idx = unsafe { node.as_ref() }.find_child(child).unwrap(); unsafe { node.as_ref() }.metrics[idx] } } } else { let mut val = I::Value::default(); for elem in self.data[..self.num_entries as usize].iter() { I::increment_offset(&mut val, elem); } val } } pub fn splice_out(&mut self, idx: usize) { debug_assert!(idx < self.num_entries as usize); self.data.copy_within(idx + 1..self.num_entries as usize, idx); self.num_entries -= 1; } pub fn clear_all(&mut self) { self.num_entries = 
0; } pub fn unsafe_cursor_at_start(&self) -> UnsafeCursor<E, I, IE, LE> { UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, 0, 0 ) } } impl<E: ContentTraits + Searchable, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub fn find(&self, loc: E::Item) -> Option<UnsafeCursor<E, I, IE, LE>> { for i in 0..self.len_entries() { let entry: E = self.data[i]; if let Some(offset) = entry.get_offset(loc) { debug_assert!(offset < entry.len()); return Some(UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, i, offset )) } } None } }
use std::mem::take; use std::ptr::NonNull; use rle::Searchable; use super::*; impl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub(crate) unsafe fn new(next: Option<NonNull<Self>>) -> Self { Self::new_with_parent(ParentPtr::Root(NonNull::dangling()), next) } pub(crate) fn new_with_parent(parent: ParentPtr<E, I, IE, LE>, next: Option<NonNull<Self>>) -> Self { Self { parent, data: [E::default(); LE], num_entries: 0, _pin: PhantomPinned, next, } } pub fn find_offset<F>(&self, mut offset: usize, stick_end: bool, entry_to_num: F) -> Option<(usize, usize)> where F: Fn(E) -> usize { for i in 0..self.len_entries() { let entry: E = self.data[i]; let entry_len = entry_to_num(entry); if offset < entry_len || (stick_end && entry_len == offset) { return Some((i, offset)); } else { offset -= entry_len } } if offset == 0 { Some((self.len_entries(), 0)) } else { None } } pub fn next_leaf(&self) -> Option<NonNull<Self>> { self.next } pub fn prev_leaf(&self) -> Option<NonNull<Self>> { self.adjacent_leaf_by_traversal(false) } pub(crate) fn adjacent_leaf_by_traversal(&self, direction_forward: bool) -> Option<NonNull<Self>> { let mut parent = self.parent; let mut node_ptr = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); loop { match parent { ParentPtr::Root(_) => { return None; }, ParentPtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let idx = node_ref.find_child(node_ptr).unwrap(); let next_idx: Option<usize> = if direction_forward { let next_idx = idx + 1; if (next_idx < IE) && node_ref.children[next_idx].is_some() { Some(next_idx) } else { None } } else if idx > 0 { Some(idx - 1) } else { None }; if let Some(next_idx) = next_idx { node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; break; } else { node_ptr = NodePtr::Internal(unsafe { NonNull::new_unchecked(node_ref as *const _ as *mut _) }); parent = node_ref.parent; } } } } loop { match node_ptr { 
NodePtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let next_idx = if direction_forward { 0 } else { let num_children = node_ref.count_children(); assert!(num_children > 0); num_children - 1 }; node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; }, NodePtr::Leaf(n) => { return Some(n); } } } } pub fn len_entries(&self) -> usize { self.num_entries as usize } pub fn as_slice(&self) -> &[E] { &self.data[0..self.num_entries as usize] } pub fn update_parent_count(&mut self, amt: I::Update) { if amt == I::Update::default() { return; } let mut child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self) }); let mut parent = self.parent; loop { match parent { ParentPtr::Root(mut r) => { unsafe { I::update_offset_by_marker(&mut r.as_mut().count, &amt); } break; }, ParentPtr::Internal(mut n) => { let idx = unsafe { n.as_mut() }.find_child(child).unwrap(); let c = &mut unsafe { n.as_mut() }.metrics[idx]; I::update_offset_by_marker(c, &amt); child = NodePtr::Internal(n); parent = unsafe { n.as_mut() }.parent; }, }; } } pub fn flush_metric_update(&mut self, marker: &mut I::Update) { let amt = take(marker); self.update_parent_count(amt); } pub fn has_root_as_parent(&self) -> bool { self.parent.is_root() } pub fn count_items(&self) -> I::Value { if I::CAN_COUNT_ITEMS { match self.parent { ParentPtr::Root(root) => { unsafe { root.as_ref() }.count } ParentPtr::Internal(node) => { let child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); let idx = unsafe { node.as_ref() }.find_child(child).unwrap(); unsafe { node.as_ref() }.metrics[idx] } } } else { let mut val = I::Value::default(); for elem in self.data[..self.num_entries as usize].iter() { I::increment_offset(&mut val, elem); } val } } pub fn splice_out(&mut self, idx: usize) { debug_assert!(idx < self.num_entries as usize); self.data.copy_within(idx + 1..self.num_entries as usize, idx); self.num_entries -= 1; } pub fn clear_all(&mut self) { self.num_entries = 
0; } pub fn unsafe_cursor_at_start(&self) -> UnsafeCursor<E, I, IE, LE> { UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, 0, 0 ) } } impl<E: ContentTraits + Searchable, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub fn find(&self, loc: E::Item) -> Option<UnsafeCursor<E, I, IE, LE>> { for i in 0..self.len_entries() { let entry: E = self.data[i]; if let Some(offset) = entry.get_offset(loc) { debug_assert!(offset < entry.len()); return
} } None } }
Some(UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, i, offset ))
call_expression
[ { "content": "struct DebugContent<'a, E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize>(&'a ContentTreeRaw<E, I, IE, LE>);\n\n\n\nimpl<'a, E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> Debug for DebugContent<'a, E, I, IE, LE> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.debug_list()\n\n .entries(self.0.iter())\n\n .finish()\n\n }\n\n}\n\n\n\n\n\nimpl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> Debug for ContentTreeRaw<E, I, IE, LE> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"ContentTree\")\n\n .field(\"count\", &self.count)\n\n .field(\"(content)\", &DebugContent(self))\n\n .finish()\n\n }\n\n}", "file_path": "crates/content-tree/src/debug.rs", "rank": 0, "score": 369885.6494309249 }, { "content": "fn push_run_2<const INC: bool>(into: &mut Vec<u8>, val: Run2<INC>) {\n\n let mut dest = [0u8; 20];\n\n let mut pos = 0;\n\n pos += encode_i64_with_extra_bit(val.diff as i64, val.len != 1, &mut dest[..]);\n\n if val.len != 1 {\n\n pos += encode_u64(val.len as u64, &mut dest[pos..]);\n\n }\n\n\n\n into.extend_from_slice(&dest[..pos]);\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/mod.rs", "rank": 1, "score": 300653.43287811556 }, { "content": "pub fn encode_u32_with_extra_bit_2(value: u32, extra_1: bool, extra_2: bool, buf: &mut[u8]) -> usize {\n\n debug_assert!(value < (u32::MAX >> 2));\n\n let val_2 = (value << 2) + ((extra_1 as u32) << 1) + (extra_2 as u32);\n\n encode_u32(val_2, buf)\n\n}\n\n\n\npub(crate) fn mix_bit_u64(value: u64, extra: bool) -> u64 {\n\n debug_assert!(value < u64::MAX / 2);\n\n value * 2 + extra as u64\n\n}\n\n\n\npub(crate) fn mix_bit_u32(value: u32, extra: bool) -> u32 {\n\n debug_assert!(value < u32::MAX / 2);\n\n value * 2 + extra as u32\n\n}\n\n\n\n// TODO: Remove this method. 
Callers should just use mix_bit.\n\npub(crate) fn num_encode_i64_with_extra_bit(value: i64, extra: bool) -> u64 {\n\n // We only have enough remaining bits in the u64 encoding to fit +/- 2^62.\n\n debug_assert!(value.abs() < (i64::MAX / 2));\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 2, "score": 284893.53509830806 }, { "content": "pub fn encode_u32_with_extra_bit_2(value: u32, extra_1: bool, extra_2: bool, buf: &mut[u8]) -> usize {\n\n debug_assert!(value < (u32::MAX >> 2));\n\n let val_2 = (value << 2) + ((extra_1 as u32) << 1) + (extra_2 as u32);\n\n encode_u32(val_2, buf)\n\n}\n\n\n\n#[inline]\n\npub(crate) fn mix_bit_u64(value: u64, extra: bool) -> u64 {\n\n debug_assert!(value < u64::MAX >> 1);\n\n value * 2 + extra as u64\n\n}\n\n\n\npub(crate) fn mix_bit_u32(value: u32, extra: bool) -> u32 {\n\n debug_assert!(value < u32::MAX >> 1);\n\n value * 2 + extra as u32\n\n}\n\n\n\npub(crate) fn mix_bit_usize(value: usize, extra: bool) -> usize {\n\n debug_assert!(value < usize::MAX >> 1);\n\n if cfg!(target_pointer_width = \"16\") || cfg!(target_pointer_width = \"32\") {\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 3, "score": 282455.26623870153 }, { "content": "pub fn encode_u32_with_extra_bit(value: u32, extra: bool, buf: &mut[u8]) -> usize {\n\n debug_assert!(value < (u32::MAX >> 1));\n\n let val_2 = value * 2 + (extra as u32);\n\n encode_u32(val_2, buf)\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 4, "score": 275361.9872980559 }, { "content": "pub fn encode_i64_with_extra_bit(value: i64, extra: bool, buf: &mut[u8]) -> usize {\n\n encode_u64(num_encode_i64_with_extra_bit(value, extra), buf)\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 5, "score": 275361.9872980559 }, { "content": "pub fn encode_u32_with_extra_bit(value: u32, extra: bool, buf: &mut[u8]) -> usize {\n\n debug_assert!(value < (u32::MAX >> 
1));\n\n let val_2 = value * 2 + (extra as u32);\n\n encode_u32(val_2, buf)\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 6, "score": 272718.0555167838 }, { "content": "fn write_parents(into: &mut Vec<u8>, val: Parents) {\n\n // dbg!(&val);\n\n let mut iter = val.parents.iter().peekable();\n\n while let Some(&p) = iter.next() {\n\n let is_last = iter.peek().is_none();\n\n let mut diff = val.order.start.wrapping_sub(p);\n\n diff = mix_bit_u32(diff, is_last);\n\n // dbg!(diff);\n\n push_u32(into, diff);\n\n }\n\n\n\n push_u32(into, val.order.order_len());\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use rle::test_splitable_methods_valid;\n\n\n\n use crate::list::encoding::*;\n\n use crate::list::external_txn::{RemoteCRDTOp, RemoteId, RemoteTxn};\n", "file_path": "crates/diamond-types/src/list/encoding/mod.rs", "rank": 7, "score": 250952.4940129586 }, { "content": "fn push_usize(into: &mut Vec<u8>, val: usize) {\n\n if size_of::<usize>() <= size_of::<u32>() {\n\n push_u32(into, val as u32);\n\n } else if size_of::<usize>() == size_of::<u64>() {\n\n push_u64(into, val as u64);\n\n } else {\n\n panic!(\"usize larger than u64 is not supported\");\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/mod.rs", "rank": 8, "score": 250660.47897636815 }, { "content": "fn push_usize(into: &mut Vec<u8>, val: usize) {\n\n if size_of::<usize>() <= size_of::<u32>() {\n\n push_u32(into, val as u32);\n\n } else if size_of::<usize>() == size_of::<u64>() {\n\n push_u64(into, val as u64);\n\n } else {\n\n panic!(\"usize larger than u64 is not supported\");\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/mod.rs", "rank": 9, "score": 248271.2394752925 }, { "content": "/// Encode u32 value as varint.\n\n/// Panics if buffer length is less than 5.\n\npub fn encode_u32(mut value: u32, buf: &mut [u8]) -> usize {\n\n assert!(buf.len() >= 5);\n\n\n\n fn iter(value: &mut u32, byte: &mut u8) -> 
bool {\n\n if (*value & !0x7F) > 0 {\n\n *byte = ((*value & 0x7F) | 0x80) as u8;\n\n *value >>= 7;\n\n true\n\n } else {\n\n *byte = *value as u8;\n\n false\n\n }\n\n }\n\n\n\n // Explicitly unroll loop to avoid either\n\n // unsafe code or bound checking when writing to `buf`\n\n\n\n if !iter(&mut value, &mut buf[0]) {\n\n return 1;\n\n };\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 10, "score": 244428.80250125972 }, { "content": "/// Encode u64 as varint.\n\n/// Panics if buffer length is less than 10.\n\npub fn encode_u64(mut value: u64, buf: &mut [u8]) -> usize {\n\n assert!(buf.len() >= 10);\n\n\n\n fn iter(value: &mut u64, byte: &mut u8) -> bool {\n\n if (*value & !0x7F) > 0 {\n\n *byte = ((*value & 0x7F) | 0x80) as u8;\n\n *value >>= 7;\n\n true\n\n } else {\n\n *byte = *value as u8;\n\n false\n\n }\n\n }\n\n\n\n // Explicitly unroll loop to avoid either\n\n // unsafe code or bound checking when writing to `buf`\n\n\n\n if !iter(&mut value, &mut buf[0]) {\n\n return 1;\n\n };\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 11, "score": 244428.80250125972 }, { "content": "pub fn encode_usize(value: usize, buf: &mut [u8]) -> usize {\n\n if cfg!(target_pointer_width = \"16\") || cfg!(target_pointer_width = \"32\") {\n\n encode_u32(value as u32, buf)\n\n } else if cfg!(target_pointer_width = \"64\") {\n\n encode_u64(value as u64, buf)\n\n } else {\n\n panic!(\"Unsupported target pointer width\")\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 12, "score": 243803.97096883613 }, { "content": "/// Encode u32 value as varint.\n\n/// Panics if buffer length is less than 5.\n\npub fn encode_u32(mut value: u32, buf: &mut [u8]) -> usize {\n\n assert!(buf.len() >= 5);\n\n\n\n fn iter(value: &mut u32, byte: &mut u8) -> bool {\n\n if (*value & !0x7F) > 0 {\n\n *byte = ((*value & 0x7F) | 0x80) as u8;\n\n *value >>= 7;\n\n true\n\n } else {\n\n *byte = *value 
as u8;\n\n false\n\n }\n\n }\n\n\n\n // Explicitly unroll loop to avoid either\n\n // unsafe code or bound checking when writing to `buf`\n\n\n\n if !iter(&mut value, &mut buf[0]) {\n\n return 1;\n\n };\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 13, "score": 242230.19853071016 }, { "content": "/// Encode u64 as varint.\n\n/// Panics if buffer length is less than 10.\n\npub fn encode_u64(mut value: u64, buf: &mut [u8]) -> usize {\n\n assert!(buf.len() >= 10);\n\n\n\n fn iter(value: &mut u64, byte: &mut u8) -> bool {\n\n if (*value & !0x7F) > 0 {\n\n *byte = ((*value & 0x7F) | 0x80) as u8;\n\n *value >>= 7;\n\n true\n\n } else {\n\n *byte = *value as u8;\n\n false\n\n }\n\n }\n\n\n\n // Explicitly unroll loop to avoid either\n\n // unsafe code or bound checking when writing to `buf`\n\n\n\n if !iter(&mut value, &mut buf[0]) {\n\n return 1;\n\n };\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 14, "score": 242230.19853071016 }, { "content": "pub fn num_encode_zigzag_isize(val: isize) -> usize {\n\n // TODO: Figure out a way to write this that gives compiler errors instead of runtime errors.\n\n if cfg!(target_pointer_width = \"16\") || cfg!(target_pointer_width = \"32\") {\n\n num_encode_zigzag_i32(val as i32) as usize\n\n } else if cfg!(target_pointer_width = \"64\") {\n\n num_encode_zigzag_i64(val as i64) as usize\n\n } else {\n\n panic!(\"Unsupported target pointer width\")\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 15, "score": 241812.98950231494 }, { "content": "pub fn num_decode_zigzag_isize(val: usize) -> isize {\n\n if cfg!(target_pointer_width = \"16\") || cfg!(target_pointer_width = \"32\") {\n\n num_decode_zigzag_i32(val as u32) as isize\n\n } else if cfg!(target_pointer_width = \"64\") {\n\n num_decode_zigzag_i64(val as u64) as isize\n\n } else {\n\n panic!(\"Unsupported target pointer width\")\n\n }\n\n}\n\n\n", 
"file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 16, "score": 241812.98950231494 }, { "content": "pub fn ropey_benchmarks(c: &mut Criterion) {\n\n c.bench_function(\"ropey baseline\", |b| {\n\n let test_data = load_testing_data(\"benchmark_data/automerge-paper.json.gz\");\n\n\n\n assert_eq!(test_data.start_content.len(), 0);\n\n\n\n b.iter(|| {\n\n let mut string = Rope::new();\n\n for txn in test_data.txns.iter() {\n\n for TestPatch(pos, del_span, ins_content) in txn.patches.iter() {\n\n if *del_span > 0 {\n\n string.remove(*pos .. *pos + *del_span);\n\n }\n\n if !ins_content.is_empty() {\n\n string.insert(*pos, ins_content.as_str());\n\n }\n\n }\n\n }\n\n\n\n black_box(string);\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, ropey_benchmarks);\n\ncriterion_main!(benches);", "file_path": "crates/diamond-types/benches/ropey.rs", "rank": 17, "score": 239636.38681334234 }, { "content": "/// Simple test helper to verify an implementation of SplitableSpan is valid and meets expected\n\n/// constraints.\n\n///\n\n/// Use this to test splitablespan implementations in tests.\n\n// #[cfg(test)]\n\npub fn test_splitable_methods_valid<E: SplitAndJoinSpan + std::fmt::Debug + Clone + Eq>(entry: E) {\n\n assert!(entry.len() >= 2, \"Call this with a larger entry\");\n\n // dbg!(&entry);\n\n\n\n for i in 1..entry.len() {\n\n // Split here and make sure we get the expected results.\n\n let mut start = entry.clone();\n\n let end = start.truncate(i);\n\n // dbg!(&start, &end);\n\n\n\n assert_eq!(start.len(), i);\n\n assert_eq!(end.len(), entry.len() - i);\n\n\n\n // dbg!(&start, &end);\n\n assert!(start.can_append(&end));\n\n\n\n let mut merge_append = start.clone();\n\n\n\n // dbg!(&start, &end);\n\n merge_append.append(end.clone());\n", "file_path": "crates/rle/src/splitable_span.rs", "rank": 18, "score": 232795.46974927955 }, { "content": "pub fn random_str(len: usize, rng: &mut SmallRng) -> String {\n\n let mut str = String::new();\n\n let 
alphabet: Vec<char> = \"abcdefghijklmnop_\".chars().collect();\n\n for _ in 0..len {\n\n str.push(alphabet[rng.gen_range(0..alphabet.len())]);\n\n }\n\n str\n\n}\n\n\n", "file_path": "crates/diamond-types/tests/fuzzer.rs", "rank": 19, "score": 231691.54912228428 }, { "content": "pub fn count_chars(s: &str) -> usize {\n\n ropey::str_utils::byte_to_char_idx(s, s.len())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::unicount::*;\n\n\n\n // TODO: Run a microbenchmark to see how this performs in the wild.\n\n fn std_chars_to_bytes(s: &str, char_pos: usize) -> usize {\n\n s.char_indices().nth(char_pos).map_or_else(\n\n || s.len(),\n\n |(i, _)| i\n\n )\n\n }\n\n\n\n pub fn std_bytes_to_chars(s: &str, byte_pos: usize) -> usize {\n\n s[..byte_pos].chars().count()\n\n }\n\n\n", "file_path": "crates/diamond-types/src/unicount.rs", "rank": 20, "score": 229494.41691942723 }, { "content": "pub fn encode_i32(value: i32, buf: &mut[u8]) -> usize {\n\n encode_u32(num_encode_zigzag_i32(value), buf)\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 21, "score": 229256.18788931685 }, { "content": "pub fn encode_i64(value: i64, buf: &mut[u8]) -> usize {\n\n encode_u64(num_encode_zigzag_i64(value), buf)\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 22, "score": 229256.18788931685 }, { "content": "fn take_first_chars<'a>(s: &mut &'a str, count: usize) -> &'a str {\n\n let num_bytes = chars_to_bytes(s, count);\n\n let (first, remainder) = s.split_at(num_bytes);\n\n // result.content.push_str(first);\n\n *s = remainder;\n\n first\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/ot/ot.rs", "rank": 23, "score": 228038.03440772247 }, { "content": "pub fn encode_i32(value: i32, buf: &mut[u8]) -> usize {\n\n encode_u32(num_encode_zigzag_i32(value), buf)\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 24, "score": 226891.2734175844 }, { "content": "pub fn 
encode_i64(value: i64, buf: &mut[u8]) -> usize {\n\n encode_u64(num_encode_zigzag_i64(value), buf)\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 25, "score": 226891.2734175844 }, { "content": "pub trait FindOffset<E: ContentTraits>: TreeMetrics<E> {\n\n fn index_to_offset(offset: Self::Value) -> usize;\n\n}\n\n\n\n\n\n/// Content index - which just indexes based on the resulting size. Deletes are not counted.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct ContentMetrics;\n\n\n\nimpl<E: ContentTraits + ContentLength> TreeMetrics<E> for ContentMetrics {\n\n type Update = isize;\n\n type Value = usize; // TODO: Move this to a template parameter.\n\n\n\n fn increment_marker(marker: &mut Self::Update, entry: &E) {\n\n *marker += entry.content_len() as isize;\n\n }\n\n\n\n fn decrement_marker(marker: &mut Self::Update, entry: &E) {\n\n *marker -= entry.content_len() as isize;\n\n // dbg!(&marker, entry);\n", "file_path": "crates/content-tree/src/metrics.rs", "rank": 26, "score": 226729.38506733053 }, { "content": "pub fn count_chars(s: &str) -> usize {\n\n ropey::str_utils::byte_to_char_idx(s, s.len())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::unicount::*;\n\n\n\n fn std_chars_to_bytes(s: &str, char_pos: usize) -> usize {\n\n s.char_indices().nth(char_pos).map_or_else(\n\n || s.len(),\n\n |(i, _)| i\n\n )\n\n }\n\n\n\n pub fn std_bytes_to_chars(s: &str, byte_pos: usize) -> usize {\n\n s[..byte_pos].chars().count()\n\n }\n\n\n\n const TRICKY_CHARS: &[&str] = &[\n", "file_path": "crates/diamond-types-positional/src/unicount.rs", "rank": 27, "score": 226650.12140111945 }, { "content": "#[inline]\n\npub fn consume_chars<'a>(content: &mut &'a str, len: usize) -> &'a str {\n\n let (here, remaining) = split_at_char(*content, len);\n\n *content = remaining;\n\n here\n\n}\n\n\n", "file_path": "crates/diamond-types/src/unicount.rs", "rank": 28, "score": 225882.82387958432 }, { "content": "pub fn 
local_delete(opset: &mut OpLog, branch: &mut Branch, agent: AgentId, pos: usize, del_span: usize) -> Time {\n\n apply_local_operation(opset, branch, agent, &[Operation::new_delete(pos, del_span)])\n\n}\n", "file_path": "crates/diamond-types-positional/src/list/list.rs", "rank": 29, "score": 224764.45341150954 }, { "content": "pub fn random_str(len: usize, rng: &mut SmallRng) -> String {\n\n let mut str = String::new();\n\n let alphabet: Vec<char> = \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_\".chars().collect();\n\n for _ in 0..len {\n\n str.push(alphabet[rng.gen_range(0..alphabet.len())]);\n\n }\n\n str\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/m2/fuzzer.rs", "rank": 30, "score": 224593.79260678194 }, { "content": "#[inline]\n\n#[allow(unused)]\n\npub fn consume_chars<'a>(content: &mut &'a str, len: usize) -> &'a str {\n\n let (here, remaining) = split_at_char(*content, len);\n\n *content = remaining;\n\n here\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/unicount.rs", "rank": 31, "score": 223373.8205607824 }, { "content": "pub fn local_delete_with_content(opset: &mut OpLog, branch: &mut Branch, agent: AgentId, pos: usize, del_span: usize) -> Time {\n\n apply_local_operation(opset, branch, agent, &[branch.make_delete_op(pos, del_span)])\n\n}\n\n\n\nimpl Default for ListCRDT {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl ListCRDT {\n\n pub fn new() -> Self {\n\n Self {\n\n branch: Branch::new(),\n\n ops: OpLog::new()\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.branch.len()\n", "file_path": "crates/diamond-types-positional/src/list/list.rs", "rank": 32, "score": 223052.9158818147 }, { "content": "/// Helper when a notify function is not needed\n\npub fn null_notify<E, Node>(_e: E, _node: Node) {}\n\n\n\nimpl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> Node<E, I, IE, LE> {\n\n /// Unsafe: Created leaf has a dangling parent pointer. 
Must be set after initialization.\n\n // unsafe fn new_leaf() -> Self {\n\n // Node::Leaf(Box::pin(NodeLeaf::new()))\n\n // }\n\n // fn new_with_parent(parent: ParentPtr) -> Self {\n\n // Node::Leaf(Box::pin(NodeLeaf::new_with_parent(parent)))\n\n // }\n\n\n\n fn set_parent(&mut self, parent: ParentPtr<E, I, IE, LE>) {\n\n unsafe {\n\n match self {\n\n Node::Leaf(l) => l.as_mut().get_unchecked_mut().parent = parent,\n\n Node::Internal(i) => i.as_mut().get_unchecked_mut().parent = parent,\n\n }\n\n }\n\n }\n\n\n", "file_path": "crates/content-tree/src/lib.rs", "rank": 33, "score": 216085.19185472955 }, { "content": "// TODO: Give these methods some love, and avoid constructing Operation at all here.\n\npub fn local_insert(opset: &mut OpLog, branch: &mut Branch, agent: AgentId, pos: usize, ins_content: &str) -> Time {\n\n apply_local_operation(opset, branch, agent, &[Operation::new_insert(pos, ins_content)])\n\n}\n", "file_path": "crates/diamond-types-positional/src/list/list.rs", "rank": 34, "score": 212625.46212032522 }, { "content": "pub fn branch_is_root(branch: &[Time]) -> bool {\n\n branch.len() == 1 && branch[0] == ROOT_TIME\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use smallvec::smallvec;\n\n\n\n use crate::list::{Branch, ROOT_TIME};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn branch_movement_smoke_tests() {\n\n let mut branch: Branch = smallvec![ROOT_TIME];\n\n advance_branch_by_known(&mut branch, &[ROOT_TIME], 0..10);\n\n assert_eq!(branch.as_slice(), &[9]);\n\n\n\n let txns = RleVec(vec![\n\n TxnSpan {\n", "file_path": "crates/diamond-types/src/list/branch.rs", "rank": 35, "score": 208482.01770883455 }, { "content": "#[allow(unused)]\n\npub fn frontier_is_root(branch: &[Time]) -> bool {\n\n branch.len() == 1 && branch[0] == ROOT_TIME\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use smallvec::smallvec;\n\n\n\n use crate::list::Frontier;\n\n use crate::list::history::HistoryEntry;\n\n use crate::ROOT_TIME;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn 
frontier_movement_smoke_tests() {\n\n let mut branch: Frontier = smallvec![ROOT_TIME];\n\n advance_frontier_by_known_run(&mut branch, &[ROOT_TIME], (0..10).into());\n\n assert_eq!(branch.as_slice(), &[9]);\n\n\n", "file_path": "crates/diamond-types-positional/src/list/frontier.rs", "rank": 36, "score": 206481.2400212256 }, { "content": "pub trait TreeMetrics<E: ContentTraits> where Self: Debug + Copy + Clone + PartialEq + Eq {\n\n type Update: Debug + Default + PartialEq + Eq;\n\n type Value: Copy + Clone + Default + Debug + AddAssign + SubAssign + PartialEq + Eq + Sized;\n\n\n\n fn increment_marker(marker: &mut Self::Update, entry: &E);\n\n fn decrement_marker(marker: &mut Self::Update, entry: &E);\n\n\n\n // TODO: Unused. Consider removing.\n\n fn decrement_marker_by_val(marker: &mut Self::Update, val: &Self::Value);\n\n\n\n fn update_offset_by_marker(offset: &mut Self::Value, by: &Self::Update);\n\n\n\n fn increment_offset(offset: &mut Self::Value, by: &E);\n\n\n\n const CAN_COUNT_ITEMS: bool = false;\n\n // TODO: Unused. 
Consider removing.\n\n fn count_items(_idx: Self::Value) -> usize { panic!(\"Index cannot count items\") }\n\n}\n\n\n", "file_path": "crates/content-tree/src/metrics.rs", "rank": 37, "score": 203541.39012931124 }, { "content": "fn replace_in_list(list: &mut Vec<TestRange>, pos: usize, entry: TestRange) {\n\n // Wheee testing laziness!\n\n delete_in_list(list, pos, entry.len());\n\n insert_into_list(list, pos, entry);\n\n}\n\n\n", "file_path": "crates/content-tree/tests/fuzzer.rs", "rank": 38, "score": 202727.52611200474 }, { "content": "fn insert_into_list(list: &mut Vec<TestRange>, pos: usize, entry: TestRange) {\n\n let mut idx = 0;\n\n let mut cur_pos = 0;\n\n\n\n loop {\n\n if cur_pos == pos {\n\n list.insert(idx, entry);\n\n break;\n\n } else {\n\n let e = &list[idx];\n\n\n\n if cur_pos + e.len() > pos {\n\n // Split the item.\n\n let remainder = list[idx].truncate(pos - cur_pos);\n\n list.insert(idx + 1, entry);\n\n list.insert(idx + 2, remainder);\n\n break;\n\n }\n\n\n\n idx += 1;\n\n cur_pos += e.len();\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/content-tree/tests/fuzzer.rs", "rank": 39, "score": 202727.52611200474 }, { "content": "pub fn num_decode_zigzag_i32(val: u32) -> i32 {\n\n // dbg!(val);\n\n (val >> 1) as i32 * (if val & 1 == 1 { -1 } else { 1 })\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 40, "score": 202583.08623522357 }, { "content": "pub fn num_decode_zigzag_i64(val: u64) -> i64 {\n\n // dbg!(val);\n\n (val >> 1) as i64 * (if val & 1 == 1 { -1 } else { 1 })\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 41, "score": 202583.08623522357 }, { "content": "pub fn num_encode_zigzag_i64(val: i64) -> u64 {\n\n val.abs() as u64 * 2 + val.is_negative() as u64\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 42, "score": 200756.09232950915 }, { "content": "pub fn num_decode_zigzag_i32(val: u32) -> i32 {\n\n // dbg!(val);\n\n 
(val >> 1) as i32 * (if val & 1 == 1 { -1 } else { 1 })\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 43, "score": 200756.09232950915 }, { "content": "pub fn num_decode_zigzag_i64(val: u64) -> i64 {\n\n // dbg!(val);\n\n (val >> 1) as i64 * (if val & 1 == 1 { -1 } else { 1 })\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 44, "score": 200756.09232950915 }, { "content": "pub fn num_encode_zigzag_i32(val: i32) -> u32 {\n\n val.abs() as u32 * 2 + val.is_negative() as u32\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 45, "score": 200756.09232950915 }, { "content": "pub fn branch_eq(a: &[Time], b: &[Time]) -> bool {\n\n // Almost all branches only have one element in them. But it would be cleaner to keep branches\n\n // sorted.\n\n a.len() == b.len() && ((a.len() == 1 && a[0] == b[0]) || {\n\n a.iter().all(|o| b.contains(o))\n\n })\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/branch.rs", "rank": 46, "score": 200553.0212548448 }, { "content": "pub fn frontier_eq(a: &[Time], b: &[Time]) -> bool {\n\n // Almost all branches only have one element in them.\n\n debug_assert_frontier_sorted(a);\n\n debug_assert_frontier_sorted(b);\n\n a == b\n\n // a.len() == b.len() && ((a.len() == 1 && a[0] == b[0]) || {\n\n // a.iter().all(|o| b.contains(o))\n\n // })\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/frontier.rs", "rank": 47, "score": 198552.24356723583 }, { "content": "fn push_str(into: &mut Vec<u8>, val: &str) {\n\n let bytes = val.as_bytes();\n\n push_usize(into, bytes.len());\n\n into.extend_from_slice(bytes);\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/mod.rs", "rank": 48, "score": 197224.61649330537 }, { "content": "fn push_u32(into: &mut Vec<u8>, val: u32) {\n\n let mut buf = [0u8; 5];\n\n let pos = encode_u32(val, &mut buf);\n\n 
into.extend_from_slice(&buf[..pos]);\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/mod.rs", "rank": 49, "score": 197224.61649330537 }, { "content": "fn push_u64(into: &mut Vec<u8>, val: u64) {\n\n let mut buf = [0u8; 10];\n\n let pos = encode_u64(val, &mut buf);\n\n into.extend_from_slice(&buf[..pos]);\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/mod.rs", "rank": 50, "score": 197224.61649330537 }, { "content": "fn push_str(into: &mut Vec<u8>, val: &str) {\n\n let bytes = val.as_bytes();\n\n push_usize(into, bytes.len());\n\n into.extend_from_slice(bytes);\n\n}\n\n\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/mod.rs", "rank": 51, "score": 195340.87152802834 }, { "content": "fn push_u32(into: &mut Vec<u8>, val: u32) {\n\n let mut buf = [0u8; 5];\n\n let pos = encode_u32(val, &mut buf);\n\n into.extend_from_slice(&buf[..pos]);\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/mod.rs", "rank": 52, "score": 195340.87152802834 }, { "content": "fn push_u64(into: &mut Vec<u8>, val: u64) {\n\n let mut buf = [0u8; 10];\n\n let pos = encode_u64(val, &mut buf);\n\n into.extend_from_slice(&buf[..pos]);\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/mod.rs", "rank": 53, "score": 195340.87152802834 }, { "content": "pub fn num_decode_i64_with_extra_bit(value: u64) -> (i64, bool) {\n\n let bit = (value & 1) != 0;\n\n (num_decode_zigzag_i64(value >> 1), bit)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use rand::prelude::*;\n\n use crate::list::encoding::varint::encode_u64;\n\n\n\n fn check_enc_dec_unsigned(val: u64) {\n\n let mut buf = [0u8; 10];\n\n let bytes_used = encode_u64(val, &mut buf);\n\n\n\n let v1 = decode_u64_slow(&buf);\n\n assert_eq!(v1, (val, bytes_used));\n\n let v2 = decode_u64(&buf);\n\n assert_eq!(v2, (val, bytes_used));\n\n let v3 = decode_u64_slow(&buf[..bytes_used]);\n", "file_path": 
"crates/diamond-types/src/list/encoding/varint.rs", "rank": 54, "score": 194880.5253901521 }, { "content": "pub fn num_decode_u64_with_extra_bit(value: u64) -> (u64, bool) {\n\n let bit = (value & 1) != 0;\n\n (value >> 1, bit)\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 55, "score": 194880.5253901521 }, { "content": "pub fn num_decode_u32_with_extra_bit(value: u32) -> (u32, bool) {\n\n let bit = (value & 1) != 0;\n\n (value >> 1, bit)\n\n}\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 56, "score": 194880.5253901521 }, { "content": "fn write_editrun(into: &mut Vec<u8>, val: EditRun) {\n\n let mut dest = [0u8; 20];\n\n let mut pos = 0;\n\n\n\n println!(\"diff {} del {} rev {} len {}\", val.diff, val.is_delete, val.backspace_mode, val.len);\n\n\n\n let mut n = num_encode_i64_with_extra_bit(val.diff as i64, val.len != 1);\n\n if val.is_delete {\n\n n = mix_bit_u64(n, val.backspace_mode);\n\n }\n\n n = mix_bit_u64(n, val.is_delete);\n\n pos += encode_u64(n, &mut dest[..]);\n\n if val.len != 1 {\n\n pos += encode_u32(val.len, &mut dest[pos..]);\n\n }\n\n\n\n into.extend_from_slice(&dest[..pos]);\n\n}\n\n\n\nimpl ListCRDT {\n", "file_path": "crates/diamond-types/src/list/encoding/patch_encoding.rs", "rank": 57, "score": 193512.41660855702 }, { "content": "pub fn num_decode_i64_with_extra_bit(value: u64) -> (i64, bool) {\n\n let bit = (value & 1) != 0;\n\n (num_decode_zigzag_i64(value >> 1), bit)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use rand::prelude::*;\n\n use crate::list::encoding::varint::encode_u64;\n\n\n\n fn check_enc_dec_unsigned(val: u64) {\n\n let mut buf = [0u8; 10];\n\n let bytes_used = encode_u64(val, &mut buf);\n\n\n\n let v1 = decode_u64_slow(&buf);\n\n assert_eq!(v1, (val, bytes_used));\n\n let v2 = decode_u64(&buf);\n\n assert_eq!(v2, (val, bytes_used));\n\n let v3 = decode_u64_slow(&buf[..bytes_used]);\n", "file_path": 
"crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 58, "score": 193156.7492462076 }, { "content": "fn delete_in_list(list: &mut Vec<TestRange>, pos: usize, mut del_span: usize) {\n\n let mut idx = 0;\n\n let mut cur_pos = 0;\n\n\n\n while del_span > 0 && idx < list.len() {\n\n let e_len = list[idx].len();\n\n if cur_pos == pos {\n\n if e_len > del_span {\n\n list[idx].truncate_keeping_right(del_span);\n\n break;\n\n } else {\n\n del_span -= e_len;\n\n list.remove(idx);\n\n // And continue keeping the current index.\n\n }\n\n } else {\n\n if cur_pos + e_len > pos {\n\n // Split the item.\n\n let mut remainder = list[idx].truncate(pos - cur_pos);\n\n if del_span < remainder.len() {\n", "file_path": "crates/content-tree/tests/fuzzer.rs", "rank": 59, "score": 190779.47847452376 }, { "content": "#[allow(unused)]\n\npub fn trim<V>(val: V, span: TimeSpan) -> V\n\n where V: RleKeyed + HasLength + SplitableSpan\n\n{\n\n try_trim(val, span).unwrap()\n\n}\n\n\n\n// pub fn intersect<A, B>(mut a: A, mut b: B) -> Option<(A, B)>\n\n// where A: RleKeyed + HasLength + SplitableSpan,\n\n// B: RleKeyed + HasLength + SplitableSpan\n\n// {\n\n// let a_span = a.span();\n\n// let b_span = b.span();\n\n//\n\n// if a.start <= b.start {\n\n// if a.end <= b.start { return None; }\n\n// a.truncate_keeping_right(b.start - a.start);\n\n// } else { // b.start < a.start\n\n// if b.end <= a.start { return None; }\n\n// b.truncate_keeping_right(a.start - b.start);\n\n// }\n", "file_path": "crates/diamond-types-positional/src/rle/mod.rs", "rank": 60, "score": 189445.44014756806 }, { "content": "// TODO: Make a try_ version of this method, which returns an appropriate Error object.\n\npub fn load_testing_data(filename: &str) -> TestData {\n\n // let start = SystemTime::now();\n\n // let mut file = File::open(\"benchmark_data/automerge-paper.json.gz\").unwrap();\n\n let file = File::open(filename).unwrap();\n\n\n\n let reader = BufReader::new(file);\n\n // We could pass the 
GzDecoder straight to serde, but it makes it way slower to parse for\n\n // some reason.\n\n let mut reader = GzDecoder::new(reader);\n\n let mut raw_json = vec!();\n\n reader.read_to_end(&mut raw_json).unwrap();\n\n\n\n // println!(\"uncompress time {}\", start.elapsed().unwrap().as_millis());\n\n\n\n // let start = SystemTime::now();\n\n let data: TestData = serde_json::from_reader(raw_json.as_slice()).unwrap();\n\n // println!(\"JSON parse time {}\", start.elapsed().unwrap().as_millis());\n\n\n\n data\n\n}\n", "file_path": "crates/crdt-testdata/src/lib.rs", "rank": 61, "score": 189322.8397854229 }, { "content": "pub fn get_thread_num_allocations() -> usize {\n\n ALLOCATED.with(|s| {\n\n s.borrow().0\n\n })\n\n}\n\n\n", "file_path": "crates/diamond-core/src/alloc.rs", "rank": 62, "score": 188477.3609362723 }, { "content": "pub fn apply_edits(doc: &mut ListCRDT, txns: &Vec<TestTxn>) {\n\n let id = doc.get_or_create_agent_id(\"jeremy\");\n\n\n\n let mut positional: Vec<PositionalComponent> = Vec::with_capacity(3);\n\n let mut content = String::new();\n\n\n\n for (_i, txn) in txns.iter().enumerate() {\n\n for TestPatch(pos, del_span, ins_content) in &txn.patches {\n\n\n\n positional.clear();\n\n content.clear();\n\n\n\n if *del_span > 0 {\n\n positional.push(PositionalComponent {\n\n pos: *pos as u32,\n\n len: *del_span as u32,\n\n content_known: false,\n\n tag: InsDelTag::Del\n\n });\n\n }\n", "file_path": "crates/diamond-types/examples/stats.rs", "rank": 63, "score": 188199.290716511 }, { "content": "pub fn apply_edits(doc: &mut ListCRDT, txns: &Vec<TestTxn>) {\n\n let id = doc.get_or_create_agent_id(\"jeremy\");\n\n\n\n let mut positional: Vec<PositionalComponent> = Vec::with_capacity(3);\n\n let mut content = String::new();\n\n\n\n for (_i, txn) in txns.iter().enumerate() {\n\n for TestPatch(pos, del_span, ins_content) in &txn.patches {\n\n positional.clear();\n\n content.clear();\n\n\n\n if *del_span > 0 {\n\n positional.push(PositionalComponent {\n\n pos: *pos 
as u32,\n\n len: *del_span as u32,\n\n content_known: false,\n\n tag: InsDelTag::Del\n\n });\n\n }\n\n\n", "file_path": "crates/diamond-types/tests/realworld.rs", "rank": 64, "score": 188199.290716511 }, { "content": "pub fn apply_edits(doc: &mut ListCRDT, txns: &Vec<TestTxn>) {\n\n let id = doc.get_or_create_agent_id(\"jeremy\");\n\n\n\n let mut positional: Vec<PositionalComponent> = Vec::with_capacity(3);\n\n let mut content = String::new();\n\n\n\n for (_i, txn) in txns.iter().enumerate() {\n\n for TestPatch(pos, del_span, ins_content) in &txn.patches {\n\n positional.clear();\n\n content.clear();\n\n\n\n if *del_span > 0 {\n\n positional.push(PositionalComponent {\n\n pos: *pos as u32,\n\n len: *del_span as u32,\n\n content_known: false,\n\n tag: InsDelTag::Del\n\n });\n\n }\n\n\n", "file_path": "crates/diamond-types/benches/utils.rs", "rank": 65, "score": 188199.290716511 }, { "content": "pub fn decode_usize(buf: &[u8]) -> (usize, usize) {\n\n if size_of::<usize>() <= size_of::<u32>() {\n\n let (val, count) = decode_u32(buf);\n\n (val as usize, count)\n\n } else if size_of::<usize>() == size_of::<u64>() {\n\n let (val, count) = decode_u64(buf);\n\n (val as usize, count)\n\n } else {\n\n panic!(\"usize larger than u64 not supported\");\n\n }\n\n}\n\n\n\n// Who coded it better?\n\n// pub fn encode_zig_zag_32(n: i32) -> u32 {\n\n// ((n << 1) ^ (n >> 31)) as u32\n\n// }\n\n//\n\n// pub fn encode_zig_zag_64(n: i64) -> u64 {\n\n// ((n << 1) ^ (n >> 63)) as u64\n\n// }\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 66, "score": 188083.76792765263 }, { "content": "pub fn apply_edits(doc: &mut ListCRDT, txns: &Vec<TestTxn>) {\n\n let id = doc.get_or_create_agent_id(\"jeremy\");\n\n\n\n let mut positional: Vec<Operation> = Vec::with_capacity(3);\n\n // let mut content = String::new();\n\n\n\n for (_i, txn) in txns.iter().enumerate() {\n\n for TestPatch(pos, del_span, ins_content) in &txn.patches {\n\n positional.clear();\n\n 
// content.clear();\n\n\n\n if *del_span > 0 {\n\n positional.push(doc.branch.make_delete_op(*pos, *del_span));\n\n }\n\n\n\n if !ins_content.is_empty() {\n\n positional.push(Operation::new_insert(*pos, ins_content));\n\n }\n\n\n\n doc.apply_local_operation(id, positional.as_slice());\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/examples/posstats.rs", "rank": 67, "score": 186428.194811205 }, { "content": "#[allow(unused)]\n\npub fn get_thread_num_allocations() -> usize {\n\n ALLOCATED.with(|s| {\n\n s.borrow().0\n\n })\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/alloc.rs", "rank": 68, "score": 186415.9330164997 }, { "content": "fn push_chunk(into: &mut Vec<u8>, chunk_type: Chunk, data: &[u8]) {\n\n push_chunk_header(into, chunk_type, data.len());\n\n into.extend_from_slice(data);\n\n}\n\n\n\n// The 0 is a simple top level version identifier.\n\nconst MAGIC_BYTES_SMALL: [u8; 8] = *b\"DIAMONDz\";\n\n\n\n// I'm sure there's lots of simple structures like this - but I'm just going to have my own.\n", "file_path": "crates/diamond-types/src/list/encoding/mod.rs", "rank": 69, "score": 185578.17695635633 }, { "content": "pub fn apply_edits_local(doc: &mut ListCRDT, txns: &Vec<TestTxn>) {\n\n let id = doc.get_or_create_agent_id(\"jeremy\");\n\n\n\n let mut positional: Vec<Operation> = Vec::with_capacity(3);\n\n // let mut content = String::new();\n\n\n\n for (_i, txn) in txns.iter().enumerate() {\n\n for TestPatch(pos, del_span, ins_content) in &txn.patches {\n\n positional.clear();\n\n // content.clear();\n\n\n\n if *del_span > 0 {\n\n positional.push(Operation::new_delete(*pos, *del_span));\n\n }\n\n\n\n if !ins_content.is_empty() {\n\n positional.push(Operation::new_insert(*pos, ins_content));\n\n // content.push_str(ins_content.as_str());\n\n }\n\n\n\n doc.apply_local_operation(id, positional.as_slice());\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "crates/diamond-types-positional/benches/utils.rs", "rank": 70, "score": 
184707.60030126892 }, { "content": "fn random_edits_once(verbose: bool, iterations: usize) {\n\n let mut rng = SmallRng::seed_from_u64(20);\n\n\n\n // So for this test we'll make a range tree and a list, make random changes to both, and make\n\n // sure the content is always the same.\n\n\n\n for _i in 0..iterations {\n\n if verbose || _i % 10000 == 0 { println!(\"i {}\", _i); }\n\n // TestRange is overkill for this, but eh.\n\n let mut tree = ContentTreeRaw::<TestRange, FullMetricsU32, DEFAULT_IE, DEFAULT_LE>::new();\n\n let mut list = vec![];\n\n let mut expected_len = 0;\n\n\n\n for _j in 0..200 {\n\n if verbose { println!(\" j {} / i {}\", _j, _i); }\n\n if list.is_empty() || rng.gen_bool(0.33) {\n\n // Insert something.\n\n let pos = rng.gen_range(0..=tree.len().0);\n\n let item = random_entry(&mut rng);\n\n\n", "file_path": "crates/content-tree/tests/fuzzer.rs", "rank": 71, "score": 184392.4836413939 }, { "content": "fn find_entry_idx(input: &SmallVec<[VisitEntry; 4]>, time: Time) -> Option<usize> {\n\n input.as_slice().binary_search_by(|e| {\n\n // Is this the right way around?\n\n e.span.partial_cmp_time(time).reverse()\n\n // e.span.partial_cmp_time(time)\n\n }).ok()\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/m2/txn_trace.rs", "rank": 72, "score": 184053.09250834887 }, { "content": "pub fn bytes_to_chars(s: &str, byte_pos: usize) -> usize {\n\n ropey::str_utils::byte_to_char_idx(s, byte_pos)\n\n}\n\n\n", "file_path": "crates/diamond-types/src/unicount.rs", "rank": 73, "score": 184005.81715743965 }, { "content": "pub fn chars_to_bytes(s: &str, char_pos: usize) -> usize {\n\n // For all that my implementation above is correct and tight, ropey's char_to_byte_idx is\n\n // already being pulled in anyway by ropey, and its faster. 
Just use that.\n\n ropey::str_utils::char_to_byte_idx(s, char_pos)\n\n}\n\n\n", "file_path": "crates/diamond-types/src/unicount.rs", "rank": 74, "score": 184005.81715743965 }, { "content": "fn push_chunk(into: &mut Vec<u8>, chunk_type: Chunk, data: &[u8]) {\n\n push_chunk_header(into, chunk_type, data.len());\n\n into.extend_from_slice(data);\n\n}\n\n\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/mod.rs", "rank": 75, "score": 183852.63976601744 }, { "content": "pub fn apply_edits_push_merge(doc: &mut ListCRDT, txns: &Vec<TestTxn>) {\n\n let id = doc.get_or_create_agent_id(\"jeremy\");\n\n\n\n let mut last_parent = doc.branch.frontier[0];\n\n\n\n for (_i, txn) in txns.iter().enumerate() {\n\n for TestPatch(pos, del_span, ins_content) in &txn.patches {\n\n // content.clear();\n\n\n\n if *del_span > 0 {\n\n last_parent = doc.ops.push_delete_at(id, &[last_parent], *pos, *del_span);\n\n }\n\n\n\n if !ins_content.is_empty() {\n\n last_parent = doc.ops.push_insert_at(id, &[last_parent], *pos, ins_content);\n\n }\n\n }\n\n }\n\n\n\n doc.branch.merge(&doc.ops, &[last_parent]);\n\n}", "file_path": "crates/diamond-types-positional/benches/utils.rs", "rank": 76, "score": 183035.37753845152 }, { "content": "pub fn chars_to_bytes(s: &str, char_pos: usize) -> usize {\n\n // For all that my implementation above is correct and tight, ropey's char_to_byte_idx is\n\n // already being pulled in anyway by ropey, and its faster. Just use that.\n\n ropey::str_utils::char_to_byte_idx(s, char_pos)\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/unicount.rs", "rank": 77, "score": 182202.20057383942 }, { "content": "#[inline]\n\n#[allow(unused)]\n\npub fn bytes_to_chars(s: &str, byte_pos: usize) -> usize {\n\n ropey::str_utils::byte_to_char_idx(s, byte_pos)\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/unicount.rs", "rank": 78, "score": 182202.20057383942 }, { "content": "// I'm sure there's much better ways to write this. 
But this is fine for now - its not a bottleneck.\n\n// Code adapted from here:\n\n// https://github.com/josephg/librope/blob/785a7c5ef6dc6ca05cb545264fbb22c96951af0d/rope.c#L193-L212\n\npub fn chars_to_bytes_smol(s: &str, char_pos: usize) -> usize {\n\n let bytes = s.as_bytes();\n\n let mut num_bytes = 0;\n\n\n\n for _i in 0..char_pos {\n\n assert!(num_bytes < bytes.len());\n\n num_bytes += codepoint_size(bytes[num_bytes]);\n\n }\n\n num_bytes\n\n}\n\n\n", "file_path": "crates/diamond-types/src/unicount.rs", "rank": 79, "score": 182202.20057383942 }, { "content": "/// This is an optimized version of simply pushing the operation to the oplog and then merging it.\n\n///\n\n/// It is much faster; but I hate the duplicated code.\n\npub fn apply_local_operation(oplog: &mut OpLog, branch: &mut Branch, agent: AgentId, local_ops: &[Operation]) -> Time {\n\n let first_time = oplog.len();\n\n let mut next_time = first_time;\n\n\n\n // for LocalOp { pos, ins_content, del_span } in local_ops {\n\n for c in local_ops {\n\n let pos = c.span.span.start;\n\n let len = c.len();\n\n\n\n match c.tag {\n\n Ins => {\n\n // assert!(c.);\n\n // let new_content = consume_chars(&mut content, len);\n\n branch.content.insert(pos, c.content.as_ref().unwrap());\n\n }\n\n\n\n Del => {\n\n branch.content.remove(pos..pos + len);\n\n }\n\n }\n", "file_path": "crates/diamond-types-positional/src/list/list.rs", "rank": 80, "score": 182137.28991025483 }, { "content": "fn random_entry(rng: &mut SmallRng) -> TestRange {\n\n TestRange {\n\n id: rng.gen_range(0..10),\n\n len: rng.gen_range(1..10),\n\n is_activated: rng.gen_bool(0.5)\n\n }\n\n}\n\n\n", "file_path": "crates/content-tree/tests/fuzzer.rs", "rank": 81, "score": 179008.92772619438 }, { "content": "pub trait FindContent<E: ContentTraits + ContentLength>: TreeMetrics<E> {\n\n fn index_to_content(offset: Self::Value) -> usize;\n\n}\n\n\n", "file_path": "crates/content-tree/src/metrics.rs", "rank": 82, "score": 178045.53141016053 }, { "content": 
"#[allow(unused)]\n\npub fn try_trim<V>(mut x: V, target_span: TimeSpan) -> Option<V>\n\n where V: RleKeyed + HasLength + SplitableSpan\n\n{\n\n let x_span = x.span();\n\n if x_span.start < target_span.start {\n\n if x_span.end <= target_span.start { return None; }\n\n x.truncate_keeping_right(target_span.start - x_span.start);\n\n }\n\n\n\n if x_span.end > target_span.end {\n\n if x_span.start >= target_span.end { return None; }\n\n x.truncate(target_span.end - x_span.start);\n\n }\n\n\n\n Some(x)\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/rle/mod.rs", "rank": 83, "score": 177246.0804142526 }, { "content": "pub fn load_nl_testing_data(filename: &str) -> NLDataset {\n\n let file = File::open(filename).unwrap();\n\n let reader = BufReader::new(file);\n\n\n\n // TODO: Add gzip compression.\n\n serde_json::from_reader(reader).unwrap()\n\n}\n\n\n\n// #[test]\n\n// fn foo() {\n\n// let d = load_nl_testing_data(\"/home/seph/src/crdt-benchmarks/xml/out/G1-3.json\");\n\n// dbg!(&d);\n\n// }", "file_path": "crates/crdt-testdata/src/nonlinear.rs", "rank": 84, "score": 176322.8660057897 }, { "content": "fn encoding_benchmarks(c: &mut Criterion) {\n\n for name in DATASETS {\n\n let mut group = c.benchmark_group(\"encoding\");\n\n let test_data = testing_data(name);\n\n let doc = list_with_data(&test_data);\n\n // let mut out = vec![];\n\n // doc.encode_small(&mut out, false).unwrap();\n\n // group.throughput(Throughput::Bytes(out.len() as _));\n\n group.throughput(Throughput::Bytes(doc.encode_small(false).len() as _));\n\n\n\n group.bench_function(BenchmarkId::new(\"encode_small\", name), |b| {\n\n b.iter(|| {\n\n // let mut out = vec![];\n\n // doc.encode_small(&mut out, false).unwrap();\n\n let encoding = doc.encode_small(false);\n\n assert!(encoding.len() > 1000);\n\n black_box(encoding);\n\n })\n\n });\n\n group.bench_function(BenchmarkId::new(\"encode_patches\", name), |b| {\n", "file_path": "crates/diamond-types/benches/core.rs", "rank": 85, "score": 
174655.87263089418 }, { "content": "fn local_benchmarks(c: &mut Criterion) {\n\n for name in DATASETS {\n\n let mut group = c.benchmark_group(\"local\");\n\n let test_data = testing_data(name);\n\n group.throughput(Throughput::Elements(test_data.len() as u64));\n\n\n\n group.bench_function(BenchmarkId::new(\"yjs\", name), |b| {\n\n b.iter(|| {\n\n let doc = list_with_data(&test_data);\n\n assert_eq!(doc.len(), test_data.end_content.len());\n\n black_box(doc.len());\n\n })\n\n });\n\n\n\n group.finish();\n\n }\n\n\n\n // c.bench_function(\"kevin\", |b| {\n\n // b.iter(|| {\n\n // let mut doc = ListCRDT::new();\n", "file_path": "crates/diamond-types/benches/core.rs", "rank": 86, "score": 174655.87263089418 }, { "content": "fn ot_benchmarks(c: &mut Criterion) {\n\n for name in DATASETS {\n\n let mut group = c.benchmark_group(\"ot\");\n\n let test_data = testing_data(name);\n\n let doc = list_with_data(&test_data);\n\n group.throughput(Throughput::Elements(test_data.len() as u64));\n\n\n\n group.bench_function(BenchmarkId::new(\"traversal_since\", name), |b| {\n\n b.iter(|| {\n\n let changes = doc.traversal_changes_since(0);\n\n black_box(changes);\n\n })\n\n });\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types/benches/core.rs", "rank": 87, "score": 174655.87263089418 }, { "content": "fn remote_benchmarks(c: &mut Criterion) {\n\n for name in DATASETS {\n\n let mut group = c.benchmark_group(\"remote\");\n\n let test_data = testing_data(name);\n\n let src_doc = list_with_data(&test_data);\n\n\n\n group.throughput(Throughput::Elements(test_data.len() as u64));\n\n\n\n group.bench_function(BenchmarkId::new( \"generate\", name), |b| {\n\n b.iter(|| {\n\n let remote_edits: Vec<_> = src_doc.get_all_txns();\n\n black_box(remote_edits);\n\n })\n\n });\n\n\n\n let remote_edits: Vec<_> = src_doc.get_all_txns();\n\n group.bench_function(BenchmarkId::new( \"apply\", name), |b| {\n\n b.iter(|| {\n\n let mut doc = ListCRDT::new();\n\n for txn in remote_edits.iter() {\n", 
"file_path": "crates/diamond-types/benches/core.rs", "rank": 88, "score": 174655.87263089418 }, { "content": "struct Merger<S: MergableSpan, F: FnMut(S, &mut Ctx), Ctx = ()> {\n\n last: Option<S>,\n\n f: F,\n\n _ctx: PhantomData<Ctx> // This is awful.\n\n}\n\n\n\nimpl<S: MergableSpan, F: FnMut(S, &mut Ctx), Ctx> Merger<S, F, Ctx> {\n\n pub fn new(f: F) -> Self {\n\n Self { last: None, f, _ctx: PhantomData }\n\n }\n\n\n\n pub fn push2(&mut self, span: S, ctx: &mut Ctx) {\n\n if let Some(last) = self.last.as_mut() {\n\n if last.can_append(&span) {\n\n last.append(span);\n\n } else {\n\n let old = replace(last, span);\n\n (self.f)(old, ctx);\n\n }\n\n } else {\n", "file_path": "crates/diamond-types-positional/src/list/encoding/mod.rs", "rank": 89, "score": 174618.00368984113 }, { "content": "/// Write an operation to the passed writer.\n\nfn write_op(dest: &mut Vec<u8>, op: &OperationInternal, cursor: &mut usize) {\n\n // Note I'm relying on the operation log itself to be iter_merged, which simplifies things here\n\n // greatly.\n\n\n\n // This is a bit of a tradeoff. Sometimes when items get split, they retain their reversed tag.\n\n // We could store .reversed for all operations (including when length=1) and pick a reversed\n\n // flag here which minimizes the cursor deltas. But that approach results in more complexity and\n\n // worse filesize overall.\n\n // let reversed = !op.fwd && op.len > 1;\n\n let fwd = op.span.fwd || op.len() == 1;\n\n\n\n // let reversed = op.reversed;\n\n // if op.len == 1 { assert!(!op.reversed); }\n\n\n\n // let op_start = op.pos;\n\n let op_start = if op.tag == Del && !fwd {\n\n op.end()\n\n } else {\n\n op.start()\n\n };\n", "file_path": "crates/diamond-types-positional/src/list/encoding/encode_oplog.rs", "rank": 90, "score": 174563.40193951124 }, { "content": "/// Transform the positions in one traversal component by another. 
Produces the replacement\n\n/// traversal.\n\n///\n\n/// This operates on lists of TraversalComponents because the inserted content is unaffected.\n\npub fn transform(op: &[TraversalComponent], other: &[TraversalComponent], is_left: bool) -> SmallVec<[TraversalComponent; 2]> {\n\n // debug_assert!(op.is_valid() && other.is_valid());\n\n\n\n let mut result = SmallVec::<[TraversalComponent; 2]>::new();\n\n let mut iter = traversal_iter(op, Context::Pre);\n\n\n\n for c in other {\n\n match c {\n\n Retain(mut len) => { // Skip. Copy input to output.\n\n while len > 0 {\n\n let chunk = iter.next(len);\n\n len -= chunk.pre_len();\n\n result.push_rle(chunk);\n\n }\n\n },\n\n\n\n Del(mut len) => {\n\n while len > 0 {\n\n let chunk = iter.next(len);\n\n len -= chunk.pre_len();\n", "file_path": "crates/diamond-types/src/list/ot/ot.rs", "rank": 91, "score": 174254.49226722703 }, { "content": "// TODO: This is from rust-protobuf. Check this is actually faster than decode_u64_slow.\n\n/// Returns (varint, number of bytes read).\n\npub fn decode_u64(buf: &[u8]) -> (u64, usize) {\n\n if buf.is_empty() {\n\n panic!(\"Not enough bytes in buffer\");\n\n } else if buf[0] < 0x80 {\n\n // The most common case\n\n (buf[0] as u64, 1)\n\n } else if buf.len() >= 2 && buf[1] < 0x80 {\n\n // Handle the case of two bytes too\n\n (\n\n (buf[0] & 0x7f) as u64 | (buf[1] as u64) << 7,\n\n 2\n\n )\n\n } else if buf.len() >= 10 {\n\n // Read from array when buf at at least 10 bytes, which is the max len for varint.\n\n let mut r: u64 = 0;\n\n let mut i: usize = 0;\n\n // The i < buf.len() clause gets optimized out, but it gets the optimizer to remove bounds\n\n // checks on buf[i].\n\n while i < buf.len() && i < 10 {\n\n let b = buf[i];\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 92, "score": 173466.0925799152 }, { "content": "pub fn decode_u32(buf: &[u8]) -> (u32, usize) {\n\n let (val, bytes_consumed) = decode_u64(buf);\n\n assert!(val < u32::MAX as u64, \"varint 
is not a u32\");\n\n debug_assert!(bytes_consumed <= 5);\n\n (val as u32, bytes_consumed)\n\n}\n\n\n\n// Who coded it better?\n\n// pub fn encode_zig_zag_32(n: i32) -> u32 {\n\n// ((n << 1) ^ (n >> 31)) as u32\n\n// }\n\n//\n\n// pub fn encode_zig_zag_64(n: i64) -> u64 {\n\n// ((n << 1) ^ (n >> 63)) as u64\n\n// }\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 93, "score": 173461.04916138825 }, { "content": "fn encoding_benchmarks(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"encoding\");\n\n let bytes = std::fs::read(\"node_nodecc.dt\").unwrap();\n\n let oplog = OpLog::load_from(&bytes).unwrap();\n\n // group.throughput(Throughput::Bytes(bytes.len() as _));\n\n group.throughput(Throughput::Elements(oplog.len() as _));\n\n\n\n group.bench_function(\"decode_nodecc\", |b| {\n\n b.iter(|| {\n\n let oplog = OpLog::load_from(&bytes).unwrap();\n\n black_box(oplog);\n\n });\n\n });\n\n\n\n group.bench_function(\"encode_nodecc\", |b| {\n\n b.iter(|| {\n\n let bytes = oplog.encode(EncodeOptions {\n\n store_inserted_content: false,\n\n store_deleted_content: false,\n\n verbose: false\n", "file_path": "crates/diamond-types-positional/benches/core.rs", "rank": 94, "score": 173193.09135842574 }, { "content": "fn local_benchmarks(c: &mut Criterion) {\n\n for name in DATASETS {\n\n let mut group = c.benchmark_group(\"local\");\n\n let test_data = testing_data(name);\n\n assert_eq!(test_data.start_content.len(), 0);\n\n\n\n group.throughput(Throughput::Elements(test_data.len() as u64));\n\n\n\n group.bench_function(BenchmarkId::new(\"apply_local\", name), |b| {\n\n b.iter(|| {\n\n let mut doc = ListCRDT::new();\n\n apply_edits_local(&mut doc, &test_data.txns);\n\n assert_eq!(doc.len(), test_data.end_content.len());\n\n black_box(doc.len());\n\n })\n\n });\n\n\n\n group.bench_function(BenchmarkId::new(\"apply_push\", name), |b| {\n\n b.iter(|| {\n\n let mut doc = ListCRDT::new();\n\n apply_edits_push_merge(&mut doc, 
&test_data.txns);\n\n // assert_eq!(doc.len(), test_data.end_content.len());\n\n black_box(doc.len());\n\n })\n\n });\n\n\n\n group.finish();\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/benches/core.rs", "rank": 95, "score": 173193.09135842574 }, { "content": "fn random_str(len: usize, rng: &mut SmallRng) -> String {\n\n let mut str = String::new();\n\n let alphabet: Vec<char> = \"abcdefghijklmnop \".chars().collect();\n\n for _ in 0..len {\n\n str.push(alphabet[rng.gen_range(0..alphabet.len())]);\n\n }\n\n str\n\n}\n\n\n", "file_path": "crates/diamond-types/examples/simple.rs", "rank": 96, "score": 171746.9858213216 }, { "content": "// TODO: Make this return a Result<> of some sort.\n\n/// Returns (varint, number of bytes read).\n\npub fn decode_u64_slow(buf: &[u8]) -> (u64, usize) {\n\n let mut r: u64 = 0;\n\n let mut i = 0;\n\n loop {\n\n if i == 10 {\n\n panic!(\"Invalid varint\");\n\n }\n\n let b = buf[i];\n\n if i == 9 && (b & 0x7f) > 1 {\n\n panic!(\"Invalid varint\");\n\n }\n\n r |= ((b & 0x7f) as u64) << (i * 7);\n\n i += 1;\n\n if b < 0x80 {\n\n return (r, i)\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/diamond-types/src/list/encoding/varint.rs", "rank": 97, "score": 171587.0514443906 }, { "content": "// TODO: This is from rust-protobuf. 
Check this is actually faster than decode_u64_slow.\n\n/// Returns (varint, number of bytes read).\n\npub fn decode_u64(buf: &[u8]) -> (u64, usize) {\n\n if buf.is_empty() {\n\n panic!(\"Not enough bytes in buffer\");\n\n } else if buf[0] < 0x80 {\n\n // The most common case\n\n (buf[0] as u64, 1)\n\n } else if buf.len() >= 2 && buf[1] < 0x80 {\n\n // Handle the case of two bytes too\n\n (\n\n (buf[0] & 0x7f) as u64 | (buf[1] as u64) << 7,\n\n 2\n\n )\n\n } else if buf.len() >= 10 {\n\n // Read from array when buf at at least 10 bytes, which is the max len for varint.\n\n let mut r: u64 = 0;\n\n let mut i: usize = 0;\n\n // The i < buf.len() clause gets optimized out, but it gets the optimizer to remove bounds\n\n // checks on buf[i].\n\n while i < buf.len() && i < 10 {\n\n let b = buf[i];\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 98, "score": 171586.09863383957 }, { "content": "pub fn decode_u32(buf: &[u8]) -> (u32, usize) {\n\n let (val, bytes_consumed) = decode_u64(buf);\n\n assert!(val < u32::MAX as u64, \"varint is not a u32\");\n\n debug_assert!(bytes_consumed <= 5);\n\n (val as u32, bytes_consumed)\n\n}\n\n\n", "file_path": "crates/diamond-types-positional/src/list/encoding/varint.rs", "rank": 99, "score": 171581.05521531263 } ]
Rust
mythril/src/multiboot2.rs
barkera/mythril
b7b45fe08a05c5b66317bbf13fdd4fcf10c428d2
use crate::acpi; use crate::boot_info::{self, BootInfo}; use crate::global_alloc; use crate::memory::HostPhysAddr; use alloc::vec::Vec; extern "C" { pub static MULTIBOOT2_HEADER_START: u32; pub static MULTIBOOT2_HEADER_END: u32; } pub fn header_location() -> (u32, u32) { unsafe { (MULTIBOOT2_HEADER_START, MULTIBOOT2_HEADER_END) } } fn setup_global_alloc_region(info: &multiboot2::BootInformation) -> (u64, u64) { let mem_tag = info .memory_map_tag() .expect("Missing multiboot memory map tag"); let available = mem_tag .memory_areas() .map(|area| (area.start_address(), area.end_address())); debug!("Modules:"); let modules = info.module_tags().map(|module| { debug!( " 0x{:x}-0x{:x}", module.start_address(), module.end_address() ); (module.start_address() as u64, module.end_address() as u64) }); let sections_tag = info .elf_sections_tag() .expect("Missing multiboot elf sections tag"); debug!("Elf sections:"); let sections = sections_tag.sections().map(|section| { debug!( " 0x{:x}-0x{:x}", section.start_address(), section.end_address() ); (section.start_address(), section.end_address()) }); let multiboot_info = [(info.start_address() as u64, info.end_address() as u64)]; debug!( "Multiboot Info: 0x{:x}-0x{:x}", info.start_address(), info.end_address() ); let excluded = modules .chain(sections) .chain(multiboot_info.iter().copied()); let max_excluded = excluded .max_by(|left, right| left.1.cmp(&right.1)) .expect("No max excluded region"); let largest_region = available .max_by(|left, right| (left.1 - left.0).cmp(&(right.1 - right.0))) .expect("No largest region"); if largest_region.0 > max_excluded.1 { largest_region } else if max_excluded.1 > largest_region.0 && max_excluded.1 < largest_region.1 { (max_excluded.1, largest_region.1) } else { panic!("Unable to find suitable global alloc region") } } pub fn early_init_multiboot2(addr: HostPhysAddr) -> BootInfo { let multiboot_info = unsafe { multiboot2::load(addr.as_u64() as usize) }; let alloc_region = 
setup_global_alloc_region(&multiboot_info); info!( "Allocating from 0x{:x}-{:x}", alloc_region.0, alloc_region.1 ); unsafe { global_alloc::Allocator::allocate_from(alloc_region.0, alloc_region.1); } let modules = multiboot_info .module_tags() .map(|tag| boot_info::BootModule { address: HostPhysAddr::new(tag.start_address() as u64), size: (tag.end_address() - tag.start_address()) as usize, identifier: Some(tag.name().into()), }) .collect::<Vec<_>>(); let rsdp = multiboot_info .rsdp_v2_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v2| acpi::rsdp::RSDP::V2 { xsdt_addr: rsdp_v2.xsdt_address() as u64, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v2.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) .or_else(|| { multiboot_info .rsdp_v1_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v1| acpi::rsdp::RSDP::V1 { rsdt_addr: rsdp_v1.rsdt_address() as u32, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v1.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) }); BootInfo { modules: modules, rsdp: rsdp, } }
use crate::acpi; use crate::boot_info::{self, BootInfo}; use crate::global_alloc; use crate::memory::HostPhysAddr; use alloc::vec::Vec; extern "C" { pub static MULTIBOOT2_HEADER_START: u32; pub static MULTIBOOT2_HEADER_END: u32; } pub fn header_location() -> (u32, u32) { unsafe { (MULTIBOOT2_HEADER_START, MULTIBOOT2_HEADER_END) } } fn setup_global_alloc_region(info: &multiboot2::BootInformation) -> (u64, u64) { let mem_tag = info .memory_map_tag() .expect("Missing multiboot memory map tag"); let available = mem_tag .memory_areas() .map(|area| (area.start_address(), area.end_address())); debug!("Modules:"); let modules = info.module_tags().map(|module| { debug!( " 0x{:x}-0x{:x}", module.start_address(), module.end_address() ); (module.start_address() as u64, module.end_address() as u64) });
pub fn early_init_multiboot2(addr: HostPhysAddr) -> BootInfo { let multiboot_info = unsafe { multiboot2::load(addr.as_u64() as usize) }; let alloc_region = setup_global_alloc_region(&multiboot_info); info!( "Allocating from 0x{:x}-{:x}", alloc_region.0, alloc_region.1 ); unsafe { global_alloc::Allocator::allocate_from(alloc_region.0, alloc_region.1); } let modules = multiboot_info .module_tags() .map(|tag| boot_info::BootModule { address: HostPhysAddr::new(tag.start_address() as u64), size: (tag.end_address() - tag.start_address()) as usize, identifier: Some(tag.name().into()), }) .collect::<Vec<_>>(); let rsdp = multiboot_info .rsdp_v2_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v2| acpi::rsdp::RSDP::V2 { xsdt_addr: rsdp_v2.xsdt_address() as u64, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v2.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) .or_else(|| { multiboot_info .rsdp_v1_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v1| acpi::rsdp::RSDP::V1 { rsdt_addr: rsdp_v1.rsdt_address() as u32, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v1.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) }); BootInfo { modules: modules, rsdp: rsdp, } }
let sections_tag = info .elf_sections_tag() .expect("Missing multiboot elf sections tag"); debug!("Elf sections:"); let sections = sections_tag.sections().map(|section| { debug!( " 0x{:x}-0x{:x}", section.start_address(), section.end_address() ); (section.start_address(), section.end_address()) }); let multiboot_info = [(info.start_address() as u64, info.end_address() as u64)]; debug!( "Multiboot Info: 0x{:x}-0x{:x}", info.start_address(), info.end_address() ); let excluded = modules .chain(sections) .chain(multiboot_info.iter().copied()); let max_excluded = excluded .max_by(|left, right| left.1.cmp(&right.1)) .expect("No max excluded region"); let largest_region = available .max_by(|left, right| (left.1 - left.0).cmp(&(right.1 - right.0))) .expect("No largest region"); if largest_region.0 > max_excluded.1 { largest_region } else if max_excluded.1 > largest_region.0 && max_excluded.1 < largest_region.1 { (max_excluded.1, largest_region.1) } else { panic!("Unable to find suitable global alloc region") } }
function_block-function_prefix_line
[ { "content": "/// Get this current core's sequential index\n\npub fn read_core_idx() -> u64 {\n\n unsafe {\n\n let value: u64;\n\n llvm_asm!(\"mov [%fs], %rax\"\n\n : \"={rax}\"(value)\n\n ::: \"volatile\");\n\n value >> 3 // Shift away the RPL and TI bits (they will always be 0)\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub unsafe fn get_pre_core_impl<T>(t: &T) -> &T {\n\n core::mem::transmute(per_core_address(\n\n t as *const T as *const u8,\n\n read_core_idx() as usize,\n\n ))\n\n}\n\n\n\n#[doc(hidden)]\n\npub unsafe fn get_pre_core_mut_impl<T>(t: &mut T) -> &mut T {\n", "file_path": "mythril/src/percore.rs", "rank": 1, "score": 140458.50762575096 }, { "content": "/// Obtain a reference to the current core's LocalApic\n\npub fn get_local_apic() -> &'static LocalApic {\n\n get_per_core!(LOCAL_APIC)\n\n .as_ref()\n\n .expect(\"Attempt to get local APIC before initialization\")\n\n}\n\n\n\n/// Obtain a mutable reference to the current core's LocalApic\n\n///\n\n/// The caller must ensure that calling this function does not\n\n/// cause soundness violations such as holding two mutable\n\n/// references or a mutable and immutable reference.\n\npub unsafe fn get_local_apic_mut() -> &'static mut LocalApic {\n\n get_per_core_mut!(LOCAL_APIC)\n\n .as_mut()\n\n .expect(\"Attempt to get local APIC before initialization\")\n\n}\n\n\n\n/// Structure defining the interface for a local x2APIC\n\n#[derive(Debug)]\n\npub struct LocalApic {\n", "file_path": "mythril/src/apic.rs", "rank": 3, "score": 129224.5076841678 }, { "content": "/// Get a reference to the current core's TimerWheel\n\npub fn get_timer_wheel() -> &'static TimerWheel {\n\n get_per_core!(TIMER_WHEEL)\n\n .as_ref()\n\n .expect(\"TimerWheel has not been initialized\")\n\n}\n\n\n\n/// Get a mutable reference to the current core's TimerWheel\n\npub unsafe fn get_timer_wheel_mut() -> &'static mut TimerWheel {\n\n get_per_core_mut!(TIMER_WHEEL)\n\n .as_mut()\n\n .expect(\"TimerWheel has not been 
initialized\")\n\n}\n\n\n\n/// Timer identifier that may be used to cancel a running timer\n\n#[derive(Eq, PartialEq, PartialOrd, Ord, Clone, Debug)]\n\npub struct TimerId(u64);\n\n\n\n// TimerId can only be used with the wheel that created the timer. To capture\n\n// this, don't allow the ids to be sent betweeen cores\n\nimpl !Send for TimerId {}\n", "file_path": "mythril/src/time.rs", "rank": 4, "score": 129224.5076841678 }, { "content": "fn map_guest_memory(\n\n guest_ept_base: &mut EptPml4Table,\n\n guest_addr: GuestPhysAddr,\n\n host_frame: HostPhysFrame,\n\n readonly: bool,\n\n) -> Result<()> {\n\n let default_flags = EptTableFlags::READ_ACCESS\n\n | EptTableFlags::WRITE_ACCESS\n\n | EptTableFlags::PRIV_EXEC_ACCESS\n\n | EptTableFlags::USERMODE_EXEC_ACCESS;\n\n\n\n let ept_pml4e = &mut guest_ept_base[guest_addr.p4_index()];\n\n if ept_pml4e.is_unused() {\n\n let ept_pdpt_frame =\n\n Box::into_raw(Box::new(EptPageDirectoryPointerTable::default()));\n\n let ept_pdpt_addr = HostPhysAddr::new(ept_pdpt_frame as u64);\n\n ept_pml4e.set_addr(ept_pdpt_addr, default_flags);\n\n }\n\n\n\n let ept_pdpt =\n", "file_path": "mythril/src/memory.rs", "rank": 5, "score": 125743.78529019978 }, { "content": "#[inline]\n\nfn pt_index(addr: u64) -> ux::u9 {\n\n ux::u9::new(((addr >> 12usize) & 0b111111111) as u16)\n\n}\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 7, "score": 112178.42298417479 }, { "content": "#[inline]\n\nfn page_offset(addr: u64) -> ux::u12 {\n\n ux::u12::new((addr & 0b111111111111) as u16)\n\n}\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 8, "score": 112178.42298417479 }, { "content": "#[inline]\n\nfn pml4_index(addr: u64) -> ux::u9 {\n\n ux::u9::new(((addr >> 39usize) & 0b111111111) as u16)\n\n}\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 9, "score": 112178.42298417479 }, { "content": "#[inline]\n\nfn pdpt_index(addr: u64) -> ux::u9 {\n\n ux::u9::new(((addr >> 30usize) & 0b111111111) as u16)\n\n}\n\n\n", "file_path": 
"mythril/src/memory.rs", "rank": 10, "score": 112178.42298417479 }, { "content": "#[inline]\n\nfn pd_index(addr: u64) -> ux::u9 {\n\n ux::u9::new(((addr >> 21usize) & 0b111111111) as u16)\n\n}\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 11, "score": 112178.42298417479 }, { "content": "pub fn check_vm_insruction(rflags: u64, error: String) -> Result<()> {\n\n let rflags = rflags::RFlags::from_bits_truncate(rflags);\n\n\n\n if rflags.contains(RFlags::FLAGS_CF) {\n\n Err(Error::VmFailInvalid(error))\n\n } else if rflags.contains(RFlags::FLAGS_ZF) {\n\n let errno = unsafe {\n\n let value: u64;\n\n llvm_asm!(\"vmread %rax, %rdx;\"\n\n : \"={rdx}\"(value)\n\n : \"{rax}\"(vmcs::VmcsField::VmInstructionError as u64)\n\n : \"rflags\"\n\n : \"volatile\");\n\n value\n\n };\n\n let vm_error = VmInstructionError::try_from(errno)\n\n .unwrap_or(VmInstructionError::UnknownError);\n\n\n\n Err(Error::VmFailValid((vm_error, error)))\n\n } else {\n", "file_path": "mythril/src/error.rs", "rank": 12, "score": 111932.1445352927 }, { "content": "#[inline]\n\nfn huge_page_offset(addr: u64) -> ux::u30 {\n\n ux::u30::new((addr & 0x3fffffff) as u32)\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum GuestVirtAddr {\n\n NoPaging(GuestPhysAddr),\n\n Paging4Level(Guest4LevelPagingAddr),\n\n //TODO: 5 level paging\n\n}\n\n\n\nimpl GuestVirtAddr {\n\n // Convert a 64 bit number to a virtual address in the context of the current\n\n // guest configuration (as read from a VMCS)\n\n pub fn new(val: u64, vmcs: &vmcs::ActiveVmcs) -> Result<Self> {\n\n let cr0 = Cr0::from_bits_truncate(\n\n vmcs.read_field(vmcs::VmcsField::GuestCr0)? 
as usize,\n\n );\n\n if cr0.contains(Cr0::CR0_ENABLE_PAGING) {\n\n Ok(GuestVirtAddr::Paging4Level(Guest4LevelPagingAddr::new(val)))\n", "file_path": "mythril/src/memory.rs", "rank": 13, "score": 109111.90544715458 }, { "content": "#[inline]\n\nfn large_page_offset(addr: u64) -> ux::u21 {\n\n ux::u21::new((addr & 0x1fffff) as u32)\n\n}\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 14, "score": 109111.90544715457 }, { "content": "fn frequency() -> u64 {\n\n unsafe {\n\n TIME_SRC\n\n .as_ref()\n\n .expect(\"Global time source is not calibrated\")\n\n .frequency()\n\n }\n\n}\n\n\n\n/// A point in time on the system in terms of the global system `TimeSource`\n\n///\n\n/// An `Instant` can be added/subtracted with a `Duration` to produce an\n\n/// `Instant` in the future or past. However, this requires that the global\n\n/// system time source be initialized, otherwise it will panic.\n\n#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug)]\n\npub struct Instant(pub u64);\n\n\n\nimpl Add<Duration> for Instant {\n\n type Output = Self;\n\n\n", "file_path": "mythril/src/time.rs", "rank": 15, "score": 105583.71314585982 }, { "content": "pub fn load_linux(\n\n kernel_name: impl AsRef<str>,\n\n initramfs_name: impl AsRef<str>,\n\n cmdline: &[u8],\n\n memory: u64,\n\n builder: &mut QemuFwCfgBuilder,\n\n info: &BootInfo,\n\n) -> Result<()> {\n\n let mut kernel = info\n\n .find_module(kernel_name.as_ref())\n\n .ok_or_else(|| {\n\n Error::InvalidValue(format!(\n\n \"No such kernel '{}'\",\n\n kernel_name.as_ref()\n\n ))\n\n })?\n\n .data()\n\n .to_vec();\n\n let initramfs = info\n\n .find_module(initramfs_name.as_ref())\n", "file_path": "mythril/src/linux.rs", "rank": 16, "score": 101899.33660105558 }, { "content": "/// The post-startup point where a core begins executing its statically\n\n/// assigned VCPU. Past this point, there is no distinction between BSP\n\n/// and AP.\n\npub fn mp_entry_point() -> ! 
{\n\n unsafe {\n\n time::init_timer_wheel()\n\n .expect(\"Failed to initialize per-core timer wheel\");\n\n }\n\n\n\n let vm = unsafe {\n\n vm::VM_MAP\n\n .as_ref()\n\n .unwrap()\n\n .get(&apic::get_local_apic().id())\n\n .expect(\"Failed to find VM for core\")\n\n .clone()\n\n };\n\n let vcpu = VCpu::new(vm.clone()).expect(\"Failed to create vcpu\");\n\n vcpu.launch().expect(\"Failed to launch vm\")\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\n#[repr(u8)]\n", "file_path": "mythril/src/vcpu.rs", "rank": 17, "score": 99306.29721250685 }, { "content": "/// Set a one shot timer on this core\n\npub fn set_oneshot_timer(\n\n duration: core::time::Duration,\n\n vector: u8,\n\n) -> TimerId {\n\n let wheel = unsafe { get_timer_wheel_mut() };\n\n let timer = ReadyTimer::one_shot(duration, vector);\n\n wheel.register_timer(timer)\n\n}\n\n\n", "file_path": "mythril/src/time.rs", "rank": 18, "score": 99302.3939117482 }, { "content": "pub fn emulate_portio(\n\n vcpu: &mut vcpu::VCpu,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n exit: vmexit::IoInstructionInformation,\n\n) -> Result<()> {\n\n let (port, input, size, string) =\n\n (exit.port, exit.input, exit.size, exit.string);\n\n\n\n if !string {\n\n let mut vm = vcpu.vm.write();\n\n\n\n if !input {\n\n let arr = (guest_cpu.rax as u32).to_be_bytes();\n\n vm.on_port_write(\n\n vcpu,\n\n port,\n\n PortWriteRequest::try_from(&arr[4 - size as usize..])?,\n\n )?;\n\n } else {\n\n let mut arr = [0u8; 4];\n", "file_path": "mythril/src/emulate/portio.rs", "rank": 19, "score": 99302.3939117482 }, { "content": "pub fn raw_write_vga(\n\n s: impl AsRef<str>,\n\n mut col: usize,\n\n mut row: usize,\n\n vga_mem: &mut [[u16; VGA_WIDTH]; VGA_HEIGHT],\n\n) -> (usize, usize) {\n\n for byte in s.as_ref().bytes() {\n\n // move cursor on newlines (0x0A) and carriage-returns (0x0D)\n\n if byte == 0x0A {\n\n row += 1;\n\n col = 0;\n\n continue;\n\n } else if byte == 0x0D {\n\n col = 0;\n\n continue;\n\n }\n\n\n\n if row >= VGA_HEIGHT {\n\n 
scroll_vga(vga_mem);\n\n row = VGA_HEIGHT - 1;\n", "file_path": "mythril/src/logger.rs", "rank": 20, "score": 99302.3939117482 }, { "content": "/// Set a periodic timer on this core\n\npub fn set_periodic_timer(\n\n interval: core::time::Duration,\n\n vector: u8,\n\n) -> TimerId {\n\n let wheel = unsafe { get_timer_wheel_mut() };\n\n let timer = ReadyTimer::periodic(interval, vector);\n\n wheel.register_timer(timer)\n\n}\n", "file_path": "mythril/src/time.rs", "rank": 21, "score": 99302.3939117482 }, { "content": "pub fn emulate_cpuid(\n\n _vcpu: &mut vcpu::VCpu,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n) -> Result<()> {\n\n //FIXME: for now just use the actual cpuid\n\n let mut res = raw_cpuid::native_cpuid::cpuid_count(\n\n guest_cpu.rax as u32,\n\n guest_cpu.rcx as u32,\n\n );\n\n\n\n if guest_cpu.rax as u32 == 1 {\n\n // Disable MTRR\n\n res.edx &= !(1 << 12);\n\n\n\n // Disable XSAVE\n\n res.ecx &= !(1 << 26);\n\n\n\n // Hide hypervisor feature\n\n res.ecx &= !(1 << 31);\n\n }\n\n\n\n guest_cpu.rax = res.eax as u64 | (guest_cpu.rax & 0xffffffff00000000);\n\n guest_cpu.rbx = res.ebx as u64 | (guest_cpu.rbx & 0xffffffff00000000);\n\n guest_cpu.rcx = res.ecx as u64 | (guest_cpu.rcx & 0xffffffff00000000);\n\n guest_cpu.rdx = res.edx as u64 | (guest_cpu.rdx & 0xffffffff00000000);\n\n Ok(())\n\n}\n", "file_path": "mythril/src/emulate/cpuid.rs", "rank": 22, "score": 99302.3939117482 }, { "content": "/// Get the current instant from the global system `TimeSource`.\n\npub fn now() -> Instant {\n\n unsafe {\n\n TIME_SRC\n\n .as_ref()\n\n .expect(\"Global time source is not calibrated\")\n\n .now()\n\n }\n\n}\n\n\n", "file_path": "mythril/src/time.rs", "rank": 23, "score": 97513.26260939751 }, { "content": "pub fn handle_ept_violation(\n\n vcpu: &mut vcpu::VCpu,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n _exit: vmexit::EptInformation,\n\n) -> Result<()> {\n\n let instruction_len = vcpu\n\n .vmcs\n\n .read_field(vmcs::VmcsField::VmExitInstructionLen)?;\n\n let ip 
= vcpu.vmcs.read_field(vmcs::VmcsField::GuestRip)?;\n\n\n\n let mut vm = vcpu.vm.write();\n\n let ip_addr = memory::GuestVirtAddr::new(ip, &vcpu.vmcs)?;\n\n let view = memory::GuestAddressSpaceViewMut::from_vmcs(\n\n &vcpu.vmcs,\n\n &mut vm.guest_space,\n\n )?;\n\n\n\n let bytes = view.read_bytes(\n\n ip_addr,\n\n instruction_len as usize,\n", "file_path": "mythril/src/emulate/memio.rs", "rank": 24, "score": 96912.5253547259 }, { "content": "#[panic_handler]\n\n#[cfg(not(test))]\n\nfn panic_handler(info: &core::panic::PanicInfo) -> ! {\n\n if let Some(location) = info.location() {\n\n error!(\n\n \"Panic in {} at ({}, {}):\",\n\n location.file(),\n\n location.line(),\n\n location.column()\n\n );\n\n if let Some(message) = info.message() {\n\n error!(\"{}\", message);\n\n }\n\n }\n\n\n\n loop {\n\n unsafe {\n\n // Try to at least keep CPU from running at 100%\n\n llvm_asm!(\"hlt\" :::: \"volatile\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "mythril/src/error.rs", "rank": 25, "score": 93774.56202396471 }, { "content": "/// Returns whether the global system `TimeSource` has be initialized.\n\npub fn is_global_time_ready() -> bool {\n\n unsafe { TIME_SRC.is_some() }\n\n}\n\n\n", "file_path": "mythril/src/time.rs", "rank": 26, "score": 92526.45136306784 }, { "content": "/// Get the instant the system was started (approximately) in terms\n\n/// of the global system `TimeSource`.\n\npub fn system_start_time() -> Instant {\n\n unsafe { START_TIME.expect(\"Global time source is not started\") }\n\n}\n\n\n", "file_path": "mythril/src/time.rs", "rank": 27, "score": 92526.45136306784 }, { "content": "fn vmcs_read(field: VmcsField) -> Result<u64> {\n\n let value = unsafe {\n\n let value: u64;\n\n llvm_asm!(\"vmreadq %rdx, %rax\"\n\n : \"={rax}\"(value)\n\n : \"{rdx}\"(field as u64)\n\n : \"rflags\"\n\n : \"volatile\");\n\n value\n\n };\n\n\n\n Ok(value)\n\n}\n\n\n", "file_path": "mythril/src/vmcs.rs", "rank": 28, "score": 82532.18310207274 }, { "content": "/// Cancel a timer set 
on this core\n\npub fn cancel_timer(id: &TimerId) -> Result<()> {\n\n let wheel = unsafe { get_timer_wheel_mut() };\n\n wheel.remove_timer(id);\n\n Ok(())\n\n}\n\n\n", "file_path": "mythril/src/time.rs", "rank": 29, "score": 81902.18298958094 }, { "content": "pub fn write_console(s: impl AsRef<str>) {\n\n let lock = LOG_LOCK.lock();\n\n unsafe { raw_write_console(s) };\n\n drop(lock)\n\n}\n\n\n\n// NOTE: the caller should hold `LOG_LOCK`\n\npub unsafe fn raw_write_console(s: impl AsRef<str>) {\n\n // mirror console output to VGA\n\n VGA_WRITER.write(s.as_ref());\n\n\n\n //FIXME: what about addresses above 4GB?\n\n let len = s.as_ref().len();\n\n let ptr = s.as_ref().as_ptr();\n\n\n\n llvm_asm!(\"cld; rep outsb\"\n\n :\n\n :\"{rdx}\"(0x3f8), \"{rcx}\"(len as u64), \"{rsi}\"(ptr as u64)\n\n : \"rflags\", \"rsi\"\n\n : \"volatile\");\n", "file_path": "mythril/src/logger.rs", "rank": 30, "score": 81557.34192307801 }, { "content": "/// Delay current core for the provided `duration`\n\npub fn busy_wait(duration: core::time::Duration) {\n\n let start = now();\n\n while now() < start + duration {\n\n unsafe {\n\n // Relax the cpu\n\n llvm_asm!(\"rep; nop\" ::: \"memory\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "mythril/src/time.rs", "rank": 31, "score": 79513.68601612993 }, { "content": "fn vmcs_write(field: VmcsField, value: u64) -> Result<()> {\n\n let rflags = unsafe {\n\n let rflags: u64;\n\n llvm_asm!(\"vmwrite %rdx, %rax; pushfq; popq $0\"\n\n : \"=r\"(rflags)\n\n : \"{rdx}\"(value), \"{rax}\"(field as u64)\n\n : \"rflags\"\n\n : \"volatile\");\n\n rflags\n\n };\n\n\n\n error::check_vm_insruction(\n\n rflags,\n\n format!(\"Failed to write 0x{:x} to field {:?}\", value, field),\n\n )\n\n}\n\n\n", "file_path": "mythril/src/vmcs.rs", "rank": 32, "score": 78197.69632905346 }, { "content": "#[alloc_error_handler]\n\n#[cfg(not(test))]\n\nfn out_of_memory(layout: ::core::alloc::Layout) -> ! 
{\n\n panic!(\n\n \"Ran out of free memory while trying to allocate {:#?}\",\n\n layout\n\n );\n\n}\n", "file_path": "mythril/src/error.rs", "rank": 33, "score": 71009.84354967081 }, { "content": "pub fn clear_vga(vga_mem: &mut [[u16; VGA_WIDTH]; VGA_HEIGHT]) {\n\n for row in 0..VGA_HEIGHT {\n\n clear_line_vga(row, vga_mem);\n\n }\n\n}\n\n\n", "file_path": "mythril/src/logger.rs", "rank": 34, "score": 70633.77134803926 }, { "content": "fn main() {\n\n // We must _not_ do this build under the test setup, because that\n\n // will produce a `_start` symbol that will conflict with the one\n\n // provided by the unittest harness.\n\n if cfg!(feature = \"test\") {\n\n return;\n\n }\n\n\n\n let mut build = nasm_rs::Build::new();\n\n build\n\n .file(\"src/vm.S\")\n\n .file(\"src/boot.S\")\n\n .file(\"src/multiboot2_header.S\")\n\n .file(\"src/ap_startup.S\")\n\n .include(\"asm_include/\")\n\n .target(\"x86_64-unknown-none\")\n\n .compile(\"vm\");\n\n println!(\"cargo:rustc-link-lib=static=vm\");\n\n}\n", "file_path": "mythril/build.rs", "rank": 35, "score": 57272.67759602518 }, { "content": "// Temporary helper function to create a vm for a single core\n\nfn default_vm(\n\n core: usize,\n\n mem: u64,\n\n info: &BootInfo,\n\n) -> Arc<RwLock<vm::VirtualMachine>> {\n\n let mut config = vm::VirtualMachineConfig::new(vec![core as u8], mem);\n\n\n\n // FIXME: When `map_bios` may return an error, log the error.\n\n config.map_bios(\"seabios.bin\".into()).unwrap_or(());\n\n\n\n let device_map = config.device_map();\n\n device_map\n\n .register_device(device::acpi::AcpiRuntime::new(0xb000).unwrap())\n\n .unwrap();\n\n device_map\n\n .register_device(device::com::ComDevice::new(core as u64, 0x3F8))\n\n .unwrap();\n\n device_map\n\n .register_device(device::com::ComDevice::new(core as u64, 0x2F8))\n\n .unwrap();\n", "file_path": "mythril/src/kmain.rs", "rank": 36, "score": 54029.403480720895 }, { "content": "#[lang = \"eh_personality\"]\n\n#[cfg(not(test))]\n\nfn eh_personality() 
{}\n\n\n", "file_path": "mythril/src/error.rs", "rank": 37, "score": 54029.403480720895 }, { "content": "fn emulate_outs(\n\n vcpu: &mut vcpu::VCpu,\n\n port: Port,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n exit: vmexit::IoInstructionInformation,\n\n) -> Result<()> {\n\n let mut vm = vcpu.vm.write();\n\n\n\n let linear_addr =\n\n vcpu.vmcs.read_field(vmcs::VmcsField::GuestLinearAddress)?;\n\n let guest_addr = memory::GuestVirtAddr::new(linear_addr, &vcpu.vmcs)?;\n\n\n\n // FIXME: This could actually be any priv level due to IOPL, but for now\n\n // assume that is requires supervisor\n\n let access = memory::GuestAccess::Read(memory::PrivilegeLevel(0));\n\n\n\n let view = memory::GuestAddressSpaceViewMut::from_vmcs(\n\n &vcpu.vmcs,\n\n &mut vm.guest_space,\n\n )?;\n", "file_path": "mythril/src/emulate/portio.rs", "rank": 38, "score": 52618.5305792865 }, { "content": "fn emulate_ins(\n\n vcpu: &mut vcpu::VCpu,\n\n port: Port,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n exit: vmexit::IoInstructionInformation,\n\n) -> Result<()> {\n\n let mut vm = vcpu.vm.write();\n\n\n\n let linear_addr =\n\n vcpu.vmcs.read_field(vmcs::VmcsField::GuestLinearAddress)?;\n\n let guest_addr = memory::GuestVirtAddr::new(linear_addr, &vcpu.vmcs)?;\n\n let access = memory::GuestAccess::Read(memory::PrivilegeLevel(0));\n\n\n\n let mut bytes = vec![0u8; guest_cpu.rcx as usize];\n\n for chunk in bytes.chunks_exact_mut(exit.size as usize) {\n\n let request = PortReadRequest::try_from(chunk)?;\n\n vm.on_port_read(vcpu, port, request)?;\n\n }\n\n\n\n let mut view = memory::GuestAddressSpaceViewMut::from_vmcs(\n\n &vcpu.vmcs,\n\n &mut vm.guest_space,\n\n )?;\n\n view.write_bytes(guest_addr, &bytes, access)?;\n\n\n\n guest_cpu.rdi += bytes.len() as u64;\n\n guest_cpu.rcx = 0;\n\n Ok(())\n\n}\n\n\n", "file_path": "mythril/src/emulate/portio.rs", "rank": 39, "score": 52618.5305792865 }, { "content": "fn vmcs_write_with_fixed(\n\n field: VmcsField,\n\n value: u64,\n\n msr: u32,\n\n) -> 
Result<u64> {\n\n let mut required_value = value;\n\n let fixed = unsafe { rdmsr(msr) };\n\n let low = fixed & 0x00000000ffffffff;\n\n let high = fixed >> 32;\n\n\n\n required_value &= high; /* bit == 0 in high word ==> must be zero */\n\n required_value |= low; /* bit == 1 in low word ==> must be one */\n\n\n\n if (value & !required_value) != 0 {\n\n return Err(Error::Vmcs(format!(\n\n \"Requested field ({:?}) bit not allowed by MSR (requested=0x{:x} forbidden=0x{:x} required=0x{:x} res=0x{:x})\",\n\n field,\n\n value,\n\n high,\n\n low,\n\n required_value\n\n )));\n\n }\n\n\n\n vmcs_write(field, required_value)?;\n\n Ok(required_value)\n\n}\n\n\n", "file_path": "mythril/src/vmcs.rs", "rank": 40, "score": 52618.5305792865 }, { "content": "fn do_mmio_read(\n\n addr: memory::GuestPhysAddr,\n\n vcpu: &mut vcpu::VCpu,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n instr: iced_x86::Instruction,\n\n) -> Result<()> {\n\n let mut vm = vcpu.vm.write();\n\n\n\n match instr.op0_kind() {\n\n iced_x86::OpKind::Register => match instr.op_register(0) {\n\n iced_x86::Register::AL => {\n\n write_register!(vm, vcpu, addr, guest_cpu.rax, u8, !0xff)\n\n }\n\n iced_x86::Register::AX => {\n\n write_register!(vm, vcpu, addr, guest_cpu.rax, u16, !0xffff)\n\n }\n\n iced_x86::Register::EAX => {\n\n write_register!(vm, vcpu, addr, guest_cpu.rax, u32, !0xffffffff)\n\n }\n\n iced_x86::Register::RAX => {\n", "file_path": "mythril/src/emulate/memio.rs", "rank": 41, "score": 52618.5305792865 }, { "content": "fn do_mmio_write(\n\n addr: memory::GuestPhysAddr,\n\n vcpu: &mut vcpu::VCpu,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n instr: iced_x86::Instruction,\n\n) -> Result<()> {\n\n let mut res = ArrayVec::<[u8; 8]>::new();\n\n let data = match instr.op1_kind() {\n\n iced_x86::OpKind::Register => {\n\n let reg = instr.op_register(1);\n\n read_register_value(reg, &vcpu.vmcs, guest_cpu)?\n\n }\n\n iced_x86::OpKind::Immediate8 => {\n\n let value = instr.immediate8();\n\n 
res.try_extend_from_slice(&value.to_be_bytes()).unwrap();\n\n res\n\n }\n\n iced_x86::OpKind::Immediate16 => {\n\n let value = instr.immediate16();\n\n res.try_extend_from_slice(&value.to_be_bytes()).unwrap();\n", "file_path": "mythril/src/emulate/memio.rs", "rank": 42, "score": 52618.5305792865 }, { "content": "/// A trait representing a counter on the system with a consistent frequency.\n\npub trait TimeSource {\n\n /// The current value of the counter.\n\n fn now(&self) -> Instant;\n\n\n\n /// The frequency this counter increments in ticks per second.\n\n fn frequency(&self) -> u64;\n\n}\n\n\n", "file_path": "mythril/src/time.rs", "rank": 43, "score": 51972.58080229415 }, { "content": "fn read_register_value(\n\n register: iced_x86::Register,\n\n vmcs: &vmcs::ActiveVmcs,\n\n guest_cpu: &mut vmexit::GuestCpuState,\n\n) -> Result<ArrayVec<[u8; 8]>> {\n\n let mut res = ArrayVec::new();\n\n\n\n // TODO: we should probably support the AH style registers\n\n match register {\n\n iced_x86::Register::AL => read_register!(res, guest_cpu.rax, u8),\n\n iced_x86::Register::AX => read_register!(res, guest_cpu.rax, u16),\n\n iced_x86::Register::EAX => read_register!(res, guest_cpu.rax, u32),\n\n iced_x86::Register::RAX => read_register!(res, guest_cpu.rax, u64),\n\n\n\n iced_x86::Register::BL => read_register!(res, guest_cpu.rbx, u8),\n\n iced_x86::Register::BX => read_register!(res, guest_cpu.rbx, u16),\n\n iced_x86::Register::EBX => read_register!(res, guest_cpu.rbx, u32),\n\n iced_x86::Register::RBX => read_register!(res, guest_cpu.rbx, u64),\n\n\n\n iced_x86::Register::CL => read_register!(res, guest_cpu.rcx, u8),\n", "file_path": "mythril/src/emulate/memio.rs", "rank": 44, "score": 51324.82885895269 }, { "content": "pub trait ExtendedExitInformation\n\nwhere\n\n Self: core::marker::Sized,\n\n{\n\n fn from_active_vmcs(vmcs: &vmcs::ActiveVmcs) -> Result<Self>;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ExitReason {\n\n pub flags: ExitReasonFlags,\n\n pub info: 
ExitInformation,\n\n}\n\n\n\n// See Table C-1 in Appendix C\n\n#[derive(Clone, Debug)]\n\npub enum ExitInformation {\n\n NonMaskableInterrupt(VectoredEventInformation),\n\n ExternalInterrupt(VectoredEventInformation),\n\n TripleFault,\n\n InitSignal,\n", "file_path": "mythril/src/vmexit.rs", "rank": 45, "score": 50669.339833320555 }, { "content": "pub trait DeviceInteraction {\n\n fn find_device(self, map: &DeviceMap) -> Option<&Box<dyn EmulatedDevice>>;\n\n fn find_device_mut(\n\n self,\n\n map: &mut DeviceMap,\n\n ) -> Option<&mut Box<dyn EmulatedDevice>>;\n\n}\n\n\n\nimpl DeviceInteraction for u16 {\n\n fn find_device(self, map: &DeviceMap) -> Option<&Box<dyn EmulatedDevice>> {\n\n let range = PortIoRegion(RangeInclusive::new(self, self));\n\n map.portio_map.get(&range).map(|v| &**v)\n\n }\n\n fn find_device_mut(\n\n self,\n\n map: &mut DeviceMap,\n\n ) -> Option<&mut Box<dyn EmulatedDevice>> {\n\n let range = PortIoRegion(RangeInclusive::new(self, self));\n\n //NOTE: This is safe because all of the clones will exist in the same DeviceMap,\n\n // so there cannot be other outstanding references\n", "file_path": "mythril/src/device/mod.rs", "rank": 46, "score": 50669.339833320555 }, { "content": "pub trait EmulatedDevice {\n\n fn services(&self) -> Vec<DeviceRegion>;\n\n\n\n fn on_mem_read(\n\n &mut self,\n\n _addr: GuestPhysAddr,\n\n _data: MemReadRequest,\n\n _space: GuestAddressSpaceViewMut,\n\n ) -> Result<()> {\n\n Err(Error::NotImplemented(\n\n \"MemoryMapped device does not support reading\".into(),\n\n ))\n\n }\n\n fn on_mem_write(\n\n &mut self,\n\n _addr: GuestPhysAddr,\n\n _data: MemWriteRequest,\n\n _space: GuestAddressSpaceViewMut,\n\n ) -> Result<()> {\n\n Err(Error::NotImplemented(\n", "file_path": "mythril/src/device/mod.rs", "rank": 47, "score": 50669.339833320555 }, { "content": "fn vmcs_clear(vmcs_page: &mut Raw4kPage) -> Result<()> {\n\n let rflags = unsafe {\n\n let rflags: u64;\n\n llvm_asm!(\"vmclear $1; pushfq; popq $0\"\n\n : 
\"=r\"(rflags)\n\n : \"m\"(vmcs_page as *const _ as u64)\n\n : \"rflags\"\n\n : \"volatile\");\n\n rflags\n\n };\n\n error::check_vm_insruction(rflags, \"Failed to clear VMCS\".into())\n\n}\n\n\n\npub struct Vmcs {\n\n frame: Box<Raw4kPage>,\n\n}\n\n\n\nimpl Vmcs {\n\n pub fn new() -> Result<Self> {\n\n Ok(Vmcs {\n", "file_path": "mythril/src/vmcs.rs", "rank": 48, "score": 39247.82055106169 }, { "content": "fn scroll_vga(vga_mem: &mut [[u16; VGA_WIDTH]; VGA_HEIGHT]) {\n\n for row in 1..VGA_HEIGHT {\n\n for col in 0..VGA_WIDTH {\n\n vga_mem[row - 1][col] = vga_mem[row][col];\n\n }\n\n }\n\n clear_line_vga(VGA_HEIGHT - 1, vga_mem);\n\n}\n\n\n", "file_path": "mythril/src/logger.rs", "rank": 49, "score": 36406.13749730311 }, { "content": " host_frame: HostPhysFrame,\n\n readonly: bool,\n\n ) -> Result<()> {\n\n map_guest_memory(&mut self.root, guest_addr, host_frame, readonly)\n\n }\n\n\n\n pub fn map_new_frame(\n\n &mut self,\n\n guest_addr: GuestPhysAddr,\n\n readonly: bool,\n\n ) -> Result<()> {\n\n let page = Box::into_raw(Box::new(Raw4kPage::default()));\n\n let page =\n\n HostPhysFrame::from_start_address(HostPhysAddr::new(page as u64))?;\n\n self.map_frame(guest_addr, page, readonly)\n\n }\n\n\n\n pub fn eptp(&self) -> u64 {\n\n // //TODO: check available memory types\n\n (&*self.root as *const _ as u64) | (4 - 1) << 3 | 6\n", "file_path": "mythril/src/memory.rs", "rank": 50, "score": 35861.114974328826 }, { "content": "\n\n pub fn addr(&self) -> HostPhysAddr {\n\n HostPhysAddr::new(self.entry & 0x000fffff_fffff000)\n\n }\n\n\n\n pub fn mem_type(&self) -> EptMemoryType {\n\n EptMemoryType::try_from(((self.entry & (0b111 << 5)) >> 5) as u8)\n\n .expect(\"Invalid EPT memory type\")\n\n }\n\n\n\n pub fn set_addr(&mut self, addr: HostPhysAddr, flags: EptTableFlags) {\n\n assert!(addr.is_frame_aligned());\n\n self.entry =\n\n (addr.as_u64()) | flags.bits() | ((self.mem_type() as u64) << 5);\n\n }\n\n\n\n pub fn set_flags(&mut self, flags: EptTableFlags) {\n\n 
self.entry = self.addr().as_u64()\n\n | flags.bits()\n\n | ((self.mem_type() as u64) << 5);\n", "file_path": "mythril/src/memory.rs", "rank": 51, "score": 35856.62394072431 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug)]\n\npub struct Guest4LevelPagingAddr(u64);\n\nimpl Guest4LevelPagingAddr {\n\n pub fn new(addr: u64) -> Self {\n\n Self(addr)\n\n }\n\n\n\n pub fn as_u64(&self) -> u64 {\n\n self.0\n\n }\n\n\n\n pub fn p1_index(&self) -> ux::u9 {\n\n pt_index(self.0)\n\n }\n\n\n\n pub fn p2_index(&self) -> ux::u9 {\n", "file_path": "mythril/src/memory.rs", "rank": 52, "score": 35856.525191375455 }, { "content": "\n\n pub fn start_address(&self) -> HostPhysAddr {\n\n self.0\n\n }\n\n\n\n pub unsafe fn as_array(&self) -> &[u8; Self::SIZE] {\n\n let ptr = self.0.as_u64() as *const [u8; Self::SIZE];\n\n ptr.as_ref().unwrap()\n\n }\n\n\n\n pub unsafe fn as_mut_array(&mut self) -> &mut [u8; Self::SIZE] {\n\n let ptr = self.0.as_u64() as *mut [u8; Self::SIZE];\n\n ptr.as_mut().unwrap()\n\n }\n\n}\n\n\n\npub struct GuestAddressSpace {\n\n root: Box<EptPml4Table>,\n\n}\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 53, "score": 35856.241290510414 }, { "content": "\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\npub struct HostPhysAddr(u64);\n\n\n\nimpl HostPhysAddr {\n\n pub fn new(addr: u64) -> Self {\n\n Self(addr)\n\n }\n\n\n\n pub fn as_u64(&self) -> u64 {\n\n self.0\n\n }\n\n\n\n pub fn is_frame_aligned(&self) -> bool {\n\n (self.0 & 0b111111111111) == 0\n\n }\n\n}\n\n\n\nimpl fmt::Debug for HostPhysAddr {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "mythril/src/memory.rs", "rank": 54, "score": 35856.193435738445 }, { "content": "#[derive(Copy, Clone, Debug)]\n\npub struct PrivilegeLevel(pub u8);\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum GuestAccess {\n\n Read(PrivilegeLevel),\n\n Write(PrivilegeLevel),\n\n Fetch(PrivilegeLevel),\n\n}\n\n\n\nimpl 
GuestAddressSpace {\n\n pub fn new() -> Result<Self> {\n\n Ok(GuestAddressSpace {\n\n root: Box::new(EptPml4Table::default()),\n\n })\n\n }\n\n\n\n pub fn map_frame(\n\n &mut self,\n\n guest_addr: GuestPhysAddr,\n", "file_path": "mythril/src/memory.rs", "rank": 55, "score": 35855.1209096333 }, { "content": " }\n\n\n\n pub fn set_mem_type(&mut self, mem_type: EptMemoryType) {\n\n self.entry &= !(0b111u64 << 5);\n\n self.entry |= ((mem_type as u8) << 5) as u64;\n\n }\n\n}\n\n\n\nbitflags! {\n\n //NOTE: Not all flags are valid for all tables\n\n pub struct EptTableFlags: u64 {\n\n const READ_ACCESS = 1 << 0;\n\n const WRITE_ACCESS = 1 << 1;\n\n const PRIV_EXEC_ACCESS = 1 << 2;\n\n const IGNORE_PAT = 1 << 6;\n\n const ACCESSED = 1 << 8;\n\n const DIRTY = 1 << 9;\n\n const USERMODE_EXEC_ACCESS = 1 << 10;\n\n const SUPRESS_VE = 1 << 63;\n\n }\n", "file_path": "mythril/src/memory.rs", "rank": 56, "score": 35854.6579393167 }, { "content": " let guest_pdpte_addr =\n\n GuestPhysAddr::new(guest_pdpte.address().as_u64());\n\n let guest_pdpte_host_frame = self.find_host_frame(guest_pdpte_addr)?;\n\n\n\n let guest_pdt =\n\n guest_pdpte_host_frame.start_address().as_u64() as *const PD;\n\n let guest_pdte =\n\n unsafe { (*guest_pdt)[u16::from(addr.p2_index()) as usize] };\n\n let _guest_pdte_addr =\n\n GuestPhysAddr::new(guest_pdte.address().as_u64());\n\n\n\n let translated_vaddr = guest_pdte.address().as_u64()\n\n + (u32::from(addr.large_page_offset()) as u64);\n\n\n\n Ok(GuestPhysAddr::new(translated_vaddr))\n\n }\n\n\n\n //FIXME this ignores read/write/exec permissions and 2MB/1GB pages (and lots of other stuff)\n\n pub fn find_host_frame(\n\n &self,\n", "file_path": "mythril/src/memory.rs", "rank": 57, "score": 35854.57486471105 }, { "content": " self.entry = (addr.as_u64()) | flags.bits();\n\n }\n\n\n\n pub fn set_flags(&mut self, flags: EptTableFlags) {\n\n self.entry = self.addr().as_u64() | flags.bits();\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, 
TryFromPrimitive)]\n\n#[repr(u8)]\n\npub enum EptMemoryType {\n\n Uncacheable = 0,\n\n WriteCache = 1,\n\n WriteThrough = 4,\n\n WriteP = 5, // I can't find an expansion of the 'WP' in the spec\n\n WriteBack = 6,\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\n#[repr(transparent)]\n", "file_path": "mythril/src/memory.rs", "rank": 58, "score": 35854.30334572946 }, { "content": " pub fn offset(&self) -> ux::u12 {\n\n page_offset(self.0)\n\n }\n\n}\n\n\n\nimpl Add<usize> for GuestPhysAddr {\n\n type Output = GuestPhysAddr;\n\n\n\n fn add(self, rhs: usize) -> Self::Output {\n\n GuestPhysAddr(self.0 + (rhs as u64))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for GuestPhysAddr {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_tuple(\"GuestPhysAddr\")\n\n .field(&format_args!(\"0x{:x}\", self.0))\n\n .finish()\n\n }\n\n}\n", "file_path": "mythril/src/memory.rs", "rank": 59, "score": 35853.40297413416 }, { "content": " pub fn as_u64(&self) -> u64 {\n\n self.0\n\n }\n\n\n\n pub fn p1_index(&self) -> ux::u9 {\n\n pt_index(self.0)\n\n }\n\n\n\n pub fn p2_index(&self) -> ux::u9 {\n\n pd_index(self.0)\n\n }\n\n\n\n pub fn p3_index(&self) -> ux::u9 {\n\n pdpt_index(self.0)\n\n }\n\n\n\n pub fn p4_index(&self) -> ux::u9 {\n\n pml4_index(self.0)\n\n }\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 60, "score": 35853.3863007857 }, { "content": " if !ept_pte.is_unused() {\n\n return Err(Error::DuplicateMapping(format!(\n\n \"Duplicate mapping for address 0x{:x}\",\n\n guest_addr.as_u64()\n\n )));\n\n }\n\n\n\n let mut page_flags = EptTableFlags::READ_ACCESS\n\n | EptTableFlags::PRIV_EXEC_ACCESS\n\n | EptTableFlags::USERMODE_EXEC_ACCESS\n\n | EptTableFlags::IGNORE_PAT;\n\n if !readonly {\n\n page_flags |= EptTableFlags::WRITE_ACCESS;\n\n }\n\n\n\n ept_pte.set_addr(host_frame.start_address(), page_flags);\n\n ept_pte.set_mem_type(EptMemoryType::WriteBack);\n\n\n\n Ok(())\n\n}\n", "file_path": "mythril/src/memory.rs", "rank": 61, "score": 35852.31631498585 }, { 
"content": "pub struct EptPageTableEntry {\n\n entry: u64,\n\n}\n\n\n\nimpl EptPageTableEntry {\n\n pub fn new() -> Self {\n\n Self { entry: 0 }\n\n }\n\n\n\n pub fn is_unused(&self) -> bool {\n\n self.entry == 0\n\n }\n\n\n\n pub fn set_unused(&mut self) {\n\n self.entry = 0;\n\n }\n\n\n\n pub fn flags(&self) -> EptTableFlags {\n\n EptTableFlags::from_bits_truncate(self.entry)\n\n }\n", "file_path": "mythril/src/memory.rs", "rank": 62, "score": 35852.11960423405 }, { "content": " huge_page_offset(self.0)\n\n }\n\n}\n\n\n\nimpl Add<usize> for Guest4LevelPagingAddr {\n\n type Output = Guest4LevelPagingAddr;\n\n\n\n fn add(self, rhs: usize) -> Self::Output {\n\n Guest4LevelPagingAddr(self.0 + (rhs as u64))\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\npub struct GuestPhysAddr(u64);\n\n\n\nimpl GuestPhysAddr {\n\n pub fn new(addr: u64) -> Self {\n\n Self(addr)\n\n }\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 63, "score": 35852.05046708215 }, { "content": "use crate::error::{Error, Result};\n\nuse crate::vmcs;\n\nuse alloc::boxed::Box;\n\nuse alloc::vec::Vec;\n\nuse bitflags::bitflags;\n\nuse core::borrow::{Borrow, BorrowMut};\n\nuse core::convert::TryFrom;\n\nuse core::default::Default;\n\nuse core::fmt;\n\nuse core::ops::{Add, Deref, Index, IndexMut};\n\nuse num_enum::TryFromPrimitive;\n\nuse ux;\n\nuse x86::bits64::paging::*;\n\nuse x86::controlregs::Cr0;\n\n\n\n#[repr(align(4096))]\n\npub struct Raw4kPage([u8; 4096]);\n\nimpl Default for Raw4kPage {\n\n fn default() -> Self {\n\n Raw4kPage([0u8; 4096])\n\n }\n\n}\n\n\n\n#[inline]\n", "file_path": "mythril/src/memory.rs", "rank": 64, "score": 35851.88452335436 }, { "content": " f.debug_tuple(\"HostPhysAddr\")\n\n .field(&format_args!(\"0x{:x}\", self.0))\n\n .finish()\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug)]\n\npub struct HostPhysFrame(HostPhysAddr);\n\nimpl HostPhysFrame {\n\n pub const SIZE: usize = 4096;\n\n\n\n pub fn 
from_start_address(addr: HostPhysAddr) -> Result<Self> {\n\n if !addr.is_frame_aligned() {\n\n Err(Error::InvalidValue(\n\n \"Invalid start address for HostPhysFrame\".into(),\n\n ))\n\n } else {\n\n Ok(HostPhysFrame(addr))\n\n }\n\n }\n", "file_path": "mythril/src/memory.rs", "rank": 65, "score": 35851.73203154027 }, { "content": " ept_pml4e.addr().as_u64() as *mut EptPageDirectoryPointerTable;\n\n let ept_pdpe = unsafe { &mut (*ept_pdpt)[guest_addr.p3_index()] };\n\n if ept_pdpe.is_unused() {\n\n let ept_pdt_frame =\n\n Box::into_raw(Box::new(EptPageDirectory::default()));\n\n let ept_pdt_addr = HostPhysAddr::new(ept_pdt_frame as u64);\n\n ept_pdpe.set_addr(ept_pdt_addr, default_flags);\n\n }\n\n\n\n let ept_pdt = ept_pdpe.addr().as_u64() as *mut EptPageDirectory;\n\n let ept_pde = unsafe { &mut (*ept_pdt)[guest_addr.p2_index()] };\n\n if ept_pde.is_unused() {\n\n let ept_pt_frame = Box::into_raw(Box::new(EptPageTable::default()));\n\n let ept_pt_addr = HostPhysAddr::new(ept_pt_frame as u64);\n\n ept_pde.set_addr(ept_pt_addr, default_flags);\n\n }\n\n\n\n let ept_pt = ept_pde.addr().as_u64() as *mut EptPageTable;\n\n let ept_pte = unsafe { &mut (*ept_pt)[guest_addr.p1_index()] };\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 66, "score": 35851.38234792477 }, { "content": " \"No PD entry for GuestPhysAddr\".into(),\n\n ));\n\n }\n\n let ept_pt = ept_pde.addr().as_u64() as *const EptPageTable;\n\n let ept_pte = unsafe { &(*ept_pt)[addr.p1_index()] };\n\n if ept_pte.is_unused() {\n\n return Err(Error::InvalidValue(\n\n \"No PT entry for GuestPhysAddr\".into(),\n\n ));\n\n }\n\n HostPhysFrame::from_start_address(ept_pte.addr())\n\n }\n\n\n\n pub fn frame_iter(\n\n &self,\n\n cr3: GuestPhysAddr,\n\n addr: GuestVirtAddr,\n\n access: GuestAccess,\n\n ) -> Result<FrameIter> {\n\n //TODO: align the addr to 4096 boundary\n", "file_path": "mythril/src/memory.rs", "rank": 67, "score": 35851.37518578689 }, { "content": " } else {\n\n 
Ok(GuestVirtAddr::NoPaging(GuestPhysAddr::new(val)))\n\n }\n\n }\n\n\n\n pub fn as_u64(&self) -> u64 {\n\n match self {\n\n Self::NoPaging(addr) => addr.as_u64(),\n\n Self::Paging4Level(addr) => addr.as_u64(),\n\n }\n\n }\n\n}\n\n\n\nimpl Add<usize> for GuestVirtAddr {\n\n type Output = GuestVirtAddr;\n\n\n\n fn add(self, rhs: usize) -> Self::Output {\n\n match self {\n\n Self::NoPaging(addr) => Self::NoPaging(addr + rhs),\n\n Self::Paging4Level(addr) => Self::Paging4Level(addr + rhs),\n", "file_path": "mythril/src/memory.rs", "rank": 68, "score": 35851.20095258512 }, { "content": " fn translate_pl4_address(\n\n &self,\n\n cr3: GuestPhysAddr,\n\n addr: Guest4LevelPagingAddr,\n\n _access: GuestAccess,\n\n ) -> Result<GuestPhysAddr> {\n\n let guest_pml4_root = self.find_host_frame(cr3)?;\n\n\n\n let guest_pml4 =\n\n guest_pml4_root.start_address().as_u64() as *const PML4;\n\n let guest_pml4e =\n\n unsafe { (*guest_pml4)[u16::from(addr.p4_index()) as usize] };\n\n let guest_pml4e_addr =\n\n GuestPhysAddr::new(guest_pml4e.address().as_u64());\n\n let guest_pml4e_host_frame = self.find_host_frame(guest_pml4e_addr)?;\n\n\n\n let guest_pdpt =\n\n guest_pml4e_host_frame.start_address().as_u64() as *const PDPT;\n\n let guest_pdpte =\n\n unsafe { (*guest_pdpt)[u16::from(addr.p3_index()) as usize] };\n", "file_path": "mythril/src/memory.rs", "rank": 69, "score": 35851.065995322555 }, { "content": " addr: GuestPhysAddr,\n\n ) -> Result<HostPhysFrame> {\n\n let ept_pml4e = &self.root[addr.p4_index()];\n\n if ept_pml4e.is_unused() {\n\n return Err(Error::InvalidValue(\n\n \"No PML4 entry for GuestPhysAddr\".into(),\n\n ));\n\n }\n\n let ept_pdpt =\n\n ept_pml4e.addr().as_u64() as *const EptPageDirectoryPointerTable;\n\n let ept_pdpe = unsafe { &(*ept_pdpt)[addr.p3_index()] };\n\n if ept_pdpe.is_unused() {\n\n return Err(Error::InvalidValue(\n\n \"No PDP entry for GuestPhysAddr\".into(),\n\n ));\n\n }\n\n let ept_pdt = ept_pdpe.addr().as_u64() as *const EptPageDirectory;\n\n let 
ept_pde = unsafe { &(*ept_pdt)[addr.p2_index()] };\n\n if ept_pde.is_unused() {\n\n return Err(Error::InvalidValue(\n", "file_path": "mythril/src/memory.rs", "rank": 70, "score": 35850.61994036497 }, { "content": " pub fn write_bytes(\n\n &mut self,\n\n cr3: GuestPhysAddr,\n\n addr: GuestVirtAddr,\n\n mut bytes: &[u8],\n\n access: GuestAccess,\n\n ) -> Result<()> {\n\n let iter = self.frame_iter(cr3, addr, access)?;\n\n\n\n // How many frames this region spans\n\n let count =\n\n (bytes.len() + HostPhysFrame::SIZE - 1) / HostPhysFrame::SIZE;\n\n\n\n let mut start_offset = addr.as_u64() as usize % HostPhysFrame::SIZE;\n\n for frame in iter.take(count) {\n\n let mut frame = frame?;\n\n let array = unsafe { frame.as_mut_array() };\n\n let _slice = if start_offset + bytes.len() <= HostPhysFrame::SIZE {\n\n array[start_offset..start_offset + bytes.len()]\n\n .copy_from_slice(&bytes);\n", "file_path": "mythril/src/memory.rs", "rank": 71, "score": 35850.521170920634 }, { "content": "\n\n fn index(&self, index: ux::u9) -> &Self::Output {\n\n &self.entries[u16::from(index) as usize]\n\n }\n\n}\n\n\n\nimpl<T> IndexMut<ux::u9> for EptTable<T> {\n\n fn index_mut(&mut self, index: ux::u9) -> &mut Self::Output {\n\n &mut self.entries[u16::from(index) as usize]\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\n#[repr(transparent)]\n\npub struct EptTableEntry {\n\n entry: u64,\n\n}\n\n\n\nimpl EptTableEntry {\n\n pub fn new() -> Self {\n", "file_path": "mythril/src/memory.rs", "rank": 72, "score": 35850.42846735125 }, { "content": " let mut start_offset = addr.as_u64() as usize % HostPhysFrame::SIZE;\n\n for frame in iter.take(count) {\n\n let frame = frame?;\n\n let array = unsafe { frame.as_array() };\n\n let slice = if start_offset + length <= HostPhysFrame::SIZE {\n\n &array[start_offset..start_offset + length]\n\n } else {\n\n &array[start_offset..]\n\n };\n\n out.extend_from_slice(slice);\n\n\n\n length -= slice.len();\n\n\n\n // All frames after the first have no 
start_offset\n\n start_offset = 0;\n\n }\n\n\n\n Ok(out)\n\n }\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 73, "score": 35849.71592236622 }, { "content": " }\n\n\n\n pub fn translate_linear_address(\n\n &self,\n\n cr3: GuestPhysAddr,\n\n addr: GuestVirtAddr,\n\n access: GuestAccess,\n\n ) -> Result<GuestPhysAddr> {\n\n match addr {\n\n GuestVirtAddr::NoPaging(addr) => {\n\n Ok(GuestPhysAddr::new(addr.as_u64()))\n\n }\n\n GuestVirtAddr::Paging4Level(vaddr) => {\n\n self.translate_pl4_address(cr3, vaddr, access)\n\n }\n\n }\n\n }\n\n\n\n //FIXME: this should check that the pages exist, access restrictions, guest page size,\n\n // and lots of other things\n", "file_path": "mythril/src/memory.rs", "rank": 74, "score": 35849.288456600465 }, { "content": " pd_index(self.0)\n\n }\n\n\n\n pub fn p3_index(&self) -> ux::u9 {\n\n pdpt_index(self.0)\n\n }\n\n\n\n pub fn p4_index(&self) -> ux::u9 {\n\n pml4_index(self.0)\n\n }\n\n\n\n pub fn page_offset(&self) -> ux::u12 {\n\n page_offset(self.0)\n\n }\n\n\n\n pub fn large_page_offset(&self) -> ux::u21 {\n\n large_page_offset(self.0)\n\n }\n\n\n\n pub fn huge_page_offset(&self) -> ux::u30 {\n", "file_path": "mythril/src/memory.rs", "rank": 75, "score": 35849.19798743609 }, { "content": "}\n\n\n\npub type EptPml4Entry = EptTableEntry;\n\npub type EptPageDirectoryPointerEntry = EptTableEntry;\n\npub type EptPageDirectoryEntry = EptTableEntry;\n\n\n\npub type EptPml4Table = EptTable<EptPml4Entry>;\n\npub type EptPageDirectoryPointerTable = EptTable<EptPageDirectoryPointerEntry>;\n\npub type EptPageDirectory = EptTable<EptPageDirectoryEntry>;\n\npub type EptPageTable = EptTable<EptPageTableEntry>;\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 76, "score": 35849.1210649201 }, { "content": " Self { entry: 0 }\n\n }\n\n\n\n pub fn is_unused(&self) -> bool {\n\n self.entry == 0\n\n }\n\n\n\n pub fn set_unused(&mut self) {\n\n self.entry = 0;\n\n }\n\n\n\n pub fn flags(&self) -> EptTableFlags {\n\n 
EptTableFlags::from_bits_truncate(self.entry)\n\n }\n\n\n\n pub fn addr(&self) -> HostPhysAddr {\n\n HostPhysAddr::new(self.entry & 0x000fffff_fffff000)\n\n }\n\n\n\n pub fn set_addr(&mut self, addr: HostPhysAddr, flags: EptTableFlags) {\n", "file_path": "mythril/src/memory.rs", "rank": 77, "score": 35849.07519045199 }, { "content": " pub fn frame_iter(\n\n &self,\n\n addr: GuestVirtAddr,\n\n access: GuestAccess,\n\n ) -> Result<FrameIter> {\n\n self.space.borrow().frame_iter(self.cr3, addr, access)\n\n }\n\n\n\n pub fn read_bytes(\n\n &self,\n\n addr: GuestVirtAddr,\n\n length: usize,\n\n access: GuestAccess,\n\n ) -> Result<Vec<u8>> {\n\n self.space\n\n .borrow()\n\n .read_bytes(self.cr3, addr, length, access)\n\n }\n\n\n\n pub fn translate_linear_address(\n", "file_path": "mythril/src/memory.rs", "rank": 78, "score": 35848.52247892938 }, { "content": "\n\npub struct GuestAddressSpaceWrapper<T> {\n\n space: T,\n\n cr3: GuestPhysAddr,\n\n}\n\n\n\nimpl<T> GuestAddressSpaceWrapper<T>\n\nwhere\n\n T: Borrow<GuestAddressSpace>,\n\n{\n\n pub fn new(cr3: GuestPhysAddr, space: T) -> Self {\n\n Self { space, cr3 }\n\n }\n\n\n\n pub fn from_vmcs(vmcs: &vmcs::ActiveVmcs, space: T) -> Result<Self> {\n\n let cr3 = vmcs.read_field(vmcs::VmcsField::GuestCr3)?;\n\n let cr3 = GuestPhysAddr::new(cr3);\n\n Ok(Self { space, cr3 })\n\n }\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 79, "score": 35848.20393245096 }, { "content": " break;\n\n } else {\n\n &array[start_offset..].copy_from_slice(\n\n &bytes[..(HostPhysFrame::SIZE - start_offset)],\n\n );\n\n bytes = &bytes[(HostPhysFrame::SIZE - start_offset)..];\n\n };\n\n\n\n // All frames after the first have no start_offset\n\n start_offset = 0;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\npub type GuestAddressSpaceView<'a> =\n\n GuestAddressSpaceWrapper<&'a GuestAddressSpace>;\n\npub type GuestAddressSpaceViewMut<'a> =\n\n GuestAddressSpaceWrapper<&'a mut GuestAddressSpace>;\n", "file_path": "mythril/src/memory.rs", "rank": 80, 
"score": 35847.85966931779 }, { "content": " }\n\n}\n\n\n\n#[repr(align(4096))]\n\npub struct EptTable<T> {\n\n entries: [T; 512],\n\n}\n\nimpl<T> Default for EptTable<T>\n\nwhere\n\n T: Copy + Default,\n\n{\n\n fn default() -> Self {\n\n Self {\n\n entries: [T::default(); 512],\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Index<ux::u9> for EptTable<T> {\n\n type Output = T;\n", "file_path": "mythril/src/memory.rs", "rank": 81, "score": 35847.39882606559 }, { "content": " self.space\n\n .borrow_mut()\n\n .write_bytes(self.cr3, addr, bytes, access)\n\n }\n\n}\n\n\n\nimpl<T> Deref for GuestAddressSpaceWrapper<T>\n\nwhere\n\n T: Borrow<GuestAddressSpace>,\n\n{\n\n type Target = T;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.space\n\n }\n\n}\n\n\n\npub struct FrameIter<'a> {\n\n view: GuestAddressSpaceView<'a>,\n\n addr: GuestVirtAddr,\n", "file_path": "mythril/src/memory.rs", "rank": 82, "score": 35847.204185638926 }, { "content": " &self,\n\n addr: GuestVirtAddr,\n\n access: GuestAccess,\n\n ) -> Result<GuestPhysAddr> {\n\n self.space\n\n .borrow()\n\n .translate_linear_address(self.cr3, addr, access)\n\n }\n\n}\n\n\n\nimpl<T> GuestAddressSpaceWrapper<T>\n\nwhere\n\n T: BorrowMut<GuestAddressSpace>,\n\n{\n\n pub fn write_bytes(\n\n &mut self,\n\n addr: GuestVirtAddr,\n\n bytes: &[u8],\n\n access: GuestAccess,\n\n ) -> Result<()> {\n", "file_path": "mythril/src/memory.rs", "rank": 83, "score": 35847.057172441855 }, { "content": " Ok(FrameIter {\n\n view: GuestAddressSpaceView::new(cr3, self),\n\n addr: addr,\n\n access: access,\n\n })\n\n }\n\n\n\n pub fn read_bytes(\n\n &self,\n\n cr3: GuestPhysAddr,\n\n addr: GuestVirtAddr,\n\n mut length: usize,\n\n access: GuestAccess,\n\n ) -> Result<Vec<u8>> {\n\n let mut out = vec![];\n\n let iter = self.frame_iter(cr3, addr, access)?;\n\n\n\n // How many frames this region spans\n\n let count = (length + HostPhysFrame::SIZE - 1) / HostPhysFrame::SIZE;\n\n\n", "file_path": "mythril/src/memory.rs", "rank": 84, "score": 
35846.84497889729 }, { "content": " access: GuestAccess,\n\n}\n\n\n\nimpl<'a> Iterator for FrameIter<'a> {\n\n type Item = Result<HostPhysFrame>;\n\n\n\n //TODO: stop at end of address space\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let old = self.addr;\n\n\n\n // This is the smallest possible guest page size, so permissions\n\n // can't change except at this granularity\n\n self.addr = self.addr + 4096;\n\n\n\n let physaddr =\n\n match self.view.translate_linear_address(old, self.access) {\n\n Ok(addr) => addr,\n\n Err(e) => return Some(Err(e)),\n\n };\n\n Some(self.view.find_host_frame(physaddr))\n", "file_path": "mythril/src/memory.rs", "rank": 85, "score": 35844.52317112134 }, { "content": "fn vmcs_activate(vmcs: &mut Vmcs, _vmx: &vmx::Vmx) -> Result<()> {\n\n let revision_id = vmx::Vmx::revision();\n\n let vmcs_region_addr = &mut *vmcs.frame as *mut Raw4kPage;\n\n let region_revision = vmcs_region_addr as *mut u32;\n\n unsafe {\n\n *region_revision = revision_id;\n\n }\n\n let rflags = unsafe {\n\n let rflags: u64;\n\n llvm_asm!(\"vmptrld $1; pushfq; popq $0\"\n\n : \"=r\"(rflags)\n\n : \"m\"(vmcs_region_addr)\n\n : \"rflags\");\n\n rflags\n\n };\n\n\n\n error::check_vm_insruction(rflags, \"Failed to activate VMCS\".into())\n\n}\n\n\n", "file_path": "mythril/src/vmcs.rs", "rank": 86, "score": 35438.97343504441 }, { "content": "use crate::device::{\n\n DeviceRegion, EmulatedDevice, Port, PortReadRequest, PortWriteRequest,\n\n};\n\nuse crate::error::Result;\n\nuse crate::logger;\n\nuse crate::memory::GuestAddressSpaceViewMut;\n\nuse alloc::boxed::Box;\n\nuse alloc::string::String;\n\nuse alloc::vec::Vec;\n\n\n\npub struct DebugPort {\n\n id: u64,\n\n port: Port,\n\n buff: Vec<u8>,\n\n}\n\n\n\nimpl DebugPort {\n\n pub fn new(vmid: u64, port: Port) -> Box<dyn EmulatedDevice> {\n\n Box::new(Self {\n\n port,\n", "file_path": "mythril/src/device/debug.rs", "rank": 87, "score": 34610.57152125444 }, { "content": " buff: vec![],\n\n id: vmid,\n\n })\n\n 
}\n\n}\n\n\n\nimpl EmulatedDevice for DebugPort {\n\n fn services(&self) -> Vec<DeviceRegion> {\n\n vec![DeviceRegion::PortIo(self.port..=self.port)]\n\n }\n\n\n\n fn on_port_read(\n\n &mut self,\n\n _port: Port,\n\n mut val: PortReadRequest,\n\n _space: GuestAddressSpaceViewMut,\n\n ) -> Result<()> {\n\n // This is a magical value (called BOCHS_DEBUG_PORT_MAGIC by edk2)\n\n val.copy_from_u32(0xe9);\n\n Ok(())\n", "file_path": "mythril/src/device/debug.rs", "rank": 88, "score": 34600.07095833657 }, { "content": " }\n\n\n\n fn on_port_write(\n\n &mut self,\n\n _port: Port,\n\n val: PortWriteRequest,\n\n _space: GuestAddressSpaceViewMut,\n\n ) -> Result<()> {\n\n self.buff.extend_from_slice(val.as_slice());\n\n\n\n // Flush on newlines\n\n if val.as_slice().iter().filter(|b| **b == 10).next().is_some() {\n\n let s = String::from_utf8_lossy(&self.buff);\n\n\n\n logger::write_console(&format!(\"GUEST{}: {}\", self.id, s));\n\n self.buff.clear();\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "mythril/src/device/debug.rs", "rank": 89, "score": 34593.814690022016 }, { "content": "use crate::acpi;\n\nuse crate::memory::HostPhysAddr;\n\nuse alloc::{string::String, vec::Vec};\n\n\n\n/// The abstract 'info' provided by the boot environment. 
This could be\n\n/// bios-multiboot, bios-multiboot2, efi-multiboot2, etc.\n\n///\n\n#[derive(Default)]\n\npub struct BootInfo {\n\n pub modules: Vec<BootModule>,\n\n pub rsdp: Option<acpi::rsdp::RSDP>,\n\n}\n\n\n\nimpl BootInfo {\n\n pub fn find_module(&self, ident: impl AsRef<str>) -> Option<&BootModule> {\n\n self.modules\n\n .iter()\n\n .filter(|module| {\n\n if let Some(id) = &module.identifier {\n\n id == ident.as_ref()\n", "file_path": "mythril/src/boot_info.rs", "rank": 90, "score": 34572.84430151308 }, { "content": " } else {\n\n false\n\n }\n\n })\n\n .next()\n\n }\n\n}\n\n\n\npub struct BootModule {\n\n pub identifier: Option<String>,\n\n pub address: HostPhysAddr,\n\n pub size: usize,\n\n}\n\n\n\nimpl BootModule {\n\n pub fn data(&self) -> &[u8] {\n\n unsafe {\n\n core::slice::from_raw_parts(\n\n self.address.as_u64() as *const u8,\n\n self.size,\n\n )\n\n }\n\n }\n\n}\n", "file_path": "mythril/src/boot_info.rs", "rank": 91, "score": 34567.0484408309 }, { "content": "#[repr(C)]\n\n#[repr(packed)]\n\nstruct IdtInfo {\n\n limit: u16,\n\n base_addr: u64,\n\n}\n\n\n\npub struct IdtrBase;\n\nimpl IdtrBase {\n\n pub fn read() -> u64 {\n\n unsafe {\n\n let mut info = IdtInfo {\n\n limit: 0,\n\n base_addr: 0,\n\n };\n\n llvm_asm!(\"sidt ($0)\"\n\n :\n\n : \"r\"(&mut info)\n\n : \"memory\"\n\n : \"volatile\");\n\n info.base_addr\n\n }\n\n }\n\n}\n\n\n", "file_path": "mythril/src/registers.rs", "rank": 92, "score": 32991.68150714611 }, { "content": "#[repr(C)]\n\n#[repr(packed)]\n\nstruct GdtInfo {\n\n size: u16,\n\n offset: u64,\n\n}\n\n\n\npub struct GdtrBase;\n\nimpl GdtrBase {\n\n pub fn read() -> u64 {\n\n unsafe {\n\n let mut info = GdtInfo { size: 0, offset: 0 };\n\n llvm_asm!(\"sgdtq ($0)\"\n\n :\n\n : \"r\"(&mut info)\n\n : \"memory\"\n\n : \"volatile\");\n\n info.offset\n\n }\n\n }\n\n}\n", "file_path": "mythril/src/registers.rs", "rank": 93, "score": 32991.68150714611 }, { "content": "fn clear_line_vga(row: usize, vga_mem: &mut [[u16; VGA_WIDTH]; 
VGA_HEIGHT]) {\n\n for col in 0..VGA_WIDTH {\n\n (*vga_mem)[row][col] = VGA_ATTRIB | 0x20;\n\n }\n\n}\n\n\n", "file_path": "mythril/src/logger.rs", "rank": 94, "score": 32449.144677514043 }, { "content": "use crate::error::{self, Error, Result};\n\nuse crate::memory::GuestPhysAddr;\n\nuse crate::{vcpu, vmcs};\n\nuse alloc::fmt::Debug;\n\nuse bitflags::bitflags;\n\nuse core::convert::TryFrom;\n\nuse num_enum::TryFromPrimitive;\n\n\n\nextern \"C\" {\n\n pub fn vmexit_handler_wrapper();\n\n}\n\n\n\n#[repr(C)]\n\n#[repr(packed)]\n\n#[derive(Clone, Copy, Debug)]\n\npub struct GuestCpuState {\n\n pub cr2: u64,\n\n pub r15: u64,\n\n pub r14: u64,\n\n pub r13: u64,\n", "file_path": "mythril/src/vmexit.rs", "rank": 96, "score": 25.52549723469297 }, { "content": "use crate::acpi;\n\nuse crate::ap;\n\nuse crate::apic;\n\nuse crate::boot_info::BootInfo;\n\nuse crate::device;\n\nuse crate::interrupt;\n\nuse crate::linux;\n\nuse crate::logger;\n\nuse crate::memory;\n\nuse crate::multiboot2;\n\nuse crate::percore;\n\nuse crate::time;\n\nuse crate::vcpu;\n\nuse crate::vm;\n\n\n\nuse alloc::collections::BTreeMap;\n\nuse alloc::sync::Arc;\n\nuse alloc::vec::Vec;\n\nuse log::{debug, info};\n\nuse spin::RwLock;\n\n\n\nextern \"C\" {\n\n static AP_STARTUP_ADDR: u16;\n\n static mut AP_STACK_ADDR: u64;\n\n static mut AP_IDX: u64;\n\n static mut AP_READY: u8;\n\n}\n\n\n\n// Temporary helper function to create a vm for a single core\n", "file_path": "mythril/src/kmain.rs", "rank": 97, "score": 24.934401947323117 }, { "content": "\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn kmain_multiboot2(multiboot_info_addr: usize) -> ! {\n\n let boot_info = multiboot2::early_init_multiboot2(\n\n memory::HostPhysAddr::new(multiboot_info_addr as u64),\n\n );\n\n kmain(boot_info)\n\n}\n\n\n\nunsafe fn kmain(mut boot_info: BootInfo) -> ! 
{\n\n // Setup the actual interrupt handlers\n\n interrupt::idt::init();\n\n\n\n // Setup our (com0) logger\n\n log::set_logger(&LOGGER)\n\n .map(|()| log::set_max_level(log::LevelFilter::Info))\n\n .expect(\"Failed to set logger\");\n\n\n\n // Calibrate the global time source\n\n time::init_global_time().expect(\"Failed to init global timesource\");\n", "file_path": "mythril/src/kmain.rs", "rank": 99, "score": 23.899035321243975 } ]
Rust
bin/wayland/scanner/src/ast.rs
PowerOlive/garnet
16b5b38b765195699f41ccb6684cc58dd3512793
use crate::parser::{self, ArgKind}; #[derive(Debug)] pub struct Protocol { pub name: String, pub copyright: Option<String>, pub description: Option<Description>, pub interfaces: Vec<Interface>, } #[derive(Debug)] pub struct Description { pub summary: String, pub description: String, } #[derive(Debug)] pub struct Interface { pub name: String, pub version: u32, pub description: Option<Description>, pub requests: Vec<Message>, pub events: Vec<Message>, pub enums: Vec<Enum>, } #[derive(Debug)] pub struct Message { pub name: String, pub since: u32, pub request_type: Option<String>, pub description: Option<Description>, pub args: Vec<Arg>, } #[derive(Debug)] pub struct Arg { pub name: String, pub kind: ArgKind, pub summary: Option<String>, pub interface: Option<String>, pub nullable: bool, pub enum_type: Option<String>, pub description: Option<Description>, } #[derive(Debug)] pub struct Enum { pub name: String, pub since: u32, pub bitfield: bool, pub description: Option<Description>, pub entries: Vec<EnumEntry>, } #[derive(Debug)] pub struct EnumEntry { pub name: String, pub value: i64, pub summary: Option<String>, pub since: u32, pub description: Option<Description>, } pub type AstError = String; pub type AstResult<T> = Result<T, AstError>; fn build_protocol(node: parser::ParseNode) -> AstResult<Protocol> { if let parser::ParseElement::Protocol { name } = node.element { let mut copyright: Option<String> = None; let mut description: Option<Description> = None; let mut interfaces: Vec<Interface> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Copyright => copyright = Some(build_copyright(child)?), parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Interface { .. 
} => interfaces.push(build_interface(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Protocol { name: name, copyright, description, interfaces, }) } else { Err("Unexpected Element; expected Protocol".to_owned()) } } fn build_copyright(node: parser::ParseNode) -> AstResult<String> { if let Some(copyright) = node.body { Ok(copyright) } else { Err(format!("Unexpected node {:?}", node)) } } fn build_description(node: parser::ParseNode) -> AstResult<Description> { if let parser::ParseElement::Description { summary } = node.element { Ok(Description { summary, description: node.body.unwrap_or("".to_owned()), }) } else { Err("Invalid node".to_owned()) } } fn build_interface(node: parser::ParseNode) -> AstResult<Interface> { if let parser::ParseElement::Interface { name, version } = node.element { let mut description: Option<Description> = None; let mut requests: Vec<Message> = Vec::new(); let mut events: Vec<Message> = Vec::new(); let mut enums: Vec<Enum> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Request { .. } => requests.push(build_request(child)?), parser::ParseElement::Event { .. } => events.push(build_event(child)?), parser::ParseElement::Enum { .. } => enums.push(build_enum(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Interface { name, version, description, requests, events, enums, }) } else { Err("Invalid node".to_owned()) } } fn build_request(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Request { name, since, request_type, } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. 
} => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, request_type, description, args, }) } else { Err("Invalid node".to_owned()) } } fn build_event(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Event { name, since } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. } => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, description, args, request_type: None, }) } else { Err("Invalid node".to_owned()) } } fn build_arg(node: parser::ParseNode) -> AstResult<Vec<Arg>> { if let parser::ParseElement::Arg { name, kind, summary, interface, nullable, enum_type, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) 
} _ => return Err("Unsupported".to_owned()), } } let arg = Arg { name, kind, summary, interface, nullable, enum_type, description, }; if arg.kind == ArgKind::NewId && arg.interface.is_none() { Ok(vec![ Arg { name: format!("{}_interface_name", arg.name), kind: ArgKind::String, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, Arg { name: format!("{}_interface_version", arg.name), kind: ArgKind::Uint, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, arg, ]) } else { Ok(vec![arg]) } } else { Err("Invalid node".to_owned()) } } fn build_enum(node: parser::ParseNode) -> AstResult<Enum> { if let parser::ParseElement::Enum { name, since, bitfield, } = node.element { let mut description: Option<Description> = None; let mut entries: Vec<EnumEntry> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::EnumEntry { .. } => entries.push(build_enum_entry(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Enum { name, since, bitfield, description, entries, }) } else { Err("Invalid node".to_owned()) } } fn build_enum_entry(node: parser::ParseNode) -> AstResult<EnumEntry> { if let parser::ParseElement::EnumEntry { name, value, summary, since, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } _ => return Err("Unsupported".to_owned()), } } Ok(EnumEntry { name, value, summary, since, description, }) } else { Err("Invalid node".to_owned()) } } impl Protocol { pub fn from_parse_tree(parse_tree: parser::ParseNode) -> AstResult<Protocol> { build_protocol(parse_tree) } }
use crate::parser::{self, ArgKind}; #[derive(Debug)] pub struct Protocol { pub name: String, pub copyright: Option<String>, pub description: Option<Description>, pub interfaces: Vec<Interface>, } #[derive(Debug)] pub struct Description { pub summary: String, pub description: String, } #[derive(Debug)] pub struct Interface { pub name: String, pub version: u32, pub description: Option<Description>, pub requests: Vec<Message>, pub events: Vec<Message>, pub enums: Vec<Enum>, } #[derive(Debug)] pub struct Message { pub name: String, pub since: u32, pub request_type: Option<String>, pub description: Option<Description>, pub args: Vec<Arg>, } #[derive(Debug)] pub struct Arg { pub name: String, pub kind: ArgKind, pub summary: Option<String>, pub interface: Option<String>, pub nullable: bool, pub enum_type: Option<String>, pub description: Option<Description>, } #[derive(Debug)] pub struct Enum { pub name: String, pub since: u32, pub bitfield: bool, pub description: Option<Description>, pub entries: Vec<EnumEntry>, } #[derive(Debug)] pub struct EnumEntry { pub name: String, pub value: i64, pub summary: Option<String>, pub since: u32, pub description: Option<Description>, } pub type AstError = String; pub type AstResult<T> = Result<T, AstError>; fn build_protocol(node: parser::ParseNode) -> AstResult<Protocol> { if let parser::ParseElement::Protocol { name } = node.element { let mut copyright: Option<String> = None; let mut description: Option<Description> = None; let mut interfaces: Vec<Interface> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Copyright => copyright = Some(build_copyright(child)?), parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Interface { .. 
} => interfaces.push(build_interface(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Protocol { name: name, copyright, description, interfaces, }) } else { Err("Unexpected Element; expected Protocol".to_owned()) } } fn build_copyright(node: parser::ParseNode) -> AstResult<String> { if let Some(copyright) = node.body { Ok(copyright) } else { Err(format!("Unexpected node {:?}", node)) } } fn build_description(node: parser::ParseNode) -> AstResult<Description> { if let parser::ParseElement::Description { summary } = node.element { Ok(Description { summary, description: node.body.unwrap_or("".to_owned()), }) } else { Err("Invalid node".to_owned()) } } fn build_interface(node: parser::ParseNode) -> AstResult<Interface> { if let parser::ParseElement::Interface { name, version } = node.element { let mut description: Option<Description> = None; let mut requests: Vec<Message> = Vec::new(); let mut events: Vec<Message> = Vec::new(); let mut enums: Vec<Enum> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Request { .. } => requests.push(build_request(child)?), parser::ParseElement::Event { .. } => events.push(build_event(child)?), parser::ParseElement::Enum { .. } => enums.push(build_enum(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Interface { name, version, description, requests, events, enums, }) } else { Err("Invalid node".to_owned()) } } fn build_request(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Request { name, since, request_type, } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. 
} => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, request_type, description, args, }) } else { Err("Invalid node".to_owned()) } } fn build_event(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Event { name, since } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. } => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, description, args, request_type: None, }) } else { Err("Invalid node".to_owned()) } }
fn build_enum(node: parser::ParseNode) -> AstResult<Enum> { if let parser::ParseElement::Enum { name, since, bitfield, } = node.element { let mut description: Option<Description> = None; let mut entries: Vec<EnumEntry> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::EnumEntry { .. } => entries.push(build_enum_entry(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Enum { name, since, bitfield, description, entries, }) } else { Err("Invalid node".to_owned()) } } fn build_enum_entry(node: parser::ParseNode) -> AstResult<EnumEntry> { if let parser::ParseElement::EnumEntry { name, value, summary, since, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } _ => return Err("Unsupported".to_owned()), } } Ok(EnumEntry { name, value, summary, since, description, }) } else { Err("Invalid node".to_owned()) } } impl Protocol { pub fn from_parse_tree(parse_tree: parser::ParseNode) -> AstResult<Protocol> { build_protocol(parse_tree) } }
fn build_arg(node: parser::ParseNode) -> AstResult<Vec<Arg>> { if let parser::ParseElement::Arg { name, kind, summary, interface, nullable, enum_type, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } _ => return Err("Unsupported".to_owned()), } } let arg = Arg { name, kind, summary, interface, nullable, enum_type, description, }; if arg.kind == ArgKind::NewId && arg.interface.is_none() { Ok(vec![ Arg { name: format!("{}_interface_name", arg.name), kind: ArgKind::String, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, Arg { name: format!("{}_interface_version", arg.name), kind: ArgKind::Uint, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, arg, ]) } else { Ok(vec![arg]) } } else { Err("Invalid node".to_owned()) } }
function_block-full_function
[]
Rust
server/src/sonos_endpoint.rs
papertigers/home-api
d9a430425ebfbc730b29683381f96e00317d2875
use crate::AppCtx; use dropshot::{endpoint, ApiDescription, HttpError, HttpResponseOk, RequestContext, TypedBody}; use futures::stream::StreamExt; use futures_util::stream::FuturesUnordered; use schemars::JsonSchema; use serde::Deserialize; use sonor::{rupnp::Device, Speaker}; use std::sync::Arc; use std::time::Duration; #[derive(Deserialize, JsonSchema)] struct SonosArgs { rooms: Vec<String>, volume: Option<u16>, sleep_timer: Option<u16>, } async fn goodnight(speaker: &sonor::Speaker, sleep_timer: Option<u16>) -> Result<(), sonor::Error> { speaker.stop().await?; let _ = speaker.clear_queue().await; speaker .queue_next("file:///jffs/settings/savedqueues.rsq#23", "") .await?; speaker.set_repeat_mode(sonor::RepeatMode::All).await?; speaker.set_shuffle(true).await?; if let Some(t) = sleep_timer.map(|v| v.clamp(0, 2 * 60 * 60)) { speaker.set_sleep_timer(t as u64).await?; } speaker.play().await } async fn group_rooms( rctx: Arc<RequestContext<AppCtx>>, rooms: &[String], volume: Option<u16>, ) -> Result<Option<Speaker>, sonor::Error> { let first = match rooms.first() { Some(c) => c, None => return Ok(None), }; if let Some(coordinator) = sonor::find(first, Duration::from_secs(3)).await? { let find = coordinator .zone_group_state() .await? 
.into_iter() .flat_map(|(_, v)| v) .filter(|i| rooms[1..].iter().any(|n| n.eq_ignore_ascii_case(i.name()))) .map(|info| { let url = info.location().parse(); async { let device = Device::from_url(url?).await?; Ok(Speaker::from_device(device)) } }) .collect::<FuturesUnordered<_>>() .collect::<Vec<Result<Option<Speaker>, sonor::Error>>>() .await; let speakers: Vec<Speaker> = find.into_iter().filter_map(Result::ok).flatten().collect(); let default_volume = coordinator.volume().await?; let volume = volume.unwrap_or(default_volume); coordinator.leave().await?; coordinator.set_volume(volume).await?; for speaker in speakers { speaker.leave().await?; speaker.set_volume(volume).await?; if let Err(e) = speaker.join(first).await { warn!( rctx.log, "failed to join {} to group: {}", speaker.name().await?, e ) } } info!(rctx.log, "joined rooms: {:?}", rooms); return Ok(Some(coordinator)); }; Ok(None) } #[endpoint { method = POST, path = "/sonos/sleep", }] async fn sleep( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); if let Some(speaker) = group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))? 
{ goodnight(&speaker, body.sleep_timer) .await .map_err(|e| HttpError::for_unavail(None, format!("{}", e)))?; } else { return Err(HttpError::for_bad_request( None, format!("verify sonos speakers: [{:?}]", &body.rooms), )); } info!(rctx.log, "sleep mode initiated for: {:?}", &body.rooms); Ok(HttpResponseOk(())) } #[endpoint { method = POST, path = "/sonos/group", }] async fn group( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))?; Ok(HttpResponseOk(())) } pub fn mount(api: &mut ApiDescription<AppCtx>) { api.register(sleep).expect("failed to mount sleep"); api.register(group).expect("failed to mount group"); }
use crate::AppCtx; use dropshot::{endpoint, ApiDescription, HttpError, HttpResponseOk, RequestContext, TypedBody}; use futures::stream::StreamExt; use futures_util::stream::FuturesUnordered; use schemars::JsonSchema; use serde::Deserialize; use sonor::{rupnp::Device, Speaker}; use std::sync::Arc; use std::time::Duration; #[derive(Deserialize, JsonSchema)] struct SonosArgs { rooms: Vec<String>, volume: Option<u16>, sleep_timer: Option<u16>, } async fn goodnight(speaker: &sonor::Speaker, sleep_timer: Option<u16>) -> Result<(), sonor::Error> { speaker.stop().await?; let _ = speaker.clear_queue().await; speaker .queue_next("file:///jffs/settings/savedqueues.rsq#23", "") .await?; speaker.set_repeat_mode(sonor::RepeatMode::All).await?; speaker.set_shuffle(true).await?; if let Some(t) = sleep_timer.map(|v| v.clamp(0, 2 * 60 * 60)) { speaker.set_sleep_timer(t as u64).await?; } speaker.play().await }
#[endpoint { method = POST, path = "/sonos/sleep", }] async fn sleep( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); if let Some(speaker) = group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))? { goodnight(&speaker, body.sleep_timer) .await .map_err(|e| HttpError::for_unavail(None, format!("{}", e)))?; } else { return Err(HttpError::for_bad_request( None, format!("verify sonos speakers: [{:?}]", &body.rooms), )); } info!(rctx.log, "sleep mode initiated for: {:?}", &body.rooms); Ok(HttpResponseOk(())) } #[endpoint { method = POST, path = "/sonos/group", }] async fn group( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))?; Ok(HttpResponseOk(())) } pub fn mount(api: &mut ApiDescription<AppCtx>) { api.register(sleep).expect("failed to mount sleep"); api.register(group).expect("failed to mount group"); }
async fn group_rooms( rctx: Arc<RequestContext<AppCtx>>, rooms: &[String], volume: Option<u16>, ) -> Result<Option<Speaker>, sonor::Error> { let first = match rooms.first() { Some(c) => c, None => return Ok(None), }; if let Some(coordinator) = sonor::find(first, Duration::from_secs(3)).await? { let find = coordinator .zone_group_state() .await? .into_iter() .flat_map(|(_, v)| v) .filter(|i| rooms[1..].iter().any(|n| n.eq_ignore_ascii_case(i.name()))) .map(|info| { let url = info.location().parse(); async { let device = Device::from_url(url?).await?; Ok(Speaker::from_device(device)) } }) .collect::<FuturesUnordered<_>>() .collect::<Vec<Result<Option<Speaker>, sonor::Error>>>() .await; let speakers: Vec<Speaker> = find.into_iter().filter_map(Result::ok).flatten().collect(); let default_volume = coordinator.volume().await?; let volume = volume.unwrap_or(default_volume); coordinator.leave().await?; coordinator.set_volume(volume).await?; for speaker in speakers { speaker.leave().await?; speaker.set_volume(volume).await?; if let Err(e) = speaker.join(first).await { warn!( rctx.log, "failed to join {} to group: {}", speaker.name().await?, e ) } } info!(rctx.log, "joined rooms: {:?}", rooms); return Ok(Some(coordinator)); }; Ok(None) }
function_block-full_function
[ { "content": "type Result<T> = std::result::Result<T, error::AylaError>;\n\n\n\npub(crate) struct AylaClient {\n\n client: Client,\n\n region: Region,\n\n email: String,\n\n password: String,\n\n access_token: Option<String>,\n\n refresh_token: Option<String>,\n\n auth_expiration: Option<String>,\n\n is_authenticated: bool,\n\n}\n\n\n\npub(crate) enum RequestType {\n\n Device,\n\n User,\n\n}\n\n\n\nimpl AylaClient {\n\n pub(crate) fn new(region: Region, email: String, password: String) -> Self {\n", "file_path": "shark/src/ayla.rs", "rank": 0, "score": 34022.93737784387 }, { "content": "#[derive(Deserialize)]\n\nstruct GetDevicesResponse {\n\n device: SharkDevice,\n\n}\n\n\n\n#[derive(Serialize_repr)]\n\n#[repr(u8)]\n\npub enum OperatingMode {\n\n Stop = 0,\n\n Pause = 1,\n\n Start = 2,\n\n Return = 3,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema)]\n\npub struct SharkDevice {\n\n pub dsn: String,\n\n pub model: String,\n\n pub oem_model: String,\n\n pub mac: String,\n\n pub product_name: String,\n", "file_path": "shark/src/lib.rs", "rank": 2, "score": 28954.663937822108 }, { "content": "#[derive(Deserialize, JsonSchema)]\n\nstruct ActionPathParam {\n\n dsn: String,\n\n}\n\n\n\n#[endpoint {\n\n method = GET,\n\n path = \"/shark/devices\",\n\n}]\n\nasync fn get_devices(\n\n rctx: Arc<RequestContext<AppCtx>>,\n\n) -> Result<HttpResponseOk<Vec<SharkDevice>>, HttpError> {\n\n let app = rctx.context();\n\n let shark = app.shark.read().await;\n\n match shark.get_devices().await {\n\n Ok(devices) => Ok(HttpResponseOk(devices)),\n\n Err(e) => Err(HttpError::for_internal_error(format!(\n\n \"shark api error: {}\",\n\n e\n\n ))),\n\n }\n", "file_path": "server/src/shark_endpoint.rs", "rank": 3, "score": 28114.060208215036 }, { "content": "pub fn mount(api: &mut ApiDescription<AppCtx>) {\n\n api.register(get_devices)\n\n .expect(\"failed to register get_devices\");\n\n api.register(start).expect(\"failed to register start\");\n\n 
api.register(stop).expect(\"failed to register stop\");\n\n api.register(r#return).expect(\"failed to register return\");\n\n}\n", "file_path": "server/src/shark_endpoint.rs", "rank": 4, "score": 18466.662498651593 }, { "content": "\n\n /// Refresh API token (expires after 24h)\n\n pub async fn refresh_token(&mut self) -> Result<()> {\n\n self.ayla.refresh_token().await?;\n\n Ok(())\n\n }\n\n\n\n /// Sign out of the Shark API\n\n pub async fn sign_out(self) -> Result<()> {\n\n self.ayla.sign_out().await?;\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Check for a successful Shark API response or return a SharkError\n\nasync fn get_api_response<T>(r: Response) -> Result<T>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n match r.status() {\n\n StatusCode::OK => Ok(r.json::<T>().await?),\n\n sc => Err(error::SharkError::ApiError(sc, r.text().await.unwrap())),\n\n }\n\n}\n", "file_path": "shark/src/lib.rs", "rank": 11, "score": 6.038901467884578 }, { "content": "use crate::ayla::RequestType;\n\nuse crate::region::Region;\n\nuse reqwest::{Method, Response, StatusCode};\n\nuse schemars::JsonSchema;\n\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\nuse serde_json::json;\n\nuse serde_repr::*;\n\n\n\nmod ayla;\n\npub mod error;\n\nmod models;\n\npub mod region;\n\npub use error::SharkError;\n\n\n\npub type Result<T> = std::result::Result<T, error::SharkError>;\n\n\n\n#[derive(Deserialize)]\n", "file_path": "shark/src/lib.rs", "rank": 12, "score": 4.894619860021665 }, { "content": " {\n\n SharkClientBuilder::new(email.into(), password.into())\n\n }\n\n\n\n async fn from_creds(region: Region, email: String, password: String) -> Result<Self> {\n\n let ayla = ayla::AylaClient::new(region, email, password);\n\n\n\n let mut sharkvac = Self { ayla };\n\n sharkvac.ayla.sign_in().await?;\n\n\n\n Ok(sharkvac)\n\n }\n\n\n\n pub async fn get_devices(&self) -> Result<Vec<SharkDevice>> {\n\n let req = self.ayla.request(\n\n RequestType::Device,\n\n Method::GET,\n\n \"/apiv1/devices\",\n\n 
None::<()>,\n\n )?;\n", "file_path": "shark/src/lib.rs", "rank": 13, "score": 4.711678867659482 }, { "content": "\n\n let res = self.ayla.execute(req).await?;\n\n Ok(get_api_response::<Vec<GetDevicesResponse>>(res)\n\n .await?\n\n .into_iter()\n\n .map(|v| v.device)\n\n .collect())\n\n }\n\n\n\n pub async fn get_device_properties(&self, dsn: &str) -> Result<()> {\n\n let req = self.ayla.request(\n\n RequestType::Device,\n\n Method::GET,\n\n format!(\"/apiv1/dsns/{}/properties\", dsn),\n\n None::<()>,\n\n )?;\n\n\n\n let res = self.ayla.execute(req).await?;\n\n let properties = get_api_response::<serde_json::Value>(res).await?;\n\n println!(\"{:#?}\", properties);\n", "file_path": "shark/src/lib.rs", "rank": 14, "score": 4.509340421570146 }, { "content": " sc => {\n\n return Err(error::AylaError::RefreshTokenError(\n\n sc,\n\n res.text().await.unwrap(),\n\n ));\n\n }\n\n };\n\n\n\n Ok(())\n\n }\n\n\n\n pub(crate) async fn sign_out(self) -> Result<()> {\n\n if self.is_authenticated {\n\n let body = json!({\"user\": {\"access_token\": self.access_token }});\n\n let req = self.request(\n\n RequestType::User,\n\n Method::POST,\n\n \"/users/sign_out\",\n\n Some(body),\n\n )?;\n", "file_path": "shark/src/ayla.rs", "rank": 16, "score": 4.264590840166347 }, { "content": " let res = self.client.execute(request).await?;\n\n Ok(res)\n\n }\n\n\n\n pub(crate) async fn sign_in(&mut self) -> Result<()> {\n\n if !self.is_authenticated {\n\n let body = json!({\n\n \"user\": {\n\n \"email\": self.email,\n\n \"password\": self.password,\n\n \"application\": {\n\n \"app_id\": self.region.app_id(),\n\n \"app_secret\": self.region.app_secret(),\n\n }\n\n }\n\n });\n\n\n\n let req = self.request(\n\n RequestType::User,\n\n Method::POST,\n", "file_path": "shark/src/ayla.rs", "rank": 17, "score": 4.168668219193729 }, { "content": " }\n\n\n\n pub(crate) async fn refresh_token(&mut self) -> Result<()> {\n\n let body = json!({\"user\": {\"refresh_token\": self.refresh_token }});\n\n let req = 
self.request(\n\n RequestType::User,\n\n Method::POST,\n\n \"/users/refresh_token\",\n\n Some(body),\n\n )?;\n\n\n\n let res = self.client.execute(req).await?;\n\n match res.status() {\n\n StatusCode::OK => {\n\n let alr: AylaLoginResponse = res.json().await?;\n\n self.access_token = Some(alr.access_token);\n\n self.refresh_token = Some(alr.refresh_token);\n\n self.auth_expiration = None;\n\n self.is_authenticated = true;\n\n }\n", "file_path": "shark/src/ayla.rs", "rank": 18, "score": 3.9649774241581666 }, { "content": " }\n\n}\n\n\n\n#[endpoint {\n\n method = PUT,\n\n path = \"/shark/devices/{dsn}/stop\",\n\n}]\n\nasync fn stop(\n\n rctx: Arc<RequestContext<AppCtx>>,\n\n path_params: Path<ActionPathParam>,\n\n) -> Result<HttpResponseAccepted<()>, HttpError> {\n\n let app = rctx.context();\n\n let shark = app.shark.read().await;\n\n let dsn = path_params.into_inner().dsn;\n\n\n\n match shark\n\n .set_device_operating_mode(&dsn, shark::OperatingMode::Stop)\n\n .await\n\n {\n\n Ok(_) => Ok(HttpResponseAccepted(())),\n", "file_path": "server/src/shark_endpoint.rs", "rank": 19, "score": 3.9649774241581666 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n pub async fn set_device_operating_mode(&self, dsn: &str, mode: OperatingMode) -> Result<()> {\n\n let body = json!({ \"datapoint\": { \"value\": mode }});\n\n let req = self.ayla.request(\n\n RequestType::Device,\n\n Method::POST,\n\n format!(\n\n \"/apiv1/dsns/{}/properties/SET_Operating_Mode/datapoints\",\n\n dsn\n\n ),\n\n Some(body),\n\n )?;\n\n\n\n let res = self.ayla.execute(req).await?;\n\n dbg!(res);\n\n Ok(())\n\n }\n", "file_path": "shark/src/lib.rs", "rank": 20, "score": 3.9467353241257745 }, { "content": "}\n\n\n\n#[endpoint {\n\n method = PUT,\n\n path = \"/shark/devices/{dsn}/start\",\n\n}]\n\nasync fn start(\n\n rctx: Arc<RequestContext<AppCtx>>,\n\n path_params: Path<ActionPathParam>,\n\n) -> Result<HttpResponseAccepted<()>, HttpError> {\n\n let app = rctx.context();\n\n let shark = 
app.shark.read().await;\n\n let dsn = path_params.into_inner().dsn;\n\n\n\n match shark\n\n .set_device_operating_mode(&dsn, shark::OperatingMode::Start)\n\n .await\n\n {\n\n Ok(_) => Ok(HttpResponseAccepted(())),\n\n Err(e) => Err(HttpError::for_internal_error(e.to_string())),\n", "file_path": "server/src/shark_endpoint.rs", "rank": 21, "score": 3.7485720898807218 }, { "content": " Err(e) => Err(HttpError::for_internal_error(e.to_string())),\n\n }\n\n}\n\n\n\n#[endpoint {\n\n method = PUT,\n\n path = \"/shark/devices/{dsn}/return\",\n\n}]\n\nasync fn r#return(\n\n rctx: Arc<RequestContext<AppCtx>>,\n\n path_params: Path<ActionPathParam>,\n\n) -> Result<HttpResponseAccepted<()>, HttpError> {\n\n let app = rctx.context();\n\n let shark = app.shark.read().await;\n\n let dsn = path_params.into_inner().dsn;\n\n\n\n match shark\n\n .set_device_operating_mode(&dsn, shark::OperatingMode::Return)\n\n .await\n\n {\n\n Ok(_) => Ok(HttpResponseAccepted(())),\n\n Err(e) => Err(HttpError::for_internal_error(e.to_string())),\n\n }\n\n}\n\n\n", "file_path": "server/src/shark_endpoint.rs", "rank": 22, "score": 3.6081619767142965 }, { "content": " pub fn region(mut self, region: Region) -> Self {\n\n self.region = region;\n\n self\n\n }\n\n\n\n /// Given the provided credentials attempt to login to the Shark API service\n\n pub async fn build(self) -> Result<SharkClient> {\n\n SharkClient::from_creds(self.region, self.email, self.password).await\n\n }\n\n}\n\n\n\npub struct SharkClient {\n\n ayla: ayla::AylaClient,\n\n}\n\n\n\nimpl SharkClient {\n\n pub fn builder<E, P>(email: E, password: P) -> SharkClientBuilder\n\n where\n\n E: Into<String>,\n\n P: Into<String>,\n", "file_path": "shark/src/lib.rs", "rank": 23, "score": 3.5669395785427547 }, { "content": "use serde::Deserialize;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::path::Path;\n\n\n\n#[derive(Deserialize)]\n\npub struct SharkAuth {\n\n pub user: String,\n\n pub password: 
String,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct Config {\n\n pub user_auth: Vec<String>,\n\n pub shark: SharkAuth,\n\n}\n\n\n\nimpl Config {\n\n pub fn from_file<P: AsRef<Path>>(path: P) -> std::io::Result<Self> {\n\n let path = path.as_ref();\n\n let mut f = File::open(path)?;\n\n let mut buf: Vec<u8> = Vec::new();\n\n f.read_to_end(&mut buf)?;\n\n let config: Self = toml::from_slice(&buf)?;\n\n\n\n Ok(config)\n\n }\n\n}\n", "file_path": "server/src/config.rs", "rank": 24, "score": 3.26569503403877 }, { "content": "use crate::models::AylaLoginResponse;\n\nuse crate::{error, Region};\n\nuse reqwest::{header, Client, Method, Request, Response, StatusCode, Url};\n\nuse serde::Serialize;\n\nuse serde_json::json;\n\n\n", "file_path": "shark/src/ayla.rs", "rank": 25, "score": 2.9860522523645416 }, { "content": "use anyhow::anyhow;\n\nuse dropshot::{\n\n ApiDescription, ConfigDropshot, ConfigLogging, ConfigLoggingLevel, HttpError, HttpServerStarter,\n\n};\n\nuse hyper::{Body, Request, StatusCode};\n\nuse illumos_priv::{PrivOp, PrivPtype, PrivSet, Privilege};\n\nuse shark::SharkClient;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\nuse tokio::sync::RwLock;\n\nuse tokio::time;\n\n\n\n#[macro_use]\n\nextern crate slog;\n\n\n\nmod config;\n\nmod shark_endpoint;\n\nmod sonos_endpoint;\n\n\n\npub struct Auth;\n\n\n", "file_path": "server/src/main.rs", "rank": 26, "score": 2.957310247981478 }, { "content": "use crate::AppCtx;\n\nuse dropshot::{\n\n endpoint, ApiDescription, HttpError, HttpResponseAccepted, HttpResponseOk, Path, RequestContext,\n\n};\n\nuse schemars::JsonSchema;\n\nuse serde::Deserialize;\n\nuse shark::SharkDevice;\n\nuse std::sync::Arc;\n\n\n\n#[derive(Deserialize, JsonSchema)]\n", "file_path": "server/src/shark_endpoint.rs", "rank": 27, "score": 2.9403842843136427 }, { "content": "\n\n if self.is_authenticated {\n\n if let Some(token) = &self.access_token {\n\n let mut headers = header::HeaderMap::new();\n\n headers.append(\n\n 
header::AUTHORIZATION,\n\n header::HeaderValue::from_str(token).unwrap(),\n\n );\n\n rb = rb.headers(headers);\n\n }\n\n }\n\n\n\n if let Some(b) = body {\n\n rb = rb.json(&b);\n\n }\n\n\n\n Ok(rb.build()?)\n\n }\n\n\n\n pub(crate) async fn execute(&self, request: Request) -> Result<Response> {\n", "file_path": "shark/src/ayla.rs", "rank": 28, "score": 2.814204189212468 }, { "content": " appctx,\n\n &log,\n\n )\n\n .map_err(|error| anyhow!(\"failed to start server: {}\", error))?;\n\n\n\n let mut pset = PrivSet::new_basic().unwrap();\n\n pset.delset(Privilege::ProcFork).unwrap();\n\n pset.delset(Privilege::ProcExec).unwrap();\n\n pset.delset(Privilege::ProcInfo).unwrap();\n\n pset.delset(Privilege::ProcSession).unwrap();\n\n illumos_priv::setppriv(PrivOp::Set, PrivPtype::Permitted, &pset).unwrap();\n\n illumos_priv::setppriv(PrivOp::Set, PrivPtype::Limit, &pset).unwrap();\n\n\n\n tokio::task::spawn(async move {\n\n let mut interval = time::interval(Duration::from_secs(60 * 60 * 12));\n\n interval.tick().await;\n\n\n\n loop {\n\n interval.tick().await;\n\n let mut shark = app.shark.write().await;\n", "file_path": "server/src/main.rs", "rank": 29, "score": 2.615212618955754 }, { "content": " let res = self.client.execute(req).await?;\n\n match res.status() {\n\n StatusCode::OK => (),\n\n sc => {\n\n return Err(error::AylaError::LogoutError(sc, res.text().await.unwrap()));\n\n }\n\n };\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "shark/src/ayla.rs", "rank": 30, "score": 2.144594015353848 }, { "content": " \"/users/sign_in\",\n\n Some(body),\n\n )?;\n\n\n\n let res = self.client.execute(req).await?;\n\n match res.status() {\n\n StatusCode::OK => {\n\n let alr: AylaLoginResponse = res.json().await?;\n\n self.access_token = Some(alr.access_token);\n\n self.refresh_token = Some(alr.refresh_token);\n\n self.auth_expiration = None;\n\n self.is_authenticated = true;\n\n }\n\n sc => {\n\n return Err(error::AylaError::LoginError(sc, res.text().await.unwrap()));\n\n }\n\n 
};\n\n }\n\n\n\n Ok(())\n", "file_path": "shark/src/ayla.rs", "rank": 31, "score": 2.0562254262004314 }, { "content": " match shark.refresh_token().await {\n\n Ok(_) => info!(&log, \"refreshed shark access_token\"),\n\n Err(e) => error!(&log, \"error refreshing shark token: {}\", e),\n\n }\n\n }\n\n });\n\n\n\n let server_task = server.start();\n\n server_task.await.map_err(|e| anyhow!(\"{}\", e))\n\n}\n", "file_path": "server/src/main.rs", "rank": 32, "score": 2.0531909753390973 }, { "content": "use serde::Deserialize;\n\n\n\n#[derive(Deserialize)]\n\npub(crate) struct AylaLoginResponse {\n\n pub access_token: String,\n\n pub refresh_token: String,\n\n}\n", "file_path": "shark/src/models.rs", "rank": 33, "score": 2.0269119104906688 }, { "content": "use reqwest::StatusCode;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum AylaError {\n\n #[error(\"Failed to login: status code {0} -- {1}\")]\n\n LoginError(StatusCode, String),\n\n #[error(\"Failed to refresh token: status code {0} -- {1}\")]\n\n RefreshTokenError(StatusCode, String),\n\n #[error(\"Failed to logout: status code {0} -- {1}\")]\n\n LogoutError(StatusCode, String),\n\n #[error(\"reqwest error\")]\n\n ReqwestError(#[from] reqwest::Error),\n\n}\n\n\n\n#[derive(Error, Debug)]\n\npub enum SharkError {\n\n #[error(\"Ayla Client error: {0}\")]\n\n AylaError(#[from] AylaError),\n\n #[error(\"Shark API error: status code {0} -- {1}\")]\n\n ApiError(StatusCode, String),\n\n #[error(\"reqwest error: {0}\")]\n\n ReqwestError(#[from] reqwest::Error),\n\n}\n", "file_path": "shark/src/error.rs", "rank": 34, "score": 1.5977972184667466 }, { "content": " }\n\n\n\n pub(crate) fn request<P, B>(\n\n &self,\n\n req_type: RequestType,\n\n method: Method,\n\n path: P,\n\n body: Option<B>,\n\n ) -> Result<Request>\n\n where\n\n P: AsRef<str>,\n\n B: Serialize,\n\n {\n\n let base = match req_type {\n\n RequestType::Device => Url::parse(self.region.device_url()).unwrap(),\n\n RequestType::User => 
Url::parse(self.region.user_url()).unwrap(),\n\n };\n\n\n\n let url = base.join(path.as_ref()).unwrap();\n\n let mut rb = self.client.request(method, url);\n", "file_path": "shark/src/ayla.rs", "rank": 35, "score": 1.1563846574814995 }, { "content": " let program = &args[0].clone();\n\n let brief = format!(\"Usage: {} [options] -c CONFIG\", program);\n\n\n\n let mut opts = getopts::Options::new();\n\n opts.reqopt(\"c\", \"\", \"config file\", \"CONFIG\");\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(e) => return Err(anyhow!(\"{}\\n{}\", e, opts.usage(&brief))),\n\n };\n\n\n\n let config = config::Config::from_file(matches.opt_str(\"c\").unwrap())\n\n .map_err(|e| anyhow!(\"Failed to parse config file: {}\", e))?;\n\n\n\n let shark = SharkClient::builder(&config.shark.user, &config.shark.password)\n\n .build()\n\n .await\n\n .map_err(|e| anyhow!(\"failed to create shark client: {}\", e))?;\n\n\n\n let app = Arc::new(App {\n\n shark: RwLock::new(shark),\n", "file_path": "server/src/main.rs", "rank": 36, "score": 0.97190052831106 } ]
Rust
crates/interledger-settlement/src/core/engines_api.rs
KevinWMatthews/interledger-rs
ec3c745e1af51b9aa83ac29b64111d2e9cbb3fbc
use super::{ get_hash_of, idempotency::{make_idempotent_call, IdempotentStore}, types::{Quantity, SettlementEngine}, }; use bytes::Bytes; use http::StatusCode; use hyper::Response; use interledger_errors::default_rejection_handler; use serde::{Deserialize, Serialize}; use warp::Filter; #[derive(Serialize, Deserialize, Debug, Clone, Hash)] pub struct CreateAccount { id: String, } async fn create_engine_account<E, S>( idempotency_key: Option<String>, account_id: CreateAccount, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.create_account(account_id.id), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("CREATED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn delete_engine_account<E, S>( account_id: String, idempotency_key: Option<String>, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.delete_account(account_id), input_hash, idempotency_key, StatusCode::NO_CONTENT, Bytes::from("DELETED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_send_money<E, S>( id: String, idempotency_key: Option<String>, quantity: Quantity, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, quantity); let input_hash = 
get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.send_money(id, quantity), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("EXECUTED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_receive_message<E, S>( id: String, idempotency_key: Option<String>, message: Bytes, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, message); let input_hash = get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.receive_message(id, message.to_vec()), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("RECEIVED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } pub fn create_settlement_engine_filter<E, S>( engine: E, store: S, ) -> warp::filters::BoxedFilter<(impl warp::Reply,)> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let with_store = warp::any().map(move || store.clone()); let with_engine = warp::any().map(move || engine.clone()); let idempotency = warp::header::optional::<String>("idempotency-key"); let account_id = warp::path("accounts").and(warp::path::param::<String>()); let accounts = warp::post() .and(warp::path("accounts")) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(create_engine_account); let del_account = warp::delete() .and(account_id) .and(warp::path::end()) .and(idempotency) .and(with_engine.clone()) .and(with_store.clone()) .and_then(delete_engine_account); let settlement_endpoint = account_id.and(warp::path("settlements")); let settlements = warp::post() 
.and(settlement_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(engine_send_money); let messages_endpoint = account_id.and(warp::path("messages")); let messages = warp::post() .and(messages_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::bytes()) .and(with_engine) .and(with_store) .and_then(engine_receive_message); accounts .or(del_account) .or(settlements) .or(messages) .recover(default_rejection_handler) .boxed() } #[cfg(test)] mod tests { use super::*; use crate::core::idempotency::IdempotentData; use crate::core::types::{ApiResponse, ApiResult}; use async_trait::async_trait; use bytes::Bytes; use http::StatusCode; use interledger_errors::*; use parking_lot::RwLock; use serde_json::{json, Value}; use std::collections::HashMap; use std::sync::Arc; fn check_error_status_and_message(response: Response<Bytes>, status_code: u16, message: &str) { let err: Value = serde_json::from_slice(response.body()).unwrap(); assert_eq!(response.status().as_u16(), status_code); assert_eq!(err.get("status").unwrap(), status_code); assert_eq!(err.get("detail").unwrap(), message); } #[derive(Clone)] struct TestEngine; #[derive(Debug, Clone)] pub struct TestAccount; #[derive(Clone)] pub struct TestStore { #[allow(clippy::all)] pub cache: Arc<RwLock<HashMap<String, IdempotentData>>>, pub cache_hits: Arc<RwLock<u64>>, } fn test_store() -> TestStore { TestStore { cache: Arc::new(RwLock::new(HashMap::new())), cache_hits: Arc::new(RwLock::new(0)), } } #[async_trait] impl IdempotentStore for TestStore { async fn load_idempotent_data( &self, idempotency_key: String, ) -> Result<Option<IdempotentData>, IdempotentStoreError> { let cache = self.cache.read(); if let Some(data) = cache.get(&idempotency_key) { let mut guard = self.cache_hits.write(); *guard += 1; Ok(Some(data.clone())) } else { Ok(None) } } async fn save_idempotent_data( &self, idempotency_key: String, input_hash: [u8; 
32], status_code: StatusCode, data: Bytes, ) -> Result<(), IdempotentStoreError> { let mut cache = self.cache.write(); cache.insert( idempotency_key, IdempotentData::new(status_code, data, input_hash), ); Ok(()) } } pub static IDEMPOTENCY: &str = "abcd01234"; #[async_trait] impl SettlementEngine for TestEngine { async fn send_money(&self, _account_id: String, _money: Quantity) -> ApiResult { Ok(ApiResponse::Default) } async fn receive_message(&self, _account_id: String, _message: Vec<u8>) -> ApiResult { Ok(ApiResponse::Default) } async fn create_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } async fn delete_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } } #[tokio::test] async fn idempotent_execute_settlement() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let settlement_call = |id, amount, scale| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/settlements", id)) .body(json!(Quantity::new(amount, scale)).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("42".to_owned(), 100, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("1".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("42".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = 
store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "EXECUTED".to_string()); } #[tokio::test] async fn idempotent_receive_message() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let messages_call = |id, msg| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/messages", id)) .body(msg) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("42", vec![0]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("1", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("42", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "RECEIVED".to_string()); } #[tokio::test] async fn idempotent_create_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let create_account_call = |id: &str| { warp::test::request() .method("POST") .path("/accounts") .body(json!(CreateAccount { id: id.to_string() }).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = 
create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = create_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "CREATED".to_string()); } #[tokio::test] async fn idempotent_delete_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let delete_account_call = |id: &str| { warp::test::request() .method("DELETE") .path(&format!("/accounts/{}", id)) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 204); assert_eq!(cached_data.body, "DELETED".to_string()); } }
use super::{ get_hash_of, idempotency::{make_idempotent_call, IdempotentStore}, types::{Quantity, SettlementEngine}, }; use bytes::Bytes; use http::StatusCode; use hyper::Response; use interledger_errors::default_rejection_handler; use serde::{Deserialize, Serialize}; use warp::Filter; #[derive(Serialize, Deserialize, Debug, Clone, Hash)] pub struct CreateAccount { id: String, } async fn create_engine_account<E, S>( idempotency_key: Option<String>, account_id: CreateAccount, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.create_account(account_id.id), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("CREATED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn delete_engine_account<E, S>( account_id: String, idempotency_key: Option<String>, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.delete_account(account_id), input_hash, idempotency_key, StatusCode::NO_CONTENT, Bytes::from("DELETED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_send_money<E, S>( id: String, idempotency_key: Option<String>, quantity: Quantity, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, quantity); let input_hash = 
get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.send_money(id, quantity), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("EXECUTED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_receive_message<E, S>( id: String, idempotency_key: Option<String>, message: Bytes, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, message); let input_hash = get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.receive_message(id, message.to_vec()), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("RECEIVED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } pub fn create_settlement_engine_filter<E, S>( engine: E, store: S, ) -> warp::filters::BoxedFilter<(impl warp::Reply,)> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let with_store = warp::any().map(move || store.clone()); let with_engine = warp::any().map(move || engine.clone()); let idempotency = warp::header::optional::<String>("idempotency-key"); let account_id = warp::path("accounts").and(warp::path::param::<String>()); let accounts = warp::post() .and(warp::path("accounts")) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(create_engine_account); let del_account = warp::delete() .and(account_id) .and(warp::path::end()) .and(idempotency) .and(with_engine.clone()) .and(with_store.clone()) .and_then(delete_engine_account); let settlement_endpoint = account_id.and(warp::path("settlements")); let settlements = warp::post() 
.and(settlement_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(engine_send_money); let messages_endpoint = account_id.and(warp::path("messages")); let messages = warp::post() .and(messages_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::bytes()) .and(with_engine) .and(with_store) .and_then(engine_receive_message); accounts .or(del_account) .or(settlements) .or(messages) .recover(default_rejection_handler) .boxed() } #[cfg(test)] mod tests { use super::*; use crate::core::idempotency::IdempotentData; use crate::core::types::{ApiResponse, ApiResult}; use async_trait::async_trait; use bytes::Bytes; use http::StatusCode; use interledger_errors::*; use parking_lot::RwLock; use serde_json::{json, Value}; use std::collections::HashMap; use std::sync::Arc; fn check_error_status_and_message(response: Response<Bytes>, status_code: u16, message: &str) { let err: Value = serde_json::from_slice(response.body()).unwrap(); assert_eq!(response.status().as_u16(), status_code); assert_eq!(err.get("status").unwrap(), status_code); assert_eq!(err.get("detail").unwrap(), message); } #[derive(Clone)] struct TestEngine; #[derive(Debug, Clone)] pub struct TestAccount; #[derive(Clone)] pub struct TestStore { #[allow(clippy::all)] pub cache: Arc<RwLock<HashMap<String, IdempotentData>>>, pub cache_hits: Arc<RwLock<u64>>, } fn test_store() -> TestStore { TestStore { cache: Arc::new(RwLock::new(HashMap::new())), cache_hits: Arc::new(RwLock::new(0)), } } #[async_trait] impl IdempotentStore for TestStore { async fn load_idempotent_data( &self, idempotency_key: String, ) -> Result<Option<IdempotentData>, IdempotentStoreError> { let cache = self.cache.read(); if let Some(data) = cache.get(&idempotency_key) { let mut guard = self.cache_hits.write(); *guard += 1; Ok(Some(data.clone())) } else { Ok(None) } }
tEngine { async fn send_money(&self, _account_id: String, _money: Quantity) -> ApiResult { Ok(ApiResponse::Default) } async fn receive_message(&self, _account_id: String, _message: Vec<u8>) -> ApiResult { Ok(ApiResponse::Default) } async fn create_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } async fn delete_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } } #[tokio::test] async fn idempotent_execute_settlement() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let settlement_call = |id, amount, scale| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/settlements", id)) .body(json!(Quantity::new(amount, scale)).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("42".to_owned(), 100, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("1".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("42".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "EXECUTED".to_string()); } #[tokio::test] async fn idempotent_receive_message() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); 
let messages_call = |id, msg| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/messages", id)) .body(msg) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("42", vec![0]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("1", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("42", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "RECEIVED".to_string()); } #[tokio::test] async fn idempotent_create_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let create_account_call = |id: &str| { warp::test::request() .method("POST") .path("/accounts") .body(json!(CreateAccount { id: id.to_string() }).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = create_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = 
cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "CREATED".to_string()); } #[tokio::test] async fn idempotent_delete_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let delete_account_call = |id: &str| { warp::test::request() .method("DELETE") .path(&format!("/accounts/{}", id)) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 204); assert_eq!(cached_data.body, "DELETED".to_string()); } }
async fn save_idempotent_data( &self, idempotency_key: String, input_hash: [u8; 32], status_code: StatusCode, data: Bytes, ) -> Result<(), IdempotentStoreError> { let mut cache = self.cache.write(); cache.insert( idempotency_key, IdempotentData::new(status_code, data, input_hash), ); Ok(()) } } pub static IDEMPOTENCY: &str = "abcd01234"; #[async_trait] impl SettlementEngine for Tes
random
[ { "content": "/// A trait for Store implmentations that have ILP routing tables.\n\npub trait RouterStore: AccountStore + Clone + Send + Sync + 'static {\n\n /// **Synchronously** return the routing table.\n\n /// Note that this is synchronous because it assumes that Stores should\n\n /// keep the routing table in memory and use PubSub or polling to keep it updated.\n\n /// This ensures that individual packets can be routed without hitting the underlying store.\n\n /// An Arc is returned to avoid copying the underlying data while processing each packet.\n\n fn routing_table(&self) -> Arc<HashMap<String, Uuid>>;\n\n}\n", "file_path": "crates/interledger-router/src/lib.rs", "rank": 0, "score": 430403.744785307 }, { "content": "/// Domain separator for leftover amounts\n\nfn uncredited_amount_key(account_id: impl ToString) -> String {\n\n format!(\"uncredited-amount:{}\", account_id.to_string())\n\n}\n\n\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 1, "score": 414176.87810887175 }, { "content": "/// Helper function to get a redis key\n\nfn uncredited_amount_key(account_id: &str) -> String {\n\n format!(\"{}:{}\", UNCREDITED_AMOUNT_KEY, account_id)\n\n}\n\n\n\n/// Builder object to create a Redis connection for the engine\n\npub struct EngineRedisStoreBuilder {\n\n redis_url: ConnectionInfo,\n\n}\n\n\n\nimpl EngineRedisStoreBuilder {\n\n /// Simple constructor\n\n pub fn new(redis_url: ConnectionInfo) -> Self {\n\n EngineRedisStoreBuilder { redis_url }\n\n }\n\n\n\n /// Connects to the provided redis_url and returns a Redis connection for the Settlement Engine\n\n pub async fn connect(&self) -> Result<EngineRedisStore, ()> {\n\n let client = match Client::open(self.redis_url.clone()) {\n\n Ok(c) => c,\n\n Err(err) => {\n", "file_path": "crates/interledger-settlement/src/core/backends_common/redis/mod.rs", "rank": 2, "score": 399563.5956509613 }, { "content": "#[async_trait]\n\npub trait NodeStore: Clone + Send + Sync + 'static {\n\n type 
Account: Account;\n\n\n\n /// Inserts an account to the store. Generates a UUID and returns the full Account object.\n\n async fn insert_account(\n\n &self,\n\n account: AccountDetails,\n\n ) -> Result<Self::Account, NodeStoreError>;\n\n\n\n /// Deletes the account corresponding to the provided id and returns it\n\n async fn delete_account(&self, id: Uuid) -> Result<Self::Account, NodeStoreError>;\n\n\n\n /// Overwrites the account corresponding to the provided id with the provided details\n\n async fn update_account(\n\n &self,\n\n id: Uuid,\n\n account: AccountDetails,\n\n ) -> Result<Self::Account, NodeStoreError>;\n\n\n\n /// Modifies the account corresponding to the provided id with the provided settings.\n", "file_path": "crates/interledger-api/src/lib.rs", "rank": 3, "score": 392331.0675939663 }, { "content": "#[async_trait]\n\npub trait HttpStore: Clone + Send + Sync + 'static {\n\n type Account: HttpAccount;\n\n\n\n /// Load account details based on the full HTTP Authorization header\n\n /// received on the incoming HTTP request.\n\n async fn get_account_from_http_auth(\n\n &self,\n\n username: &Username,\n\n token: &str,\n\n ) -> Result<Self::Account, HttpStoreError>;\n\n}\n\n\n", "file_path": "crates/interledger-http/src/lib.rs", "rank": 4, "score": 392331.0675939664 }, { "content": "/// Domain separator for idempotency keys\n\nfn prefixed_idempotency_key(idempotency_key: &str) -> String {\n\n format!(\"idempotency-key:{}\", idempotency_key)\n\n}\n\n\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 5, "score": 382693.3639472256 }, { "content": "/// Domain separator for accounts\n\nfn accounts_key(account_id: Uuid) -> String {\n\n format!(\"accounts:{}\", account_id)\n\n}\n\n\n\n// TODO: Add descriptive errors inside the lua scripts!\n\n\n\n// The following are Lua scripts that are used to atomically execute the given logic\n\n// inside Redis. 
This allows for more complex logic without needing multiple round\n\n// trips for messages to be sent to and from Redis, as well as locks to ensure no other\n\n// process is accessing Redis at the same time.\n\n// For more information on scripting in Redis, see https://redis.io/commands/eval\n\n\n\n/// The node's default ILP Address\n\nstatic DEFAULT_ILP_ADDRESS: Lazy<Address> = Lazy::new(|| Address::from_str(\"local.host\").unwrap());\n\n\n\n/// This lua script fetches an account associated with a username. The client\n\n/// MUST ensure that the returned account is authenticated.\n\nstatic ACCOUNT_FROM_USERNAME: Lazy<Script> =\n\n Lazy::new(|| Script::new(include_str!(\"lua/account_from_username.lua\")));\n\n\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 6, "score": 356768.6983444296 }, { "content": "pub fn test_store(store_fails: bool, account_has_engine: bool) -> TestStore {\n\n let mut acc = TEST_ACCOUNT_0.clone();\n\n acc.no_details = !account_has_engine;\n\n\n\n TestStore::new(vec![acc], store_fails)\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/api/test_helpers.rs", "rank": 7, "score": 355537.5433495789 }, { "content": "/// The base trait that Account types from other Services extend.\n\n/// This trait assumes that the account has an ID that can be compared with others.\n\n/// An account is also characterized by its username, ILP Address, and asset details (the code and the scale)\n\n///\n\n/// Each service can extend the Account type to include additional details they require.\n\n/// Store implementations will implement these Account traits for a concrete type that\n\n/// they will load from the database.\n\npub trait Account: Clone + Send + Sized + Debug {\n\n fn id(&self) -> Uuid;\n\n fn username(&self) -> &Username;\n\n fn ilp_address(&self) -> &Address;\n\n fn asset_scale(&self) -> u8;\n\n fn asset_code(&self) -> &str;\n\n}\n\n\n\n/// A struct representing an incoming ILP Prepare packet or an outgoing one before 
the next hop is set.\n\n#[derive(Clone)]\n\npub struct IncomingRequest<A: Account> {\n\n /// The account which the request originates from\n\n pub from: A,\n\n /// The prepare packet attached to the request\n\n pub prepare: Prepare,\n\n}\n\n\n\n// Use a custom debug implementation to specify the order of the fields\n\nimpl<A> Debug for IncomingRequest<A>\n\nwhere\n", "file_path": "crates/interledger-service/src/lib.rs", "rank": 8, "score": 352898.97909396305 }, { "content": "fn get_value<V>(key: &str, map: &HashMap<String, Value>) -> Result<V, RedisError>\n\nwhere\n\n V: FromRedisValue,\n\n{\n\n if let Some(ref value) = map.get(key) {\n\n from_redis_value(value)\n\n } else {\n\n Err(RedisError::from((\n\n ErrorKind::TypeError,\n\n \"Account is missing field\",\n\n key.to_string(),\n\n )))\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 10, "score": 339961.0678752802 }, { "content": "#[allow(unused)]\n\n#[allow(clippy::mutable_key_type)]\n\npub fn accounts_to_ids(accounts: Vec<Account>) -> HashMap<Address, Uuid> {\n\n let mut map = HashMap::new();\n\n for a in accounts {\n\n map.insert(a.ilp_address().clone(), a.id());\n\n }\n\n map\n\n}\n\n\n\n#[allow(unused)]\n\npub async fn get_balance<T: Display>(\n\n account_id: T,\n\n node_port: u16,\n\n admin_token: &str,\n\n) -> Result<BalanceData, ()> {\n\n let client = reqwest::Client::new();\n\n let res = client\n\n .get(&format!(\n\n \"http://localhost:{}/accounts/{}/balance\",\n\n node_port, account_id\n\n ))\n", "file_path": "crates/ilp-node/tests/redis/test_helpers.rs", "rank": 11, "score": 337172.4536037337 }, { "content": "fn get_value_option<V>(key: &str, map: &HashMap<String, Value>) -> Result<Option<V>, RedisError>\n\nwhere\n\n V: FromRedisValue,\n\n{\n\n if let Some(ref value) = map.get(key) {\n\n from_redis_value(value).map(Some)\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 12, "score": 329862.82113940606 }, { 
"content": "fn get_url_option(key: &str, map: &HashMap<String, Value>) -> Result<Option<Url>, RedisError> {\n\n if let Some(ref value) = map.get(key) {\n\n let value: String = from_redis_value(value)?;\n\n if let Ok(url) = Url::parse(&value) {\n\n Ok(Some(url))\n\n } else {\n\n Err(RedisError::from((ErrorKind::TypeError, \"Invalid URL\")))\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use redis_crate::IntoConnectionInfo;\n\n\n\n #[tokio::test]\n\n async fn connect_fails_if_db_unavailable() {\n\n let result = RedisStoreBuilder::new(\n\n \"redis://127.0.0.1:0\".into_connection_info().unwrap() as ConnectionInfo,\n\n [0; 32],\n\n )\n\n .connect()\n\n .await;\n\n assert!(result.is_err());\n\n }\n\n}\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 13, "score": 323688.7699326389 }, { "content": "pub fn extract_var_octet_string(mut buffer: BytesMut) -> Result<BytesMut> {\n\n let buffer_length = buffer.len();\n\n let mut reader = &buffer[..];\n\n let content_length = reader.read_var_octet_string_length()?;\n\n let content_offset = buffer_length - reader.len();\n\n\n\n let mut remaining = buffer.split_off(content_offset);\n\n if remaining.len() < content_length {\n\n Err(Error::new(ErrorKind::UnexpectedEof, \"buffer too small\"))\n\n } else {\n\n Ok(remaining.split_to(content_length))\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-packet/src/oer.rs", "rank": 14, "score": 301562.73708279943 }, { "content": "/// Returns the 32-bytes SHA256 hash of the provided preimage\n\npub fn get_hash_of(preimage: &[u8]) -> [u8; 32] {\n\n let mut hash = [0; 32];\n\n hash.copy_from_slice(digest(&SHA256, preimage).as_ref());\n\n hash\n\n}\n", "file_path": "crates/interledger-settlement/src/core/mod.rs", "rank": 15, "score": 291689.65070509375 }, { "content": "#[allow(unused)]\n\npub fn random_secret() -> String {\n\n let mut bytes: [u8; 32] = [0; 32];\n\n SystemRandom::new().fill(&mut bytes).unwrap();\n\n 
hex::encode(bytes)\n\n}\n\n\n\n#[derive(serde::Deserialize, Debug, PartialEq)]\n\npub struct BalanceData {\n\n pub balance: f64,\n\n pub asset_code: String,\n\n}\n\n\n\n#[allow(unused)]\n\npub async fn create_account_on_node<T: Serialize>(\n\n api_port: u16,\n\n data: T,\n\n auth: &str,\n\n) -> Result<Account, ()> {\n\n let client = reqwest::Client::new();\n\n let res = client\n", "file_path": "crates/ilp-node/tests/redis/test_helpers.rs", "rank": 16, "score": 276370.4819366123 }, { "content": "/// Create an Interledger service wrapper that publishes records\n\n/// of fulfilled packets to Google Cloud PubSub.\n\n///\n\n/// This is an experimental feature that may be removed in the future.\n\npub fn create_google_pubsub_wrapper<A: Account + 'static>(\n\n config: Option<PubsubConfig>,\n\n) -> impl Fn(OutgoingRequest<A>, Box<dyn OutgoingService<A> + Send>) -> Pin<BoxedIlpFuture> + Clone\n\n{\n\n // If Google credentials were passed in, create an HTTP client and\n\n // OAuth2 client that will automatically fetch and cache access tokens\n\n let utilities = if let Some(config) = config {\n\n let key = service_account_key_from_file(config.service_account_credentials.as_str())\n\n .expect(\"Unable to load Google Cloud credentials from file\");\n\n let access = ServiceAccountAccess::new(key);\n\n // This needs to be wrapped in a Mutex because the .token()\n\n // method takes a mutable reference to self and we want to\n\n // reuse the same fetcher so that it caches the tokens\n\n let token_fetcher = Arc::new(Mutex::new(access.build()));\n\n\n\n // TODO make sure the client uses HTTP/2\n\n let client = Client::new();\n\n let api_endpoint = Arc::new(format!(\n\n \"https://pubsub.googleapis.com/v1/projects/{}/topics/{}:publish\",\n\n config.project_id, config.topic\n", "file_path": "crates/ilp-node/src/instrumentation/google_pubsub.rs", "rank": 17, "score": 275264.3932204783 }, { "content": "pub fn default_redis_url() -> String {\n\n 
String::from(\"redis://127.0.0.1:6379\")\n\n}\n\n\n\n// This function could theoretically be defined as an inherent method on InterledgerNode itself.\n\n// However, we define it in this module in order to consolidate conditionally-compiled code\n\n// into as few discrete units as possible.\n\npub async fn serve_redis_node(node: InterledgerNode, ilp_address: Address) -> Result<(), ()> {\n\n let redis_connection_info = node.database_url.clone().into_connection_info().unwrap();\n\n let redis_addr = redis_connection_info.addr.clone();\n\n let redis_secret = generate_redis_secret(&node.secret_seed);\n\n let store = RedisStoreBuilder::new(redis_connection_info, redis_secret)\n\n .node_ilp_address(ilp_address.clone())\n\n .connect()\n\n .map_err(move |err| error!(target: \"interledger-node\", \"Error connecting to Redis: {:?} {:?}\", redis_addr, err))\n\n .await?;\n\n node.chain_services(store, ilp_address).await\n\n}\n\n\n", "file_path": "crates/ilp-node/src/redis_store.rs", "rank": 18, "score": 265014.55254346936 }, { "content": "// TODO: Do we really need this custom deserialization function?\n\n// You'd expect that Serde would be able to handle this.\n\n/// Helper function to deserialize JSON inside Warp\n\n/// The content-type MUST be application/json and if a charset\n\n/// is specified, it MUST be UTF-8\n\npub fn deserialize_json<T: DeserializeOwned + Send>(\n\n) -> impl Filter<Extract = (T,), Error = Rejection> + Copy {\n\n warp::header::<String>(\"content-type\")\n\n .and(warp::body::bytes())\n\n .and_then(|content_type: String, buf: Bytes| {\n\n async move {\n\n let mime_type: Mime = content_type.parse().map_err(|_| {\n\n Rejection::from(ApiError::bad_request().detail(\"Invalid content-type header.\"))\n\n })?;\n\n if mime_type.type_() != mime::APPLICATION_JSON.type_() {\n\n return Err(Rejection::from(\n\n ApiError::bad_request().detail(\"Invalid content-type.\"),\n\n ));\n\n } else if let Some(charset) = mime_type.get_param(\"charset\") {\n\n // Charset should 
be UTF-8\n\n // https://tools.ietf.org/html/rfc8259#section-8.1\n\n if charset != mime::UTF_8 {\n\n return Err(Rejection::from(\n\n ApiError::bad_request().detail(\"Charset should be UTF-8.\"),\n\n ));\n", "file_path": "crates/interledger-http/src/lib.rs", "rank": 19, "score": 264831.59433447197 }, { "content": "pub fn map_of_number_or_string<'de, D>(deserializer: D) -> Result<HashMap<String, f64>, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n struct Wrapper(#[serde(deserialize_with = \"number_or_string\")] f64);\n\n\n\n let v = HashMap::<String, Wrapper>::deserialize(deserializer)?;\n\n Ok(v.into_iter().map(|(k, Wrapper(v))| (k, v)).collect())\n\n}\n\n\n\n// TODO should the methods from this trait be split up and put into the\n\n// traits that are more specific to what they're doing?\n\n// One argument against doing that is that the NodeStore allows admin-only\n\n// modifications to the values, whereas many of the other traits mostly\n\n// read from the configured values.\n", "file_path": "crates/interledger-api/src/lib.rs", "rank": 20, "score": 260516.4920754744 }, { "content": "pub fn get_open_port(try_port: Option<u16>) -> u16 {\n\n if let Some(port) = try_port {\n\n let listener = net2::TcpBuilder::new_v4().unwrap();\n\n listener.reuse_address(true).unwrap();\n\n if let Ok(listener) = listener.bind(&format!(\"127.0.0.1:{}\", port)) {\n\n return listener.listen(1).unwrap().local_addr().unwrap().port();\n\n }\n\n }\n\n\n\n for _i in 0..1000 {\n\n let listener = net2::TcpBuilder::new_v4().unwrap();\n\n listener.reuse_address(true).unwrap();\n\n if let Ok(listener) = listener.bind(\"127.0.0.1:0\") {\n\n return listener.listen(1).unwrap().local_addr().unwrap().port();\n\n }\n\n }\n\n panic!(\"Cannot find open port!\");\n\n}\n\n\n\npub async fn delay(ms: u64) {\n\n tokio::time::delay_for(Duration::from_millis(ms)).await;\n\n}\n\n\n", "file_path": "crates/ilp-node/tests/redis/redis_helpers.rs", "rank": 21, "score": 
258743.44035831004 }, { "content": "#[derive(Eq, PartialEq, Hash, Debug, Default, Serialize, Deserialize, Copy, Clone)]\n\nstruct RedisAccountId(Uuid);\n\n\n\nimpl FromStr for RedisAccountId {\n\n type Err = uuid::Error;\n\n\n\n fn from_str(src: &str) -> Result<Self, Self::Err> {\n\n let id = Uuid::from_str(&src)?;\n\n Ok(RedisAccountId(id))\n\n }\n\n}\n\n\n\nimpl Display for RedisAccountId {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {\n\n f.write_str(&self.0.to_hyphenated().to_string())\n\n }\n\n}\n\n\n\nimpl ToRedisArgs for RedisAccountId {\n\n fn write_redis_args<W: RedisWrite + ?Sized>(&self, out: &mut W) {\n\n out.write_arg(self.0.to_hyphenated().to_string().as_bytes().as_ref());\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 22, "score": 257472.81914329086 }, { "content": "#[allow(unused)]\n\npub fn connection_info_to_string(info: ConnectionInfo) -> String {\n\n match info.addr.as_ref() {\n\n ConnectionAddr::Tcp(url, port) => format!(\"redis://{}:{}/{}\", url, port, info.db),\n\n ConnectionAddr::Unix(path) => {\n\n format!(\"redis+unix:{}?db={}\", path.to_str().unwrap(), info.db)\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/ilp-node/tests/redis/redis_helpers.rs", "rank": 23, "score": 255645.27266214218 }, { "content": "/// Decrypts a AES256-GCM encrypted ciphertext.\n\n///\n\n/// The secret key is generated deterministically by HMAC-256'ing the `shared_secret`\n\n/// and the hardcoded string \"ilp_stream_encryption\"\n\n///\n\n/// The `additional_data` field is left empty.\n\n///\n\n/// The nonce and auth tag are extracted from the first 12 and 16 bytes\n\n/// of the ciphertext.\n\npub fn decrypt(shared_secret: &[u8], mut ciphertext: BytesMut) -> Result<BytesMut, ()> {\n\n // ciphertext must include at least a nonce and tag\n\n if ciphertext.len() < AUTH_TAG_LENGTH {\n\n return Err(());\n\n }\n\n\n\n let key = hmac_sha256(shared_secret, &ENCRYPTION_KEY_STRING);\n\n let key = 
aead::UnboundKey::new(&aead::AES_256_GCM, &key)\n\n .expect(\"Failed to create a new opening key for decrypting data!\");\n\n let key = aead::LessSafeKey::new(key);\n\n\n\n let mut nonce: [u8; NONCE_LENGTH] = [0; NONCE_LENGTH];\n\n nonce.copy_from_slice(&ciphertext.split_to(NONCE_LENGTH));\n\n\n\n let auth_tag = ciphertext.split_to(AUTH_TAG_LENGTH);\n\n let additional_data: &[u8] = &[];\n\n\n\n // Ring expects the tag to come after the data\n\n ciphertext.unsplit(auth_tag);\n\n\n", "file_path": "crates/interledger-stream/src/crypto.rs", "rank": 24, "score": 254843.4730613338 }, { "content": "pub fn mock_message(status_code: usize) -> mockito::Mock {\n\n mock(\"POST\", MESSAGES_API.clone())\n\n // The messages API receives raw data\n\n .match_header(\"Content-Type\", \"application/octet-stream\")\n\n .with_status(status_code)\n\n .with_body(BODY)\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/api/test_helpers.rs", "rank": 25, "score": 254190.75243982952 }, { "content": "pub fn test_api(\n\n test_store: TestStore,\n\n should_fulfill: bool,\n\n) -> warp::filters::BoxedFilter<(impl warp::Reply,)> {\n\n let outgoing = outgoing_service_fn(move |_| {\n\n if should_fulfill {\n\n Ok(FulfillBuilder {\n\n fulfillment: &[0; 32],\n\n data: b\"hello!\",\n\n }\n\n .build())\n\n } else {\n\n Err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: b\"No other outgoing handler!\",\n\n data: &[],\n\n triggered_by: Some(&SERVICE_ADDRESS),\n\n }\n\n .build())\n\n }\n\n });\n\n create_settlements_filter(test_store, outgoing)\n\n}\n", "file_path": "crates/interledger-settlement/src/api/test_helpers.rs", "rank": 26, "score": 251195.74919443025 }, { "content": "pub fn test_service(\n\n) -> SettlementMessageService<impl IncomingService<TestAccount> + Clone, TestAccount> {\n\n SettlementMessageService::new(incoming_service_fn(|_request| {\n\n Err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: b\"No other incoming handler!\",\n\n data: &[],\n\n 
triggered_by: Some(&SERVICE_ADDRESS),\n\n }\n\n .build())\n\n }))\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/api/test_helpers.rs", "rank": 27, "score": 251195.74919443025 }, { "content": "pub fn test_accounts_api(\n\n) -> impl warp::Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {\n\n let incoming = incoming_service_fn(|_request| {\n\n Err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: b\"No other incoming handler!\",\n\n data: &[],\n\n triggered_by: None,\n\n }\n\n .build())\n\n });\n\n let outgoing = outgoing_service_fn(move |_request| {\n\n Ok(FulfillBuilder {\n\n fulfillment: &[0; 32],\n\n data: b\"hello!\",\n\n }\n\n .build())\n\n });\n\n let btp = BtpOutgoingService::new(\n\n Address::from_str(\"example.alice\").unwrap(),\n", "file_path": "crates/interledger-api/src/routes/test_helpers.rs", "rank": 28, "score": 248661.486723045 }, { "content": "type RouteVec = Vec<(String, RedisAccountId)>;\n\n\n\nuse futures::future::TryFutureExt;\n\n\n\n// TODO replace this with pubsub when async pubsub is added upstream: https://github.com/mitsuhiko/redis-rs/issues/183\n\nasync fn update_routes(\n\n mut connection: RedisReconnect,\n\n routing_table: Arc<RwLock<Arc<HashMap<String, Uuid>>>>,\n\n) -> Result<(), RedisError> {\n\n let mut pipe = redis_crate::pipe();\n\n pipe.hgetall(ROUTES_KEY)\n\n .hgetall(STATIC_ROUTES_KEY)\n\n .get(DEFAULT_ROUTE_KEY);\n\n let (routes, static_routes, default_route): (RouteVec, RouteVec, Option<RedisAccountId>) =\n\n pipe.query_async(&mut connection).await?;\n\n trace!(\n\n \"Loaded routes from redis. 
Static routes: {:?}, default route: {:?}, other routes: {:?}\",\n\n static_routes,\n\n default_route,\n\n routes\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 29, "score": 242336.05436662078 }, { "content": "/// Converts a number from a precision to another while taking precision loss into account\n\n///\n\n/// # Examples\n\n/// ```rust\n\n/// # use num_bigint::BigUint;\n\n/// # use interledger_settlement::core::scale_with_precision_loss;\n\n/// assert_eq!(\n\n/// scale_with_precision_loss(BigUint::from(905u32), 9, 11),\n\n/// (BigUint::from(9u32), BigUint::from(5u32))\n\n/// );\n\n///\n\n/// assert_eq!(\n\n/// scale_with_precision_loss(BigUint::from(8053u32), 9, 12),\n\n/// (BigUint::from(8u32), BigUint::from(53u32))\n\n/// );\n\n///\n\n/// assert_eq!(\n\n/// scale_with_precision_loss(BigUint::from(1u32), 9, 6),\n\n/// (BigUint::from(1000u32), BigUint::from(0u32))\n\n/// );\n\n/// ```\n\npub fn scale_with_precision_loss(\n\n amount: BigUint,\n\n local_scale: u8,\n\n remote_scale: u8,\n\n) -> (BigUint, BigUint) {\n\n // It's safe to unwrap here since BigUint's normalize_scale cannot fail.\n\n let scaled = amount\n\n .normalize_scale(ConvertDetails {\n\n from: remote_scale,\n\n to: local_scale,\n\n })\n\n .unwrap();\n\n\n\n if local_scale < remote_scale {\n\n // If we ended up downscaling, scale the value back up back,\n\n // and return any precision loss\n\n // note that `from` and `to` are reversed compared to the previous call\n\n let upscaled = scaled\n\n .normalize_scale(ConvertDetails {\n\n from: local_scale,\n", "file_path": "crates/interledger-settlement/src/core/mod.rs", "rank": 30, "score": 240533.4428683202 }, { "content": "pub fn encrypt_token(encryption_key: &aead::LessSafeKey, token: &[u8]) -> BytesMut {\n\n let mut token = token.to_vec();\n\n\n\n let mut nonce: [u8; NONCE_LENGTH] = [0; NONCE_LENGTH];\n\n SystemRandom::new()\n\n .fill(&mut nonce)\n\n .expect(\"Unable to get sufficient entropy for nonce\");\n\n let nonce_copy = 
nonce;\n\n let nonce = aead::Nonce::assume_unique_for_key(nonce);\n\n match encryption_key.seal_in_place_append_tag(nonce, aead::Aad::from(&[]), &mut token) {\n\n Ok(_) => {\n\n token.append(&mut nonce_copy.as_ref().to_vec());\n\n BytesMut::from(token.as_slice())\n\n }\n\n _ => panic!(\"Unable to encrypt token\"),\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store/src/crypto.rs", "rank": 31, "score": 240428.7530301512 }, { "content": "/// Encrypts a plaintext by calling [encrypt_with_nonce](./fn.encrypt_with_nonce.html)\n\n/// with a random nonce of [`NONCE_LENGTH`](./constant.NONCE_LENGTH.html) generated using\n\n/// [SystemRandom::new()](../../ring/rand/struct.SystemRandom.html#method.new)\n\npub fn encrypt(shared_secret: &[u8], plaintext: BytesMut) -> BytesMut {\n\n // Generate a random nonce or IV\n\n let mut nonce: [u8; NONCE_LENGTH] = [0; NONCE_LENGTH];\n\n SystemRandom::new()\n\n .fill(&mut nonce[..])\n\n .expect(\"Failed to securely generate a random nonce!\");\n\n\n\n encrypt_with_nonce(shared_secret, plaintext, nonce)\n\n}\n\n\n", "file_path": "crates/interledger-stream/src/crypto.rs", "rank": 32, "score": 235284.0245279023 }, { "content": "#[async_trait]\n\npub trait IdempotentStore {\n\n /// Returns the API response that was saved when the idempotency key was used\n\n /// Also returns a hash of the input data which resulted in the response\n\n async fn load_idempotent_data(\n\n &self,\n\n idempotency_key: String,\n\n ) -> Result<Option<IdempotentData>, IdempotentStoreError>;\n\n\n\n /// Saves the data that was passed along with the api request for later\n\n /// The store also saves the hash of the input, so that it errors out on requests\n\n /// with conflicting input hashes for the same idempotency key\n\n async fn save_idempotent_data(\n\n &self,\n\n idempotency_key: String,\n\n input_hash: [u8; 32],\n\n status_code: StatusCode,\n\n data: Bytes,\n\n ) -> Result<(), IdempotentStoreError>;\n\n}\n\n\n", "file_path": 
"crates/interledger-settlement/src/core/idempotency.rs", "rank": 33, "score": 224651.21804066264 }, { "content": "fn address_to_string<S>(address: &Address, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(str::from_utf8(address.as_ref()).unwrap_or(\"\"))\n\n}\n\n\n", "file_path": "crates/interledger-store/src/account.rs", "rank": 34, "score": 218207.14418757282 }, { "content": "/// Extension trait for [Account](../interledger_service/trait.Account.html) with [settlement](https://interledger.org/rfcs/0038-settlement-engines/) related information\n\npub trait SettlementAccount: Account {\n\n /// The [SettlementEngineDetails](./struct.SettlementEngineDetails.html) (if any) associated with that account\n\n fn settlement_engine_details(&self) -> Option<SettlementEngineDetails> {\n\n None\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "crates/interledger-settlement/src/core/types.rs", "rank": 35, "score": 217338.68510830106 }, { "content": "fn payment_pointer_to_url(payment_pointer: &str) -> String {\n\n let mut url: String = if payment_pointer.starts_with('$') {\n\n let mut url = \"https://\".to_string();\n\n url.push_str(&payment_pointer[1..]);\n\n url\n\n } else {\n\n payment_pointer.to_string()\n\n };\n\n\n\n let num_slashes = url.matches('/').count();\n\n if num_slashes == 2 {\n\n url.push_str(\"/.well-known/pay\");\n\n } else if num_slashes == 1 && url.ends_with('/') {\n\n url.push_str(\".well-known/pay\");\n\n }\n\n trace!(\n\n \"Converted payment pointer: {} to URL: {}\",\n\n payment_pointer,\n\n url\n\n );\n", "file_path": "crates/interledger-spsp/src/client.rs", "rank": 36, "score": 216023.9473975294 }, { "content": "fn get_bytes_option(\n\n key: &str,\n\n map: &HashMap<String, Value>,\n\n) -> Result<Option<BytesMut>, RedisError> {\n\n if let Some(ref value) = map.get(key) {\n\n let vec: Vec<u8> = from_redis_value(value)?;\n\n Ok(Some(BytesMut::from(vec.as_slice())))\n\n } else {\n\n Ok(None)\n\n 
}\n\n}\n\n\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 37, "score": 214293.1863260924 }, { "content": "// returns (subcommand paths, config path)\n\nfn precheck_arguments(mut app: App) -> Result<(Vec<String>, Option<String>), ()> {\n\n // not to cause `required fields error`.\n\n reset_required(&mut app);\n\n let matches = app.get_matches_safe();\n\n if matches.is_err() {\n\n // if app could not get any appropriate match, just return not to show help etc.\n\n return Err(());\n\n }\n\n let matches = &matches.unwrap();\n\n let mut path = Vec::<String>::new();\n\n let subcommand = get_deepest_command(matches, &mut path);\n\n let mut config_path: Option<String> = None;\n\n if let Some(config_path_arg) = subcommand.value_of(\"config\") {\n\n config_path = Some(config_path_arg.to_string());\n\n };\n\n Ok((path, config_path))\n\n}\n\n\n", "file_path": "crates/ilp-node/src/main.rs", "rank": 38, "score": 211129.5986509499 }, { "content": "fn optional_secret_bytes_to_utf8<S>(\n\n _bytes: &Option<SecretBytesMut>,\n\n serializer: S,\n\n) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(\"SECRET\")\n\n}\n\n\n\nimpl Account {\n\n /// Creates an account from the provided id and details. 
If there is no ILP Address\n\n /// in the provided details, then the account's ILP Address is generated by appending\n\n /// the `details.username` to the provided `node_ilp_address`.\n\n /// The default RoutingRelation is `NonRoutingAccount`\n\n pub fn try_from(\n\n id: Uuid,\n\n details: AccountDetails,\n\n node_ilp_address: Address,\n\n ) -> Result<Account, CreateAccountError> {\n", "file_path": "crates/interledger-store/src/account.rs", "rank": 39, "score": 209029.25160707865 }, { "content": "pub fn test_service_with_routes() -> (\n\n CcpRouteManager<\n\n impl IncomingService<TestAccount> + Clone,\n\n impl OutgoingService<TestAccount> + Clone,\n\n TestStore,\n\n TestAccount,\n\n >,\n\n OutgoingRequests,\n\n) {\n\n let local_routes = HashMap::from_iter(vec![\n\n (\n\n \"example.local.1\".to_string(),\n\n TestAccount::new(Uuid::from_slice(&[1; 16]).unwrap(), \"example.local.1\"),\n\n ),\n\n (\n\n \"example.connector.other-local\".to_string(),\n\n TestAccount {\n\n id: Uuid::from_slice(&[3; 16]).unwrap(),\n\n ilp_address: Address::from_str(\"example.connector.other-local\").unwrap(),\n\n relation: RoutingRelation::NonRoutingAccount,\n", "file_path": "crates/interledger-ccp/src/test_helpers.rs", "rank": 40, "score": 206753.9690311742 }, { "content": "type RoutingTable<A> = HashMap<String, A>;\n\n\n\n#[async_trait]\n\nimpl CcpRoutingStore for RedisStore {\n\n type Account = Account;\n\n\n\n async fn get_accounts_to_send_routes_to(\n\n &self,\n\n ignore_accounts: Vec<Uuid>,\n\n ) -> Result<Vec<Account>, CcpRoutingStoreError> {\n\n let account_ids: Vec<RedisAccountId> =\n\n self.connection.clone().smembers(\"send_routes_to\").await?;\n\n let account_ids: Vec<Uuid> = account_ids\n\n .into_iter()\n\n .map(|id| id.0)\n\n .filter(|id| !ignore_accounts.contains(&id))\n\n .collect();\n\n if account_ids.is_empty() {\n\n return Ok(Vec::new());\n\n }\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 41, "score": 203989.1532231804 }, { "content": "type 
BoxedIlpFuture = Box<dyn Future<Output = IlpResult> + Send + 'static>;\n\n\n", "file_path": "crates/ilp-node/src/instrumentation/google_pubsub.rs", "rank": 42, "score": 203304.24828622976 }, { "content": "pub fn test_node_settings_api(\n\n) -> impl warp::Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {\n\n node_settings_api(\"admin\".to_owned(), None, TestStore).recover(default_rejection_handler)\n\n}\n\n\n", "file_path": "crates/interledger-api/src/routes/test_helpers.rs", "rank": 43, "score": 202491.70574294607 }, { "content": "#[cfg(test)]\n\n#[derive(Clone, Debug)]\n\nstruct TestAccount(Uuid);\n\n#[cfg(test)]\n\nimpl Account for TestAccount {\n\n fn id(&self) -> Uuid {\n\n self.0\n\n }\n\n\n\n fn username(&self) -> &Username {\n\n &ALICE\n\n }\n\n\n\n fn asset_code(&self) -> &str {\n\n \"XYZ\"\n\n }\n\n\n\n // All connector accounts use asset scale = 9.\n\n fn asset_scale(&self) -> u8 {\n\n 9\n\n }\n\n\n\n fn ilp_address(&self) -> &Address {\n\n &EXAMPLE_ADDRESS\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-service-util/src/validator_service.rs", "rank": 44, "score": 201651.26300638422 }, { "content": "pub fn decrypt_token(\n\n decryption_key: &aead::LessSafeKey,\n\n encrypted: &[u8],\n\n) -> Result<SecretBytesMut, ()> {\n\n if encrypted.len() < aead::MAX_TAG_LEN {\n\n return Err(());\n\n }\n\n\n\n let mut encrypted = encrypted.to_vec();\n\n let nonce_bytes = encrypted.split_off(encrypted.len() - NONCE_LENGTH);\n\n let mut nonce: [u8; NONCE_LENGTH] = [0; NONCE_LENGTH];\n\n nonce.copy_from_slice(nonce_bytes.as_ref());\n\n let nonce = aead::Nonce::assume_unique_for_key(nonce);\n\n\n\n if let Ok(token) = decryption_key.open_in_place(nonce, aead::Aad::empty(), &mut encrypted) {\n\n Ok(SecretBytesMut::new(&token[..]))\n\n } else {\n\n Err(())\n\n }\n\n}\n", "file_path": "crates/interledger-store/src/crypto.rs", "rank": 45, "score": 200477.26148993045 }, { "content": "pub fn number_or_string<'de, D, T>(deserializer: D) -> Result<T, 
D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n T: FromStr + Deserialize<'de>,\n\n <T as FromStr>::Err: Display,\n\n{\n\n match NumOrStr::deserialize(deserializer)? {\n\n NumOrStr::Num(n) => Ok(n),\n\n NumOrStr::Str(s) => T::from_str(&s).map_err(de::Error::custom),\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-api/src/lib.rs", "rank": 46, "score": 198643.39688017778 }, { "content": "pub fn test_service() -> CcpRouteManager<\n\n impl IncomingService<TestAccount> + Clone,\n\n impl OutgoingService<TestAccount> + Clone,\n\n TestStore,\n\n TestAccount,\n\n> {\n\n let addr = Address::from_str(\"example.connector\").unwrap();\n\n CcpRouteManagerBuilder::new(\n\n addr.clone(),\n\n TestStore::new(),\n\n outgoing_service_fn(|_request| {\n\n Err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: b\"No other outgoing handler!\",\n\n data: &[],\n\n triggered_by: Some(&EXAMPLE_CONNECTOR),\n\n }\n\n .build())\n\n }),\n\n incoming_service_fn(|_request| {\n", "file_path": "crates/interledger-ccp/src/test_helpers.rs", "rank": 47, "score": 198593.976926144 }, { "content": "#[async_trait]\n\npub trait CcpRoutingStore: Clone {\n\n type Account: CcpRoutingAccount;\n\n\n\n // TODO should we have a way to only get the details for specific routes?\n\n /// Gets the local and manually configured routes\n\n async fn get_local_and_configured_routes(\n\n &self,\n\n ) -> Result<LocalAndConfiguredRoutes<Self::Account>, CcpRoutingStoreError>;\n\n\n\n /// Gets all accounts which the node should send routes to (Peer and Child accounts)\n\n /// The caller can also pass a vector of account ids to be ignored\n\n async fn get_accounts_to_send_routes_to(\n\n &self,\n\n ignore_accounts: Vec<Uuid>,\n\n ) -> Result<Vec<Self::Account>, CcpRoutingStoreError>;\n\n\n\n /// Gets all accounts which the node should receive routes to (Peer and Parent accounts)\n\n async fn get_accounts_to_receive_routes_from(\n\n &self,\n\n ) -> Result<Vec<Self::Account>, CcpRoutingStoreError>;\n\n\n\n 
/// Sets the new routes to the store (prefix -> account)\n\n async fn set_routes(\n\n &mut self,\n\n routes: impl IntoIterator<Item = (String, Self::Account)> + Send + 'async_trait,\n\n ) -> Result<(), CcpRoutingStoreError>;\n\n}\n", "file_path": "crates/interledger-ccp/src/lib.rs", "rank": 48, "score": 195730.1590561806 }, { "content": "pub trait ExchangeRateStore: Clone {\n\n // TODO we may want to make this async if/when we use pubsub to broadcast\n\n // rate changes to different instances of a horizontally-scalable node\n\n fn set_exchange_rates(&self, rates: HashMap<String, f64>)\n\n -> Result<(), ExchangeRateStoreError>;\n\n\n\n fn get_exchange_rates(&self, asset_codes: &[&str]) -> Result<Vec<f64>, ExchangeRateStoreError>;\n\n\n\n // TODO should this be on the API instead? That's where it's actually used\n\n // TODO should we combine this method with get_exchange_rates?\n\n // The downside of doing that is in this case we want a HashMap with owned values\n\n // (so that we don't accidentally lock up the RwLock on the store's exchange_rates)\n\n // but in the normal case of getting the rate between two assets, we don't want to\n\n // copy all the rate data\n\n fn get_all_exchange_rates(&self) -> Result<HashMap<String, f64>, ExchangeRateStoreError>;\n\n}\n\n\n\n/// This determines which external API service to poll for exchange rates.\n\n#[derive(Debug, Clone, Deserialize)]\n\npub enum ExchangeRateProvider {\n", "file_path": "crates/interledger-rates/src/lib.rs", "rank": 49, "score": 195724.77910214177 }, { "content": "// This function takes the map of arguments parsed by Clap\n\n// and extracts the values for each argument.\n\nfn extract_args<'a>(matches: &'a ArgMatches) -> (&'a str, HashMap<&'a str, &'a str>) {\n\n let mut args: HashMap<_, _> = matches // Contains data and metadata about the parsed command\n\n .args // The hashmap containing each parameter along with its values and metadata\n\n .iter()\n\n .map(|(&key, val)| (key, val.vals.get(0))) // Extract 
raw key/value pairs\n\n .filter(|(_, val)| val.is_some()) // Reject keys that don't have values\n\n .map(|(key, val)| (key, val.unwrap().to_str().unwrap())) // Convert values from bytes to strings\n\n .collect();\n\n let auth = args.remove(\"authorization_key\").unwrap();\n\n (auth, args)\n\n}\n\n\n", "file_path": "crates/ilp-cli/src/interpreter.rs", "rank": 50, "score": 194356.27411593474 }, { "content": "fn unflatten_pairs<'a>(matches: &'a ArgMatches) -> (&'a str, HashMap<&'a str, &'a str>) {\n\n let mut pairs = HashMap::new();\n\n if let Some(halve_matches) = matches.values_of(\"halve\") {\n\n let halves: Vec<&str> = halve_matches.collect();\n\n for pair in halves.windows(2).step_by(2) {\n\n pairs.insert(pair[0], pair[1]);\n\n }\n\n }\n\n (matches.value_of(\"authorization_key\").unwrap(), pairs)\n\n}\n\n\n", "file_path": "crates/ilp-cli/src/interpreter.rs", "rank": 51, "score": 194350.964168215 }, { "content": "fn benchmark_serialize(c: &mut Criterion) {\n\n let prepare_bytes = BytesMut::from(PREPARE.build());\n\n c.bench_function(\"Prepare (serialize)\", move |b| {\n\n b.iter(|| {\n\n assert_eq!(BytesMut::from(PREPARE.build()), prepare_bytes);\n\n });\n\n });\n\n\n\n let fulfill_bytes = BytesMut::from(FULFILL.build());\n\n c.bench_function(\"Fulfill (serialize)\", move |b| {\n\n b.iter(|| {\n\n assert_eq!(BytesMut::from(FULFILL.build()), fulfill_bytes);\n\n });\n\n });\n\n\n\n let reject_bytes = BytesMut::from(REJECT.build());\n\n c.bench_function(\"Reject (serialize)\", move |b| {\n\n b.iter(|| {\n\n assert_eq!(BytesMut::from(REJECT.build()), reject_bytes);\n\n });\n\n });\n\n}\n\n\n", "file_path": "crates/interledger-packet/benches/packets.rs", "rank": 52, "score": 193624.34738194442 }, { "content": "fn benchmark_deserialize(c: &mut Criterion) {\n\n let prepare_bytes = BytesMut::from(PREPARE.build());\n\n c.bench_function(\"Prepare (deserialize)\", move |b| {\n\n b.iter(|| {\n\n let parsed = Prepare::try_from(prepare_bytes.clone()).unwrap();\n\n 
assert_eq!(parsed.amount(), PREPARE.amount);\n\n assert_eq!(parsed.destination(), PREPARE.destination);\n\n });\n\n });\n\n\n\n let fulfill_bytes = BytesMut::from(FULFILL.build());\n\n c.bench_function(\"Fulfill (deserialize)\", move |b| {\n\n b.iter(|| {\n\n let parsed = Fulfill::try_from(fulfill_bytes.clone()).unwrap();\n\n assert_eq!(parsed.fulfillment(), FULFILL.fulfillment);\n\n });\n\n });\n\n\n\n let reject_bytes = BytesMut::from(REJECT.build());\n\n c.bench_function(\"Reject (deserialize)\", move |b| {\n", "file_path": "crates/interledger-packet/benches/packets.rs", "rank": 53, "score": 193606.51109968964 }, { "content": "pub fn optional_number_or_string<'de, D, T>(deserializer: D) -> Result<Option<T>, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n T: FromStr + Deserialize<'de>,\n\n <T as FromStr>::Err: Display,\n\n{\n\n match NumOrStr::deserialize(deserializer)? {\n\n NumOrStr::Num(n) => Ok(Some(n)),\n\n NumOrStr::Str(s) => T::from_str(&s)\n\n .map_err(de::Error::custom)\n\n .and_then(|n| Ok(Some(n))),\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-api/src/lib.rs", "rank": 54, "score": 192206.31993283133 }, { "content": "/// Returns a Node Settlement filter which exposes a Warp-compatible\n\n/// idempotent API which\n\n/// 1. receives messages about incoming settlements from the engine\n\n/// 1. 
sends messages from the connector's engine to the peer's\n\n/// message service which are sent to the peer's engine\n\npub fn create_settlements_filter<S, O, A>(\n\n store: S,\n\n outgoing_handler: O,\n\n) -> warp::filters::BoxedFilter<(impl warp::Reply,)>\n\nwhere\n\n S: LeftoversStore<AccountId = Uuid, AssetType = BigUint>\n\n + SettlementStore<Account = A>\n\n + IdempotentStore\n\n + AccountStore<Account = A>\n\n + Clone\n\n + Send\n\n + Sync\n\n + 'static,\n\n O: OutgoingService<A> + Clone + Send + Sync + 'static,\n\n A: SettlementAccount + Account + Send + Sync + 'static,\n\n{\n\n let with_store = warp::any().map(move || store.clone());\n\n let idempotency = warp::header::optional::<String>(\"idempotency-key\");\n\n let account_id_filter = warp::path(\"accounts\").and(warp::path::param::<String>()); // account_id\n\n\n", "file_path": "crates/interledger-settlement/src/api/node_api.rs", "rank": 55, "score": 190054.26062567087 }, { "content": "fn merge_config_file(config_path: &str, config: &mut Config) -> Result<(), ConfigError> {\n\n let file_config = config::File::with_name(config_path);\n\n let file_config = file_config.collect()?;\n\n // if the key is not defined in the given config already, set it to the config\n\n // because the original values override the ones from the config file\n\n for (k, v) in file_config {\n\n if config.get_str(&k).is_err() {\n\n config.set(&k, v)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/ilp-node/src/main.rs", "rank": 56, "score": 189445.45541135967 }, { "content": "pub fn accounts_api<I, O, S, A, B>(\n\n server_secret: Bytes,\n\n admin_api_token: String,\n\n default_spsp_account: Option<Username>,\n\n incoming_handler: I,\n\n outgoing_handler: O,\n\n btp: BtpOutgoingService<B, A>,\n\n store: S,\n\n) -> impl warp::Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone\n\nwhere\n\n I: IncomingService<A> + Clone + Send + Sync + 'static,\n\n O: OutgoingService<A> + Clone + Send + Sync + 
'static,\n\n B: OutgoingService<A> + Clone + Send + Sync + 'static,\n\n S: NodeStore<Account = A>\n\n + AccountStore<Account = A>\n\n + AddressStore\n\n + HttpStore<Account = A>\n\n + BalanceStore\n\n + StreamNotificationsStore<Account = A>\n\n + ExchangeRateStore\n", "file_path": "crates/interledger-api/src/routes/accounts.rs", "rank": 57, "score": 187389.152808377 }, { "content": "#[derive(Clone)]\n\nstruct TestStore;\n\n\n\nuse serde_json::json;\n\npub static USERNAME: Lazy<Username> = Lazy::new(|| Username::from_str(\"alice\").unwrap());\n\npub static EXAMPLE_ADDRESS: Lazy<Address> =\n\n Lazy::new(|| Address::from_str(\"example.alice\").unwrap());\n\npub static DETAILS: Lazy<Option<Value>> = Lazy::new(|| {\n\n Some(json!({\n\n \"ilp_address\": \"example.alice\",\n\n \"username\": \"alice\",\n\n \"asset_code\": \"XYZ\",\n\n \"asset_scale\": 9,\n\n \"ilp_over_http_incoming_token\" : \"password\",\n\n }))\n\n});\n\nconst AUTH_PASSWORD: &str = \"password\";\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct TestAccount;\n\n\n", "file_path": "crates/interledger-api/src/routes/test_helpers.rs", "rank": 58, "score": 182664.06125202245 }, { "content": "// This sets the Config values which contains environment variables, config file settings, and STDIN\n\n// settings, into each option's env value which is used when Parser parses the arguments. 
If this\n\n// value is set, the Parser reads the value from it and doesn't warn even if the argument is not\n\n// given from CLI.\n\n// Usually `env` fn is used when creating `App` but this function automatically fills it so\n\n// we don't need to call `env` fn manually.\n\nfn set_app_env(env_config: &Config, app: &mut App, path: &[String], depth: usize) {\n\n if depth == 1 {\n\n for item in &mut app.p.opts {\n\n if let Ok(value) = env_config.get_str(&item.b.name.to_lowercase()) {\n\n item.v.env = Some((&OsStr::new(item.b.name), Some(OsString::from(value))));\n\n }\n\n }\n\n return;\n\n }\n\n for subcommand in &mut app.p.subcommands {\n\n if subcommand.get_name() == path[path.len() - depth] {\n\n set_app_env(env_config, subcommand, path, depth - 1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/ilp-node/src/main.rs", "rank": 59, "score": 181323.6430176456 }, { "content": "/// Returns a 32-byte sha256 digest of the provided preimage\n\npub fn hash_sha256(preimage: &[u8]) -> [u8; 32] {\n\n let output = digest::digest(&digest::SHA256, &preimage[..]);\n\n let mut to_return: [u8; 32] = [0; 32];\n\n to_return.copy_from_slice(output.as_ref());\n\n to_return\n\n}\n\n\n", "file_path": "crates/interledger-stream/src/crypto.rs", "rank": 60, "score": 181140.41284931469 }, { "content": "// TODO test traits\n\npub trait ReadOerExt: Read + ReadBytesExt + Debug {\n\n #[inline]\n\n fn read_var_octet_string(&mut self) -> Result<Vec<u8>> {\n\n let length: u8 = self.read_u8()?;\n\n\n\n if length == 0 {\n\n return Ok(vec![]);\n\n }\n\n\n\n let actual_length: u64 = if length & HIGH_BIT != 0 {\n\n let length_prefix_length = length & LOWER_SEVEN_BITS;\n\n // TODO check for canonical length\n\n self.read_uint::<BigEndian>(length_prefix_length as usize)? 
as u64\n\n } else {\n\n u64::from(length)\n\n };\n\n\n\n // TODO handle if the length is too long\n\n let mut buf = Vec::with_capacity(actual_length as usize);\n\n self.take(actual_length).read_to_end(&mut buf)?;\n", "file_path": "crates/interledger-btp/src/oer.rs", "rank": 61, "score": 180995.9713708256 }, { "content": "pub trait WriteOerExt: Write + WriteBytesExt + Debug {\n\n #[inline]\n\n fn write_var_octet_string(&mut self, string: &[u8]) -> Result<()> {\n\n let length = string.len();\n\n\n\n if length < 127 {\n\n self.write_u8(length as u8)?;\n\n } else {\n\n let bit_length_of_length = format!(\"{:b}\", length).chars().count();\n\n let length_of_length = { bit_length_of_length as f32 / 8.0 }.ceil() as u8;\n\n self.write_u8(HIGH_BIT | length_of_length)?;\n\n self.write_uint::<BigEndian>(length as u64, length_of_length as usize)?;\n\n }\n\n self.write_all(string)?;\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n // Write a u64 as an OER VarUInt\n\n fn write_var_uint(&mut self, uint: &BigUint) -> Result<()> {\n\n self.write_var_octet_string(&uint.to_bytes_be())?;\n\n Ok(())\n\n }\n\n}\n\n\n\n// Add this trait to all Writable things when this is used\n\nimpl<W: io::Write + ?Sized + Debug> WriteOerExt for W {}\n\n\n", "file_path": "crates/interledger-btp/src/oer.rs", "rank": 62, "score": 180990.6637837976 }, { "content": "/// See: https://github.com/interledger/rfcs/blob/master/0029-stream/0029-stream.md#514-maximum-varuint-size\n\nfn saturating_read_var_uint<'a>(reader: &mut impl BufOerExt<'a>) -> Result<u64, ParseError> {\n\n if reader.peek_var_octet_string()?.len() > 8 {\n\n reader.skip_var_octet_string()?;\n\n Ok(u64::MAX)\n\n } else {\n\n Ok(reader.read_var_uint()?)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod serialization {\n\n use super::*;\n\n use std::str::FromStr;\n\n\n\n static PACKET: Lazy<StreamPacket> = Lazy::new(|| {\n\n StreamPacketBuilder {\n\n sequence: 1,\n\n ilp_packet_type: IlpPacketType::try_from(12).unwrap(),\n\n prepare_amount: 99,\n\n frames: &[\n", 
"file_path": "crates/interledger-stream/src/packet.rs", "rank": 63, "score": 179887.81907868336 }, { "content": "#[async_trait]\n\npub trait SettlementEngine {\n\n /// Informs the settlement engine that a new account was created\n\n /// within the accounting system using the given account identifier.\n\n /// The settlement engine MAY perform tasks as a prerequisite to settle with the account.\n\n /// For example, a settlement engine implementation might send messages to\n\n /// the peer to exchange ledger identifiers or to negotiate settlement-related fees.\n\n async fn create_account(&self, account_id: String) -> ApiResult;\n\n\n\n /// Instructs the settlement engine that an account was deleted.\n\n async fn delete_account(&self, account_id: String) -> ApiResult;\n\n\n\n /// Asynchronously send an outgoing settlement. The accounting system sends this request and accounts for outgoing settlements.\n\n async fn send_money(&self, account_id: String, money: Quantity) -> ApiResult;\n\n\n\n /// Process and respond to an incoming message from the peer's settlement engine.\n\n /// The connector sends this request when it receives an incoming settlement message\n\n /// from the peer, and returns the response message back to the peer.\n\n async fn receive_message(&self, account_id: String, message: Vec<u8>) -> ApiResult;\n\n}\n\n\n\n// TODO: Since we still haven't finalized all the settlement details, we might\n\n// end up deciding to add some more values, e.g. 
some settlement engine uid or similar.\n\n// All instances of this struct should be replaced with Url instances once/if we\n\n// agree that there is no more info required to refer to an engine.\n\n/// The details associated with a settlement engine\n\npub struct SettlementEngineDetails {\n\n /// Base URL of the settlement engine\n\n pub url: Url,\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/core/types.rs", "rank": 64, "score": 179395.0718075754 }, { "content": "/// Returns the size (in bytes) of the buffer that encodes a VarOctetString of\n\n/// `length` bytes.\n\npub fn predict_var_octet_string(length: usize) -> usize {\n\n if length < 128 {\n\n 1 + length\n\n } else {\n\n let length_of_length = predict_var_uint_size(length as u64);\n\n 1 + length_of_length + length\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-packet/src/oer.rs", "rank": 65, "score": 179368.65262421226 }, { "content": "/// Trait used by the connector to adjust account balances on settlement events\n\npub trait SettlementStore {\n\n type Account: Account;\n\n\n\n /// Increases the account's balance/prepaid amount by the provided amount\n\n ///\n\n /// This is optionally idempotent. If the same idempotency_key is provided\n\n /// then no database operation must happen. 
If there is an idempotency\n\n /// conflict (same idempotency key, different inputs to function) then\n\n /// it should return an error\n\n async fn update_balance_for_incoming_settlement(\n\n &self,\n\n account_id: Uuid,\n\n amount: u64,\n\n idempotency_key: Option<String>,\n\n ) -> Result<(), SettlementStoreError>;\n\n\n\n /// Increases the account's balance by the provided amount.\n\n /// Only call this if a settlement request has failed\n\n async fn refund_settlement(\n\n &self,\n\n account_id: Uuid,\n\n settle_amount: u64,\n\n ) -> Result<(), SettlementStoreError>;\n\n}\n\n\n\n/// Trait used by the connector and engine to track amounts which should have been\n\n/// settled but were not due to precision loss\n", "file_path": "crates/interledger-settlement/src/core/types.rs", "rank": 66, "score": 178416.5044944344 }, { "content": "fn get_deepest_command<'a>(matches: &'a ArgMatches, path: &mut Vec<String>) -> &'a ArgMatches<'a> {\n\n let (name, subcommand_matches) = matches.subcommand();\n\n path.push(name.to_string());\n\n if let Some(matches) = subcommand_matches {\n\n return get_deepest_command(matches, path);\n\n }\n\n matches\n\n}\n\n\n", "file_path": "crates/ilp-node/src/main.rs", "rank": 67, "score": 178099.88441594463 }, { "content": "#[inline]\n\nfn volatile_write<T>(dst: &mut T, src: T) {\n\n unsafe { ptr::write_volatile(dst, src) }\n\n}\n\n\n\n/// Use fences to prevent accesses from being reordered before this\n\n/// point, which should hopefully help ensure that all accessors\n\n/// see zeroes after this point.\n", "file_path": "crates/interledger-store/src/crypto.rs", "rank": 68, "score": 174899.61797387802 }, { "content": "#[derive(Debug, Clone)]\n\nstruct AmountWithScale {\n\n num: BigUint,\n\n scale: u8,\n\n}\n\n\n\nimpl ToRedisArgs for AmountWithScale {\n\n fn write_redis_args<W>(&self, out: &mut W)\n\n where\n\n W: ?Sized + RedisWrite,\n\n {\n\n let mut rv = Vec::new();\n\n self.num.to_string().write_redis_args(&mut rv);\n\n 
self.scale.to_string().write_redis_args(&mut rv);\n\n ToRedisArgs::make_arg_vec(&rv, out);\n\n }\n\n}\n\n\n\nimpl AmountWithScale {\n\n fn parse_multi_values(items: &[Value]) -> Option<Self> {\n\n // We have to iterate over all values because in this case we're making\n", "file_path": "crates/interledger-store/src/redis/mod.rs", "rank": 69, "score": 174685.8207182763 }, { "content": "/// Returns the HMAC-SHA256 of the provided message using the provided **secret** key\n\npub fn hmac_sha256(key: &[u8], message: &[u8]) -> [u8; 32] {\n\n let key = hmac::Key::new(hmac::HMAC_SHA256, key);\n\n let output = hmac::sign(&key, message);\n\n let mut to_return: [u8; 32] = [0; 32];\n\n to_return.copy_from_slice(output.as_ref());\n\n to_return\n\n}\n\n\n", "file_path": "crates/interledger-stream/src/crypto.rs", "rank": 70, "score": 172005.57111737411 }, { "content": "#[cfg(test)]\n\n#[derive(Clone)]\n\nstruct TestStore;\n\n\n\n#[cfg(test)]\n\nuse interledger_errors::AddressStoreError;\n\n\n\n#[cfg(test)]\n\n#[async_trait]\n\nimpl AddressStore for TestStore {\n\n /// Saves the ILP Address in the store's memory and database\n\n async fn set_ilp_address(&self, _ilp_address: Address) -> Result<(), AddressStoreError> {\n\n unimplemented!()\n\n }\n\n\n\n async fn clear_ilp_address(&self) -> Result<(), AddressStoreError> {\n\n unimplemented!()\n\n }\n\n\n\n /// Get's the store's ilp address from memory\n\n fn get_ilp_address(&self) -> Address {\n\n Address::from_str(\"example.connector\").unwrap()\n", "file_path": "crates/interledger-service-util/src/validator_service.rs", "rank": 71, "score": 171939.6070360192 }, { "content": "pub fn generate_redis_secret(secret_seed: &[u8; 32]) -> [u8; 32] {\n\n let mut redis_secret: [u8; 32] = [0; 32];\n\n let sig = hmac::sign(\n\n &hmac::Key::new(hmac::HMAC_SHA256, secret_seed),\n\n REDIS_SECRET_GENERATION_STRING.as_bytes(),\n\n );\n\n redis_secret.copy_from_slice(sig.as_ref());\n\n redis_secret\n\n}\n", "file_path": 
"crates/ilp-node/src/redis_store.rs", "rank": 72, "score": 171159.49095954688 }, { "content": "#[async_trait]\n\npub trait AccountStore {\n\n /// The provided account type. Must implement the `Account` trait.\n\n type Account: Account;\n\n\n\n /// Loads the accounts which correspond to the provided account ids\n\n async fn get_accounts(\n\n &self,\n\n // The account ids (UUID format) of the accounts you are fetching\n\n account_ids: Vec<Uuid>,\n\n ) -> Result<Vec<Self::Account>, AccountStoreError>;\n\n\n\n /// Loads the account id which corresponds to the provided username\n\n async fn get_account_id_from_username(\n\n &self,\n\n // The username of the account you are fetching\n\n username: &Username,\n\n ) -> Result<Uuid, AccountStoreError>;\n\n}\n\n\n", "file_path": "crates/interledger-service/src/lib.rs", "rank": 73, "score": 170605.75025523046 }, { "content": "#[async_trait]\n\npub trait LeftoversStore {\n\n type AccountId: ToString;\n\n /// The data type that the store uses for tracking numbers.\n\n type AssetType: ToString;\n\n\n\n /// Saves the leftover data\n\n ///\n\n /// @dev:\n\n /// If your store needs to support Big Integers but cannot, consider setting AssetType to String,\n\n /// and then proceed to save a list of uncredited amounts as strings which would get loaded and summed\n\n /// by the load_uncredited_settlement_amount and get_uncredited_settlement_amount\n\n /// functions\n\n async fn save_uncredited_settlement_amount(\n\n &self,\n\n // The account id that for which there was a precision loss\n\n account_id: Self::AccountId,\n\n // The amount for which precision loss occurred, along with their scale\n\n uncredited_settlement_amount: (Self::AssetType, u8),\n\n ) -> Result<(), LeftoversStoreError>;\n\n\n", "file_path": "crates/interledger-settlement/src/core/types.rs", "rank": 74, "score": 167687.2801623012 }, { "content": "#[derive(Debug, Clone)]\n\nstruct AmountWithScale {\n\n num: BigUint,\n\n scale: u8,\n\n}\n\n\n\nimpl ToRedisArgs for 
AmountWithScale {\n\n fn write_redis_args<W>(&self, out: &mut W)\n\n where\n\n W: ?Sized + RedisWrite,\n\n {\n\n let mut rv = Vec::new();\n\n self.num.to_string().write_redis_args(&mut rv);\n\n self.scale.to_string().write_redis_args(&mut rv);\n\n ToRedisArgs::make_arg_vec(&rv, out);\n\n }\n\n}\n\n\n\nimpl AmountWithScale {\n\n /// Iterates over all values because in this case it's making\n\n /// an lrange call. This returns all the tuple elements in 1 array, and\n", "file_path": "crates/interledger-settlement/src/core/backends_common/redis/mod.rs", "rank": 75, "score": 167668.34272322303 }, { "content": "/// Extension trait for [Account](../interledger_service/trait.Account.html) with [ILP over HTTP](https://interledger.org/rfcs/0035-ilp-over-http/) related information\n\npub trait HttpAccount: Account {\n\n /// Returns the HTTP URL corresponding to this account\n\n fn get_http_url(&self) -> Option<&Url>;\n\n /// Returns the HTTP token which is sent as an HTTP header on each ILP over HTTP request\n\n fn get_http_auth_token(&self) -> Option<SecretString>;\n\n}\n\n\n\n/// The interface for Stores that can be used with the HttpServerService.\n\n// TODO do we need all of these constraints?\n", "file_path": "crates/interledger-http/src/lib.rs", "rank": 76, "score": 163414.74563051926 }, { "content": "/// Extension trait for [Account](../interledger_service/trait.Account.html) with [ILP over BTP](https://interledger.org/rfcs/0023-bilateral-transfer-protocol/) related information\n\npub trait BtpAccount: Account {\n\n /// Returns the BTP Websockets URL corresponding to this account\n\n fn get_ilp_over_btp_url(&self) -> Option<&Url>;\n\n /// Returns the BTP authentication token which is used when initiating a BTP connection\n\n /// with a peer\n\n fn get_ilp_over_btp_outgoing_token(&self) -> Option<&[u8]>;\n\n}\n\n\n\n/// The interface for Store implementations that can be used with the BTP Server.\n", "file_path": "crates/interledger-btp/src/lib.rs", "rank": 77, "score": 
163414.72607980622 }, { "content": "pub fn generate_keys(server_secret: &[u8]) -> (Secret<EncryptionKey>, Secret<DecryptionKey>) {\n\n let generation_key = GenerationKey(hmac::Key::new(hmac::HMAC_SHA256, server_secret));\n\n let encryption_key = Secret::new(EncryptionKey(aead::LessSafeKey::new(\n\n aead::UnboundKey::new(\n\n &aead::AES_256_GCM,\n\n hmac::sign(&generation_key.0, ENCRYPTION_KEY_GENERATION_STRING).as_ref(),\n\n )\n\n .unwrap(),\n\n )));\n\n let decryption_key = Secret::new(DecryptionKey(aead::LessSafeKey::new(\n\n aead::UnboundKey::new(\n\n &aead::AES_256_GCM,\n\n hmac::sign(&generation_key.0, ENCRYPTION_KEY_GENERATION_STRING).as_ref(),\n\n )\n\n .unwrap(),\n\n )));\n\n // the generation key is dropped and zeroized here\n\n (encryption_key, decryption_key)\n\n}\n\n\n", "file_path": "crates/interledger-store/src/crypto.rs", "rank": 78, "score": 163037.86973564993 }, { "content": "fn settlement_engines<'a, 'b>() -> App<'a, 'b> {\n\n SubCommand::with_name(\"settlement-engines\")\n\n .about(\"Interact with the settlement engine configurations\")\n\n}\n\n\n", "file_path": "crates/ilp-cli/src/parser.rs", "rank": 79, "score": 162913.82410785923 }, { "content": "/// Define CcpAccount methods and Account types that need to be used by the CCP Service\n\npub trait CcpRoutingAccount: Account {\n\n /// The type of relationship we have with this account\n\n fn routing_relation(&self) -> RoutingRelation;\n\n\n\n /// Indicates whether we should send CCP Route Updates to this account\n\n fn should_send_routes(&self) -> bool {\n\n self.routing_relation() == RoutingRelation::Child\n\n || self.routing_relation() == RoutingRelation::Peer\n\n }\n\n\n\n /// Indicates whether we should accept CCP Route Update Requests from this account\n\n fn should_receive_routes(&self) -> bool {\n\n self.routing_relation() == RoutingRelation::Parent\n\n || self.routing_relation() == RoutingRelation::Peer\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-ccp/src/lib.rs", "rank": 80, 
"score": 161988.19685612086 }, { "content": "fn settlement_engines_set_all<'a, 'b>() -> App<'a, 'b> {\n\n AuthorizedSubCommand::with_name(\"set-all\")\n\n .about(\"Configure the default settlement engines for given asset codes\")\n\n .arg(\n\n Arg::with_name(\"halve\")\n\n .long(\"pair\")\n\n .number_of_values(2)\n\n .multiple(true)\n\n .help(\"A set of space-separated key/value pairs, representing an asset code and a settlement engine; may appear multiple times\"),\n\n )\n\n}\n\n\n", "file_path": "crates/ilp-cli/src/parser.rs", "rank": 81, "score": 160404.56500726723 }, { "content": "type RoutingTable<A> = HashMap<String, A>;\n\n\n\n#[async_trait]\n\nimpl AddressStore for TestStore {\n\n /// Saves the ILP Address in the store's memory and database\n\n async fn set_ilp_address(&self, _ilp_address: Address) -> Result<(), AddressStoreError> {\n\n unimplemented!()\n\n }\n\n\n\n async fn clear_ilp_address(&self) -> Result<(), AddressStoreError> {\n\n unimplemented!()\n\n }\n\n\n\n /// Get's the store's ilp address from memory\n\n fn get_ilp_address(&self) -> Address {\n\n Address::from_str(\"example.connector\").unwrap()\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "crates/interledger-ccp/src/test_helpers.rs", "rank": 82, "score": 160314.8489653797 }, { "content": "fn ilp_packet_to_ws_message(request_id: u32, packet: Packet) -> Message {\n\n let (data, is_response) = match packet {\n\n Packet::Prepare(prepare) => (BytesMut::from(prepare).to_vec(), false),\n\n Packet::Fulfill(fulfill) => (BytesMut::from(fulfill).to_vec(), true),\n\n Packet::Reject(reject) => (BytesMut::from(reject).to_vec(), true),\n\n };\n\n let btp_packet = if is_response {\n\n BtpMessage {\n\n request_id,\n\n protocol_data: vec![ProtocolData {\n\n protocol_name: \"ilp\".to_string(),\n\n content_type: ContentType::ApplicationOctetStream,\n\n data,\n\n }],\n\n }\n\n .to_bytes()\n\n } else {\n\n BtpResponse {\n\n request_id,\n\n protocol_data: vec![ProtocolData {\n\n protocol_name: 
\"ilp\".to_string(),\n\n content_type: ContentType::ApplicationOctetStream,\n\n data,\n\n }],\n\n }\n\n .to_bytes()\n\n };\n\n Message::binary(btp_packet)\n\n}\n", "file_path": "crates/interledger-btp/src/service.rs", "rank": 83, "score": 159649.70048178258 }, { "content": "/// Extension trait for [`Account`](../interledger_service/trait.Account.html) with rate limiting related information\n\npub trait RateLimitAccount: Account {\n\n /// The maximum packets per minute allowed for this account\n\n fn packets_per_minute_limit(&self) -> Option<u32> {\n\n None\n\n }\n\n\n\n /// The maximum units per minute allowed for this account\n\n fn amount_per_minute_limit(&self) -> Option<u64> {\n\n None\n\n }\n\n}\n\n\n\n/// Rate limiting related errors\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum RateLimitError {\n\n /// Account exceeded their packet limit\n\n PacketLimitExceeded,\n\n /// Account exceeded their amount limit\n\n ThroughputLimitExceeded,\n\n /// There was an internal error when trying to connect to the store\n\n StoreError,\n\n}\n\n\n\n/// Store trait which manages the rate limit related information of accounts\n", "file_path": "crates/interledger-service-util/src/rate_limit_service.rs", "rank": 84, "score": 157930.22103054533 }, { "content": "/// An account with a round trip time, used by the [`ExpiryShortenerService`](./struct.ExpiryShortenerService.html)\n\n/// to shorten a packet's expiration time to account for latency\n\npub trait RoundTripTimeAccount: Account {\n\n /// The account's round trip time\n\n fn round_trip_time(&self) -> u32 {\n\n DEFAULT_ROUND_TRIP_TIME\n\n }\n\n}\n\n\n\n/// # Expiry Shortener Service\n\n///\n\n/// Each node shortens the `Prepare` packet's expiry duration before passing it on.\n\n/// Nodes shorten the expiry duration so that even if the packet is fulfilled just before the expiry,\n\n/// they will still have enough time to pass the fulfillment to the previous node before it expires.\n\n///\n\n/// This service reduces the 
expiry time of each packet before forwarding it out.\n\n/// Requires a `RoundtripTimeAccount` and _no store_\n\n#[derive(Clone)]\n\npub struct ExpiryShortenerService<O> {\n\n next: O,\n\n max_expiry_duration: u32,\n\n}\n", "file_path": "crates/interledger-service-util/src/expiry_shortener_service.rs", "rank": 85, "score": 156664.0928076771 }, { "content": "pub fn main() {\n\n // 1. Define the arguments to the CLI application\n\n let app = parser::build();\n\n\n\n // 2. Parse the command line\n\n let matches = app.clone().get_matches();\n\n\n\n // 3. Interpret this CLI invocation\n\n let result = interpreter::run(&matches);\n\n\n\n // 4. Handle interpreter output\n\n match result {\n\n Err(interpreter::Error::UsageErr(s)) => {\n\n // Clap doesn't seem to have a built-in way of manually printing the\n\n // help text for an arbitrary subcommand, but this works just the same.\n\n app.get_matches_from(s.split(' '));\n\n }\n\n Err(e) => {\n\n eprintln!(\"ilp-cli error: {}\", e);\n\n exit(1);\n", "file_path": "crates/ilp-cli/src/main.rs", "rank": 86, "score": 156387.70984601672 }, { "content": "/// Extension trait for [`Account`](../interledger_service/trait.Account.html) with the max packet amount\n\n/// allowed for this account\n\npub trait MaxPacketAmountAccount: Account {\n\n fn max_packet_amount(&self) -> u64;\n\n}\n\n\n\n/// # MaxPacketAmount Service\n\n///\n\n/// This service is used by nodes to limit the maximum value of each packet they are willing to forward.\n\n/// Nodes may limit the packet amount for a variety of reasons:\n\n/// - Liquidity: a node operator may not way to allow a single high-value packet to tie up a large portion of its liquidity at once (especially because they do not know whether the packet will be fulfilled or rejected)\n\n/// - Security: each packet carries some risk, due to the possibility that a node's failure to pass back the fulfillment within the available time window would cause that node to lose money. 
Keeping the value of each individual packet low may help reduce the impact of such a failure\n\n/// Signaling: nodes SHOULD set the maximum packet amount _lower_ than the maximum amount in flight (also known as the payment or money bandwidth). `T04: Insufficient Liquidity` errors do not communicate to the sender how much they can send, largely because the \"available liquidity\" may be time based or based on the rate of other payments going through and thus difficult to communicate effectively. In contrast, the `F08: Amount Too Large` error conveys the maximum back to the sender, because this limit is assumed to be a static value, and alllows sender-side software like STREAM implementations to respond accordingly. Therefore, setting the maximum packet amount lower than the total money bandwidth allows client implementations to quickly adjust their packet amounts to appropriate levels.\n\n/// Requires a `MaxPacketAmountAccount` and _no store_.\n\n#[derive(Clone)]\n\npub struct MaxPacketAmountService<I, S> {\n\n next: I,\n\n store: S,\n\n}\n\n\n\nimpl<I, S> MaxPacketAmountService<I, S> {\n\n /// Simple constructor\n", "file_path": "crates/interledger-service-util/src/max_packet_amount_service.rs", "rank": 87, "score": 155413.57276644936 }, { "content": "#[allow(unreachable_code)]\n\nfn default_database_url() -> String {\n\n #[cfg(feature = \"redis\")]\n\n return default_redis_url();\n\n panic!(\"no backing store configured\")\n\n}\n\n\n", "file_path": "crates/ilp-node/src/node.rs", "rank": 88, "score": 154928.54413639827 }, { "content": "fn deserialize_32_bytes_hex<'de, D>(deserializer: D) -> Result<[u8; 32], D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n <[u8; 32]>::from_hex(String::deserialize(deserializer)?).map_err(|err| {\n\n DeserializeError::custom(format!(\n\n \"Invalid hex value (must be 32 hex-encoded bytes): {:?}\",\n\n err\n\n ))\n\n })\n\n}\n\n\n", "file_path": "crates/ilp-node/src/node.rs", "rank": 89, "score": 153683.58315025104 }, { "content": 
" let (store, _context, accounts) = test_store().await.unwrap();\n\n let mut settings = AccountSettings::default();\n\n // Redis.rs cannot save a value larger than i64::MAX\n\n settings.settle_to = Some(std::i64::MAX as u64 + 1);\n\n let account = accounts[0].clone();\n\n let id = account.id();\n\n let err = store\n\n .modify_account_settings(id, settings)\n\n .await\n\n .unwrap_err();\n\n assert_eq!(\n\n err.to_string(),\n\n \"invalid account: the provided value for parameter `settle_to` was too large\"\n\n );\n\n}\n\n\n\n#[tokio::test]\n\nasync fn modify_account_settings_unchanged() {\n\n let (store, _context, accounts) = test_store().await.unwrap();\n\n let settings = AccountSettings::default();\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 90, "score": 151398.81627586708 }, { "content": " let err = store.get_all_accounts().await.unwrap_err();\n\n assert_eq!(err.to_string(), \"Broken pipe (os error 32)\");\n\n}\n\n\n\n#[tokio::test]\n\nasync fn update_accounts() {\n\n let (store, _context, accounts) = test_store().await.unwrap();\n\n let id = accounts[0].id();\n\n let mut new = ACCOUNT_DETAILS_0.clone();\n\n new.asset_code = String::from(\"TUV\");\n\n let account = store.update_account(id, new.clone()).await.unwrap();\n\n assert_eq!(account.asset_code(), \"TUV\");\n\n\n\n let id = Uuid::new_v4();\n\n let err = store.update_account(id, new).await.unwrap_err();\n\n assert_eq!(err.to_string(), format!(\"account `{}` was not found\", id));\n\n}\n\n\n\n#[tokio::test]\n\nasync fn modify_account_settings_settle_to_overflow() {\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 91, "score": 151398.37762319768 }, { "content": "async fn fetches_account_from_username() {\n\n let (store, _context, accs) = test_store().await.unwrap();\n\n let account_id = store\n\n .get_account_id_from_username(&Username::from_str(\"alice\").unwrap())\n\n .await\n\n .unwrap();\n\n assert_eq!(account_id, accs[0].id());\n\n\n\n 
let err = store\n\n .get_account_id_from_username(&Username::from_str(\"random\").unwrap())\n\n .await\n\n .unwrap_err();\n\n assert_eq!(err.to_string(), \"account `random` was not found\");\n\n}\n\n\n\n#[tokio::test]\n\nasync fn get_all_accounts() {\n\n let (store, _context, _) = test_store().await.unwrap();\n\n let accounts = store.get_all_accounts().await.unwrap();\n\n assert_eq!(accounts.len(), 2);\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 92, "score": 151394.6806670021 }, { "content": "#[tokio::test]\n\nasync fn only_one_parent_allowed() {\n\n let mut acc = ACCOUNT_DETAILS_2.clone();\n\n acc.routing_relation = Some(\"Parent\".to_owned());\n\n acc.username = Username::from_str(\"another_name\").unwrap();\n\n acc.ilp_address = Some(Address::from_str(\"example.another_name\").unwrap());\n\n let (store, _context, accs) = test_store().await.unwrap();\n\n let res = store.insert_account(acc.clone()).await;\n\n // This should fail\n\n assert!(res.is_err());\n\n store.delete_account(accs[0].id()).await.unwrap();\n\n // must also clear the ILP Address to indicate that we no longer\n\n // have a parent account configured\n\n store.clear_ilp_address().await.unwrap();\n\n let res = store.insert_account(acc).await;\n\n assert!(res.is_ok());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn delete_accounts() {\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 93, "score": 151392.53523898934 }, { "content": " .insert_account(ACCOUNT_DETAILS_2.clone())\n\n .await\n\n .unwrap_err();\n\n assert_eq!(err.to_string(), \"account `charlie` already exists\");\n\n}\n\n\n\n#[tokio::test]\n\nasync fn cannot_insert_invalid_accounts() {\n\n let (store, _context, _) = test_store().await.unwrap();\n\n let details = ACCOUNT_DETAILS_2.clone();\n\n let mut acc = details.clone();\n\n\n\n // invalid http url\n\n acc.ilp_over_http_url = Some(\"asdf\".to_owned());\n\n let err = store.insert_account(acc).await.unwrap_err();\n\n 
assert_eq!(\n\n err.to_string(),\n\n \"invalid account: the provided http url is not valid: relative URL without a base\"\n\n );\n\n\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 94, "score": 151390.82823268644 }, { "content": "\n\n#[tokio::test]\n\nasync fn decrypts_outgoing_tokens_acc() {\n\n let (store, _context, accs) = test_store().await.unwrap();\n\n let acc = accs[0].clone();\n\n let accounts = store.get_accounts(vec![acc.id()]).await.unwrap();\n\n let account = accounts[0].clone();\n\n assert_eq!(\n\n account.get_http_auth_token().unwrap().expose_secret(),\n\n acc.get_http_auth_token().unwrap().expose_secret(),\n\n );\n\n assert_eq!(\n\n account.get_ilp_over_btp_outgoing_token().unwrap(),\n\n acc.get_ilp_over_btp_outgoing_token().unwrap(),\n\n );\n\n}\n\n\n\n#[tokio::test]\n\nasync fn errors_for_unknown_accounts() {\n\n let (store, _context, _) = test_store().await.unwrap();\n\n let err = store\n\n .get_accounts(vec![Uuid::new_v4(), Uuid::new_v4()])\n\n .await\n\n .unwrap_err();\n\n assert_eq!(err.to_string(), \"wrong account length (expected 2, got 0)\");\n\n}\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 95, "score": 151389.38585951942 }, { "content": " ret.get_ilp_over_btp_outgoing_token().unwrap(),\n\n &b\"dylan:test\"[..],\n\n );\n\n\n\n let id = Uuid::new_v4();\n\n let err = store\n\n .modify_account_settings(id, settings)\n\n .await\n\n .unwrap_err();\n\n assert_eq!(err.to_string(), format!(\"account `{}` was not found\", id));\n\n}\n\n\n\n#[tokio::test]\n\nasync fn starts_with_zero_balance() {\n\n let (store, _context, accs) = test_store().await.unwrap();\n\n let balance = store.get_balance(accs[0].id()).await.unwrap();\n\n assert_eq!(balance, 0);\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 96, "score": 151387.22178827372 }, { "content": "\n\n#[tokio::test]\n\nasync fn update_ilp_and_children_addresses() {\n\n let 
(store, _context, accs) = test_store().await.unwrap();\n\n // Add a NonRoutingAccount to make sure its address\n\n // gets updated as well\n\n let acc2 = store\n\n .insert_account(ACCOUNT_DETAILS_2.clone())\n\n .await\n\n .unwrap();\n\n let mut accs = accs.clone();\n\n accs.push(acc2);\n\n accs.sort_by_key(|a| a.username().clone());\n\n let ilp_address = Address::from_str(\"test.parent.our_address\").unwrap();\n\n\n\n store.set_ilp_address(ilp_address.clone()).await.unwrap();\n\n let ret = store.get_ilp_address();\n\n assert_eq!(ilp_address, ret);\n\n\n\n let mut accounts = store.get_all_accounts().await.unwrap();\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 97, "score": 151385.8332357127 }, { "content": " assert_eq!(\n\n store.get_ilp_address(),\n\n Address::from_str(\"example.bob.node\").unwrap()\n\n );\n\n}\n\n\n\n#[tokio::test]\n\nasync fn insert_accounts() {\n\n let (store, _context, _) = test_store().await.unwrap();\n\n let account = store\n\n .insert_account(ACCOUNT_DETAILS_2.clone())\n\n .await\n\n .unwrap();\n\n assert_eq!(\n\n *account.ilp_address(),\n\n Address::from_str(\"example.alice.user1.charlie\").unwrap()\n\n );\n\n\n\n // cannot insert duplicate accounts\n\n let err = store\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 98, "score": 151385.76358659996 }, { "content": " let (store, context, _) = test_store().await.unwrap();\n\n let accounts = store.get_all_accounts().await.unwrap();\n\n let id = accounts[0].id();\n\n store.delete_account(id).await.unwrap();\n\n let accounts = store.get_all_accounts().await.unwrap();\n\n for a in &accounts {\n\n assert_ne!(id, a.id());\n\n }\n\n\n\n // clear all accounts and try again\n\n store.delete_account(accounts[0].id()).await.unwrap();\n\n let accounts = store.get_all_accounts().await.unwrap();\n\n assert_eq!(accounts.len(), 0);\n\n\n\n // try deleting an account which does not exist\n\n let err = 
store.delete_account(id).await.unwrap_err();\n\n assert_eq!(err.to_string(), format!(\"account `{}` was not found\", id));\n\n\n\n // we drop the connection so the pipe should break\n\n drop(context);\n", "file_path": "crates/interledger-store/tests/redis/accounts_test.rs", "rank": 99, "score": 151383.68540085305 } ]
Rust
9_error_handling/result/src/main.rs
rhavill/rust-programming-language
6fec6d84aa6a887e8dd5a5117ae7548780c1ecac
use std::io; use std::io::ErrorKind; use std::io::Read; use std::fs; use std::fs::File; fn main() { loop { println!("Input a number and press <Enter>:"); println!("1 - to see a generic error crash."); println!("2 - to match error and prevent crash."); println!("3 - to catch the error with unwrap_or_else and prevent crash."); println!("4 - to unwrap error and crash."); println!("5 - to crash with expect."); println!("6 - to propogate errors."); println!("7 - to propogate errors with ? operator."); println!("8 - to propogate errors with chained ? operator."); println!("9 - to propogate errors with fs::read_to_string."); println!("10 - to exit."); let mut option = String::new(); io::stdin().read_line(&mut option) .expect("Failed to read line"); let option: u32 = match option.trim().parse() { Ok(num) => num, Err(_) => continue, }; match option { 1 => generic_error(), 2 => matching_error(), 3 => avoid_match_with_closure(), 4 => unwrap_crash(), 5 => expect_crash(), 6 => propogating_errors(), 7 => question_mark_propogation(), 8 => chained_question_mark_propogation(), 9 => fs_read_to_string_propogation(), _ => break, } } } #[allow(unused_variables)] fn generic_error() { /* If we give f a type annotation that we know is not the return type of the function and then try to compile the code, the compiler will tell us that the types don’t match. The error message will then tell us what the type of f is. 
*/ let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => { panic!("Problem opening the file: {:?}", error) }, }; } #[allow(unused_variables)] fn matching_error() { let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => match error.kind() { ErrorKind::NotFound => match File::create("hello.txt") { Ok(fc) => fc, Err(e) => panic!("Problem creating the file: {:?}", e), }, other_error => panic!("Problem opening the file: {:?}", other_error), }, }; } #[allow(unused_variables)] fn avoid_match_with_closure() { /* In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expressions. Using those methods will make your code more concise. In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expressions. Using those methods will make your code more concise. */ let f = File::open("hello.txt").unwrap_or_else(|error| { if error.kind() == ErrorKind::NotFound { File::create("hello.txt").unwrap_or_else(|error| { panic!("Problem creating the file: {:?}", error); }) } else { panic!("Problem opening the file: {:?}", error); } }); } #[allow(unused_variables)] fn unwrap_crash() { /* unwrap is a shortcut method that is implemented just like the match expression we wrote in Listing 9-4. If the Result value is the Ok variant, unwrap will return the value inside the Ok. If the Result is the Err variant, unwrap will call the panic! macro for us. */ let f = File::open("hello.txt").unwrap(); } #[allow(unused_variables)] fn expect_crash() { /* Expect, which is similar to unwrap, lets us also choose the panic! error message. Using expect instead of unwrap and providing good error messages can convey your intent and make tracking down the source of a panic easier. 
*/ let f = File::open("hello.txt").expect("Failed to open hello.txt"); } fn propogating_errors() { let username = read_username_from_file(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file() -> Result<String, io::Error> { let f = File::open("hello.txt"); let mut f = match f { Ok(file) => file, Err(e) => return Err(e), }; let mut s = String::new(); match f.read_to_string(&mut s) { Ok(_) => Ok(s), Err(e) => Err(e), } } fn question_mark_propogation() { let username = read_username_from_file_shorthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorthand() -> Result<String, io::Error> { /* If the value of the Result is an Ok, the value inside the Ok will get returned from this expression, and the program will continue. If the value is an Err, the Err will be returned from the whole function as if we had used the return keyword so the error value gets propagated to the calling code. This is different from using match, because error values that have the ? operator called on them go through the from function, defined in the From trait in the standard library, which is used to convert errors from one type into another. When the ? operator calls the from function, the error type received is converted into the error type defined in the return type of the current function. This is useful when a function returns one error type to represent all the ways a function might fail, even if parts might fail for many different reasons. As long as each error type implements the from function to define how to convert itself to the returned error type, the ? operator takes care of the conversion automatically. The ? at the end of the File::open call will return the value inside an Ok to the variable f. If an error occurs, the ? 
operator will return early out of the whole function and give any Err value to the calling code. The same thing applies to the ? at the end of the read_to_string call. */ let mut f = File::open("hello.txt")?; let mut s = String::new(); f.read_to_string(&mut s)?; Ok(s) } fn chained_question_mark_propogation() { let username = read_username_from_file_shorterhand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorterhand() -> Result<String, io::Error> { let mut s = String::new(); File::open("hello.txt")?.read_to_string(&mut s)?; Ok(s) } fn fs_read_to_string_propogation() { let username = read_username_from_file_shortesthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shortesthand() -> Result<String, io::Error> { fs::read_to_string("hello.txt") } /* The main function is special, and there are restrictions on what its return type must be. One valid return type for main is (), and conveniently, another valid return type is Result<T, E>, as shown here: */
use std::io; use std::io::ErrorKind; use std::io::Read; use std::fs; use std::fs::File; fn main() { loop { println!("Input a number and press <Enter>:"); println!("1 - to see a generic error crash."); println!("2 - to match error and prevent crash."); println!("3 - to catch the error with unwrap_or_else and prevent crash."); println!("4 - to unwrap error and crash."); println!("5 - to crash with expect."); println!("6 - to propogate errors.");
ssions. Using those methods will make your code more concise. In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expressions. Using those methods will make your code more concise. */ let f = File::open("hello.txt").unwrap_or_else(|error| { if error.kind() == ErrorKind::NotFound { File::create("hello.txt").unwrap_or_else(|error| { panic!("Problem creating the file: {:?}", error); }) } else { panic!("Problem opening the file: {:?}", error); } }); } #[allow(unused_variables)] fn unwrap_crash() { /* unwrap is a shortcut method that is implemented just like the match expression we wrote in Listing 9-4. If the Result value is the Ok variant, unwrap will return the value inside the Ok. If the Result is the Err variant, unwrap will call the panic! macro for us. */ let f = File::open("hello.txt").unwrap(); } #[allow(unused_variables)] fn expect_crash() { /* Expect, which is similar to unwrap, lets us also choose the panic! error message. Using expect instead of unwrap and providing good error messages can convey your intent and make tracking down the source of a panic easier. 
*/ let f = File::open("hello.txt").expect("Failed to open hello.txt"); } fn propogating_errors() { let username = read_username_from_file(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file() -> Result<String, io::Error> { let f = File::open("hello.txt"); let mut f = match f { Ok(file) => file, Err(e) => return Err(e), }; let mut s = String::new(); match f.read_to_string(&mut s) { Ok(_) => Ok(s), Err(e) => Err(e), } } fn question_mark_propogation() { let username = read_username_from_file_shorthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorthand() -> Result<String, io::Error> { /* If the value of the Result is an Ok, the value inside the Ok will get returned from this expression, and the program will continue. If the value is an Err, the Err will be returned from the whole function as if we had used the return keyword so the error value gets propagated to the calling code. This is different from using match, because error values that have the ? operator called on them go through the from function, defined in the From trait in the standard library, which is used to convert errors from one type into another. When the ? operator calls the from function, the error type received is converted into the error type defined in the return type of the current function. This is useful when a function returns one error type to represent all the ways a function might fail, even if parts might fail for many different reasons. As long as each error type implements the from function to define how to convert itself to the returned error type, the ? operator takes care of the conversion automatically. The ? at the end of the File::open call will return the value inside an Ok to the variable f. If an error occurs, the ? 
operator will return early out of the whole function and give any Err value to the calling code. The same thing applies to the ? at the end of the read_to_string call. */ let mut f = File::open("hello.txt")?; let mut s = String::new(); f.read_to_string(&mut s)?; Ok(s) } fn chained_question_mark_propogation() { let username = read_username_from_file_shorterhand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorterhand() -> Result<String, io::Error> { let mut s = String::new(); File::open("hello.txt")?.read_to_string(&mut s)?; Ok(s) } fn fs_read_to_string_propogation() { let username = read_username_from_file_shortesthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shortesthand() -> Result<String, io::Error> { fs::read_to_string("hello.txt") } /* The main function is special, and there are restrictions on what its return type must be. One valid return type for main is (), and conveniently, another valid return type is Result<T, E>, as shown here: */
println!("7 - to propogate errors with ? operator."); println!("8 - to propogate errors with chained ? operator."); println!("9 - to propogate errors with fs::read_to_string."); println!("10 - to exit."); let mut option = String::new(); io::stdin().read_line(&mut option) .expect("Failed to read line"); let option: u32 = match option.trim().parse() { Ok(num) => num, Err(_) => continue, }; match option { 1 => generic_error(), 2 => matching_error(), 3 => avoid_match_with_closure(), 4 => unwrap_crash(), 5 => expect_crash(), 6 => propogating_errors(), 7 => question_mark_propogation(), 8 => chained_question_mark_propogation(), 9 => fs_read_to_string_propogation(), _ => break, } } } #[allow(unused_variables)] fn generic_error() { /* If we give f a type annotation that we know is not the return type of the function and then try to compile the code, the compiler will tell us that the types don’t match. The error message will then tell us what the type of f is. */ let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => { panic!("Problem opening the file: {:?}", error) }, }; } #[allow(unused_variables)] fn matching_error() { let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => match error.kind() { ErrorKind::NotFound => match File::create("hello.txt") { Ok(fc) => fc, Err(e) => panic!("Problem creating the file: {:?}", e), }, other_error => panic!("Problem opening the file: {:?}", other_error), }, }; } #[allow(unused_variables)] fn avoid_match_with_closure() { /* In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expre
random
[ { "content": "fn main() {\n\n if_let_expressions();\n\n while_loop();\n\n for_loop();\n\n let_statement();\n\n function_parameters();\n\n}\n\n\n\n/*\n\nThe line if let Ok(age) = age introduces a new shadowed age variable that \n\ncontains the value inside the Ok variant. This means we need to place the if age\n\n> 30 condition within that block: we can’t combine these two conditions into if \n\nlet Ok(age) = age && age > 30. The shadowed age we want to compare to 30 isn’t \n\nvalid until the new scope starts with the curly bracket.\n\n\n\nThe downside of using if let expressions is that the compiler doesn’t check \n\nexhaustiveness, whereas with match expressions it does. If we omitted the last \n\nelse block and therefore missed handling some cases, the compiler would not \n\nalert us to the possible logic bug.\n\n*/\n", "file_path": "18_patterns_and_matching/places_to_use_patterns/src/main.rs", "rank": 0, "score": 185365.68635526998 }, { "content": "fn while_loop() {\n\n let mut stack = Vec::new();\n\n\n\n stack.push(1);\n\n stack.push(2);\n\n stack.push(3);\n\n\n\n while let Some(top) = stack.pop() {\n\n println!(\"while_loop: {}\", top);\n\n } \n\n}\n\n\n", "file_path": "18_patterns_and_matching/places_to_use_patterns/src/main.rs", "rank": 3, "score": 176939.4871027842 }, { "content": "fn for_loop() {\n\n let v = vec!['a', 'b', 'c'];\n\n\n\n for (index, value) in v.iter().enumerate() {\n\n println!(\"for_looop: {} is at index {}\", value, index);\n\n } \n\n}\n\n\n", "file_path": "18_patterns_and_matching/places_to_use_patterns/src/main.rs", "rank": 4, "score": 176939.4871027842 }, { "content": "fn main() {\n\n simple_match();\n\n bind_value_match();\n\n option_match();\n\n underscore_placeholder();\n\n}\n\n\n", "file_path": "6_enums_and_pattern_matching/enums_and_match/src/main.rs", "rank": 5, "score": 161918.6118982817 }, { "content": "fn main() {\n\n println!(\"Run this with \\\"RUST_BACKTRACE=1 cargo run\\\" to see a backtrace.\");\n\n loop {\n\n 
println!(\"Input a number and press <Enter>:\");\n\n println!(\"1 - to see a basic panic crash.\");\n\n println!(\"2 - to see a buffer overread crash.\");\n\n let mut option = String::new();\n\n io::stdin().read_line(&mut option)\n\n .expect(\"Failed to read line\");\n\n let option: u32 = match option.trim().parse() {\n\n Ok(num) => num,\n\n Err(_) => continue,\n\n };\n\n match option {\n\n 1 => basic_panic(),\n\n 2 => buffer_overread(),\n\n _ => continue,\n\n }\n\n }\n\n}\n\n\n", "file_path": "9_error_handling/panic/src/main.rs", "rank": 7, "score": 157611.0165001488 }, { "content": "fn main() {\n\n #[allow(unused_variables)]\n\n let some_option_value = Some(3);\n\n /*\n\n The next line causes the compiler error, \"refutable pattern in local \n\n binding: `None` not covered\", because.\n\n */\n\n // let Some(x) = some_option_value;\n\n /*\n\n The next if/let statement causes a warning: \"irrefutable if-let pattern\".\n\n */\n\n if let x = 5 {\n\n println!(\"{}\", x);\n\n} ;\n\n}\n", "file_path": "18_patterns_and_matching/refutability/src/main.rs", "rank": 9, "score": 157310.82391683853 }, { "content": "fn main() {\n\n /*\n\n Even though we tell the spawned thread to print until i is 9, it only got to \n\n 5 before the main thread shuts down. 
(This was the default behavior when not\n\n using the handle returned by the spawn function.)\n\n */\n\n\n\n // thread::spawn(|| {\n\n // The handle returned by spawn allows us to wait for threads to complete:\n\n let handle = thread::spawn(|| {\n\n for i in 1..10 {\n\n println!(\"in new_thread: hi number {} from the spawned thread!\", i);\n\n thread::sleep(Duration::from_millis(1));\n\n }\n\n });\n\n\n\n // Calling join at this point causes all of the output from the spawned \n\n // thread to appear before any of the output from the main thread.\n\n // handle.join().unwrap(); \n\n\n", "file_path": "16_fearless_concurrency/using_threads/src/main.rs", "rank": 11, "score": 155129.43902030773 }, { "content": "fn main() {\n\n validation();\n\n}\n\n\n\n/*\n\nThis is not an ideal solution: if it was absolutely critical that the program \n\nonly operated on values between 1 and 100, and it had many functions with this \n\nrequirement, having a check like this in every function would be tedious (and \n\nmight impact performance).\n\n\n\nloop {\n\n // --snip--\n\n\n\n let guess: i32 = match guess.trim().parse() {\n\n Ok(num) => num,\n\n Err(_) => continue,\n\n };\n\n\n\n if guess < 1 || guess > 100 {\n\n println!(\"The secret number will be between 1 and 100.\");\n", "file_path": "9_error_handling/panic_or_no_panic/src/main.rs", "rank": 12, "score": 154947.4161368763 }, { "content": "fn main() {\n\n matching_literals();\n\n matching_named_variables();\n\n multiple_patterns();\n\n matching_ranges();\n\n destructuring_structs();\n\n destructuring_enums();\n\n nested_structs_and_enums();\n\n destructuring_structs_and_tuples();\n\n ignoring_an_entire_value();\n\n ignoring_parts_of_a_value();\n\n ignore_named_var_with_underscore();\n\n ignoring_remaining_parts();\n\n match_guards();\n\n bindings();\n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 13, "score": 154656.4244199755 }, { "content": "fn main() {\n\n /*\n\n The if let syntax lets 
you combine if and let into a less verbose way to \n\n handle values that match one pattern while ignoring the rest. Consider \n\n this code that matches on an Option<u8> value but only wants to execute \n\n code if the value is 3.\n\n */\n\n // let some_u8_value = Some(0u8);\n\n let some_u8_value = Some(3);\n\n match some_u8_value {\n\n Some(3) => println!(\"three\"),\n\n _ => (),\n\n }\n\n\n\n // Instead, we could write this in a shorter way using if let. \n\n if let Some(3) = some_u8_value {\n\n println!(\"three\");\n\n }\n\n\n\n let coin = Coin::Quarter(UsState::Alaska);\n", "file_path": "6_enums_and_pattern_matching/enums_and_if_let/src/main.rs", "rank": 14, "score": 152146.03389729303 }, { "content": "fn main() {\n\n ip_addr_struct();\n\n ip_addr_enum();\n\n ip_addr_enum_different_types();\n\n ip_addr_standard_library();\n\n variety_of_types();\n\n option_enum();\n\n}\n\n\n", "file_path": "6_enums_and_pattern_matching/defining_enums/src/main.rs", "rank": 15, "score": 152146.03389729303 }, { "content": "fn function_parameters() {\n\n let point = (3, 5);\n\n print_coordinates(&point);\n\n}", "file_path": "18_patterns_and_matching/places_to_use_patterns/src/main.rs", "rank": 20, "score": 140125.45844781422 }, { "content": "fn let_statement() {\n\n let (x, y, z) = (1, 2, 3);\n\n println!(\"let_statemnt: x={}, y={}, z={}.\", x, y, z);\n\n}\n\n\n", "file_path": "18_patterns_and_matching/places_to_use_patterns/src/main.rs", "rank": 21, "score": 140125.45844781422 }, { "content": "fn if_let_expressions() {\n\n let favorite_color: Option<&str> = None;\n\n let is_tuesday = false;\n\n let age: Result<u8, _> = \"34\".parse();\n\n\n\n if let Some(color) = favorite_color {\n\n println!(\"if_let_expressions: Using your favorite color, {}, as the background\", color);\n\n } else if is_tuesday {\n\n println!(\"if_let_expressions: Tuesday is green day!\");\n\n } else if let Ok(age) = age {\n\n if age > 30 {\n\n println!(\"if_let_expressions: Using purple as the background 
color\");\n\n } else {\n\n println!(\"if_let_expressions: Using orange as the background color\");\n\n }\n\n } else {\n\n println!(\"if_let_expressions: Using blue as the background color\");\n\n }\n\n}\n\n\n", "file_path": "18_patterns_and_matching/places_to_use_patterns/src/main.rs", "rank": 22, "score": 140125.45844781422 }, { "content": "fn print_coordinates(&(x, y): &(i32, i32)) {\n\n println!(\"Current location from function_parameters/print_coordinates: ({}, {})\", x, y);\n\n}\n\n\n", "file_path": "18_patterns_and_matching/places_to_use_patterns/src/main.rs", "rank": 27, "score": 126483.82939000832 }, { "content": "fn option_match() {\n\n fn plus_one(x: Option<i32>) -> Option<i32> {\n\n match x {\n\n None => None,\n\n Some(i) => Some(i + 1),\n\n }\n\n }\n\n\n\n \n\n let five = Some(5);\n\n let six = plus_one(five);\n\n let none = plus_one(None);\n\n\n\n println!(\"In option_match, five is {:?}.\", five);\n\n println!(\"In option_match, six is {:?}.\", six);\n\n println!(\"In option_match, none is {:?}.\", none);\n\n\n\n /*\n\n This version of our plus_one function that has a bug and won’t compile (We \n\n didn’t handle the None case, so this code will cause a bug):\n\n */\n\n // fn plus_one(x: Option<i32>) -> Option<i32> {\n\n // match x {\n\n // Some(i) => Some(i + 1),\n\n // }\n\n}\n\n\n", "file_path": "6_enums_and_pattern_matching/enums_and_match/src/main.rs", "rank": 29, "score": 122614.64584799214 }, { "content": "fn simple_match() {\n\n enum Coin {\n\n Penny,\n\n Nickel,\n\n Dime,\n\n Quarter,\n\n }\n\n\n\n fn value_in_cents(coin: Coin) -> u8 {\n\n match coin {\n\n Coin::Penny => {\n\n println!(\"Lucky penny!\");\n\n 1\n\n },\n\n Coin::Nickel => 5,\n\n Coin::Dime => 10,\n\n Coin::Quarter => 25,\n\n }\n\n } \n\n println!(\"In simple_match, a penny is worth {} cent(s).\", value_in_cents(Coin::Penny));\n\n println!(\"In simple_match, a nickel is worth {} cent(s).\", value_in_cents(Coin::Nickel));\n\n println!(\"In simple_match, a dime is worth {} 
cent(s).\", value_in_cents(Coin::Dime));\n\n println!(\"In simple_match, a quarter is worth {} cent(s).\", value_in_cents(Coin::Quarter));\n\n}\n\n\n", "file_path": "6_enums_and_pattern_matching/enums_and_match/src/main.rs", "rank": 30, "score": 122614.64584799214 }, { "content": "fn main() {\n\n create_new_vectors();\n\n update_vector();\n\n drop_vector();\n\n read_elements();\n\n iterating_values();\n\n multiple_types_with_enum();\n\n}\n\n\n", "file_path": "8_common_collections/vectors/src/main.rs", "rank": 31, "score": 122478.3980814892 }, { "content": "fn main() {\n\n for_loop();\n\n map();\n\n}\n\n\n", "file_path": "13_iterators_and_closures/iterators/src/main.rs", "rank": 32, "score": 122478.3980814892 }, { "content": "fn main() {\n\n let v = vec![10, 9, 8, 7, 6, 5, 4, 4, 4, 4, 3, 3, 3, 3, 2, 1];\n\n println!(\"In summary, v is: {:?}\", v);\n\n println!(\"In summary, mean is: {}\", mean(&v));\n\n println!(\"In summary, median is: {}\", median(&v));\n\n println!(\"In summary, mode is: {:?}\", mode(&v));\n\n let text = String::from(\"Hello there, Alberta.\");\n\n println!(\"In summary, pig latin for '{}' is '{}'\", text, pig_latin(&text));\n\n employee_departments();\n\n}\n\n\n", "file_path": "8_common_collections/exercises/src/main.rs", "rank": 33, "score": 122478.3980814892 }, { "content": "fn main() {\n\n /*\n\n The env::args function returns an iterator! 
Rather than collecting the \n\n iterator values into a vector and then passing a slice to Config::new, now \n\n we’re passing ownership of the iterator returned from env::args to \n\n Config::new directly.\n\n */\n\n let config = Config::new(env::args()).unwrap_or_else(|err| {\n\n eprintln!(\"Problem parsing arguments: {}\", err);\n\n process::exit(1);\n\n });\n\n\n\n if let Err(e) = minigrep::run(config) {\n\n eprintln!(\"Application error: {}\", e);\n\n\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "13_iterators_and_closures/minigrep/src/main.rs", "rank": 34, "score": 122478.3980814892 }, { "content": "fn main() {\n\n let simulated_user_specified_value = 10;\n\n let simulated_random_number = 7;\n\n\n\n generate_workout(\n\n simulated_user_specified_value,\n\n simulated_random_number\n\n );\n\n\n\n let x = 4;\n\n\n\n let equal_to_x = |z| z == x;\n\n\n\n let y = 4;\n\n\n\n println!(\"x == y? {}\", equal_to_x(y));\n\n\n\n /*\n\n FnOnce consumes the variables it captures from its enclosing scope, known \n\n as the closure’s environment. 
To consume the captured variables, the closure \n", "file_path": "13_iterators_and_closures/closures/src/main.rs", "rank": 35, "score": 122478.3980814892 }, { "content": "fn main() {\n\n simple_scope();\n\n copy_bind();\n\n move_example();\n\n clone_example();\n\n stack_only_copy();\n\n ownership_and_functions();\n\n return_values_and_scope();\n\n return_value_ownership_transfer();\n\n}\n\n\n", "file_path": "4_understanding_ownership/ownership/src/main.rs", "rank": 36, "score": 122478.3980814892 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "1_getting_started/hello_world/main.rs", "rank": 37, "score": 122478.3980814892 }, { "content": "fn main() {\n\n create_strings();\n\n updating_strings();\n\n indexing_into_strings();\n\n slicing_strings();\n\n iterating_strings();\n\n}\n\n\n", "file_path": "8_common_collections/strings/src/main.rs", "rank": 38, "score": 122478.3980814892 }, { "content": "fn main() {\n\n macro_rules();\n\n derive_macro();\n\n attribute_like_macros();\n\n function_like_macros();\n\n}\n\n\n", "file_path": "19_advanced_features/macros/src/main.rs", "rank": 39, "score": 122478.3980814892 }, { "content": "fn main() {\n\n another_function(5, 6);\n\n\n\n let x = 5;\n\n\n\n let y = {\n\n let x = 3;\n\n x + 1\n\n };\n\n\n\n println!(\"The value of y is: {}\", y);\n\n\n\n let x = five();\n\n\n\n println!(\"The value of x is: {}\", x);\n\n\n\n let x = plus_one(5);\n\n\n\n println!(\"The value of x is: {}\", x);\n\n}\n\n\n", "file_path": "3_common_programming_concepts/functions/src/main.rs", "rank": 40, "score": 120891.60822805986 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "1_getting_started/hello_cargo/src/main.rs", "rank": 41, "score": 120891.60822805986 }, { "content": "fn main() {\n\n string_position();\n\n string_slice();\n\n string_literals_are_slices();\n\n string_slice_parameters();\n\n other_slices();\n\n}\n\n\n", "file_path": 
"4_understanding_ownership/slice_type/src/main.rs", "rank": 42, "score": 120891.60822805986 }, { "content": "fn main() {\n\n simple_message_passing();\n\n sending_multiple_values();\n\n multiple_producers();\n\n}\n\n\n", "file_path": "16_fearless_concurrency/message_passing/src/main.rs", "rank": 43, "score": 120891.60822805986 }, { "content": "fn main() {\n\n using_mutexes();\n\n multi_threaded_mutex_error();\n\n simpler_multi_threaded_mutex_error();\n\n multiple_ownership_multiple_threads();\n\n atomic_reference_counting();\n\n}\n\n\n", "file_path": "16_fearless_concurrency/shared_state/src/main.rs", "rank": 44, "score": 120891.60822805986 }, { "content": "fn main() {\n\n let value = Rc::new(RefCell::new(5));\n\n\n\n let a = Rc::new(Cons(Rc::clone(&value), Rc::new(Nil)));\n\n\n\n let b = Cons(Rc::new(RefCell::new(6)), Rc::clone(&a));\n\n let c = Cons(Rc::new(RefCell::new(10)), Rc::clone(&a));\n\n\n\n *value.borrow_mut() += 10;\n\n\n\n println!(\"a after = {:?}\", a);\n\n println!(\"b after = {:?}\", b);\n\n println!(\"c after = {:?}\", c);\n\n}", "file_path": "15_smart_pointers/interior_immutability/src/main.rs", "rank": 45, "score": 120891.60822805986 }, { "content": "fn main() {\n\n let mut collection = AveragedCollection{list: vec![], average: 0.0};\n\n collection.add(2);\n\n collection.add(4);\n\n collection.add(6);\n\n collection.remove();\n\n println!(\"average is {}.\", collection.average());\n\n println!(\"collection is {:?}.\", collection);\n\n}\n", "file_path": "17_object_oriented_programming/characteristics/src/main.rs", "rank": 46, "score": 120891.60822805986 }, { "content": "fn main() {\n\n references();\n\n mutable_references();\n\n allowed_mutable_reference_scope();\n\n dangling_reference();\n\n}\n\n\n", "file_path": "4_understanding_ownership/references_and_borrowing/src/main.rs", "rank": 47, "score": 120891.60822805986 }, { "content": "fn main() {\n\n raw_pointer_dereference();\n\n call_unsafe_function();\n\n safe_abstraction_of_unsafe_code();\n\n 
calling_external_code();\n\n mutable_static_variables();\n\n unsafe_trait();\n\n}\n\n\n\n/*\n\nRaw pointers can be immutable or mutable and are written as *const T and *mut T, \n\nrespectively. The asterisk isn’t the dereference operator; it’s part of the type \n\nname. In the context of raw pointers, immutable means that the pointer can’t be \n\ndirectly assigned to after being dereferenced. Different from references and \n\nsmart pointers, raw pointers:\n\n\n\n1) Are allowed to ignore the borrowing rules by having both immutable and \n\nmutable pointers or multiple mutable pointers to the same location.\n\n2) Aren’t guaranteed to point to valid memory.\n\n3) Are allowed to be null.\n\n4) Don’t implement any automatic cleanup.\n\n*/\n", "file_path": "19_advanced_features/unsafe_rust/src/main.rs", "rank": 48, "score": 120891.60822805986 }, { "content": "fn main() {\n\n function_pointers();\n\n closure_traits();\n\n initializer_functions();\n\n returning_closures();\n\n}\n\n\n", "file_path": "19_advanced_features/advanced_functions/src/main.rs", "rank": 49, "score": 120891.60822805986 }, { "content": "fn main() {\n\n operator_overloading();\n\n methods_with_same_name();\n\n supertraits();\n\n external_traits_on_external_types();\n\n}\n\n\n\n/*\n", "file_path": "19_advanced_features/advanced_traits/src/main.rs", "rank": 50, "score": 120891.60822805986 }, { "content": "fn main() {\n\n pretend_drop_example();\n\n early_drop();\n\n}\n\n\n", "file_path": "15_smart_pointers/drop_trait/src/main.rs", "rank": 51, "score": 120891.60822805986 }, { "content": "fn main() {\n\n creating_hash_maps();\n\n ownership();\n\n accessing_values();\n\n updating();\n\n}\n\n\n", "file_path": "8_common_collections/hash_maps/src/main.rs", "rank": 52, "score": 120891.60822805986 }, { "content": "fn main() {\n\n type_synonyms_with_type_aliases();\n\n never_type();\n\n sized_trait();\n\n}\n\n\n", "file_path": "19_advanced_features/advanced_types/src/main.rs", "rank": 53, "score": 
120891.60822805986 }, { "content": "fn main() {\n\n let num = 10;\n\n println!(\"Hello, world! {} plus one is {}!\", num, add_one::add_one(num));\n\n println!(\"Hello, world! {} plus two is {}!\", num, add_two::add_two(num));\n\n}", "file_path": "14_more_about_cargo_and_crates/add/adder/src/main.rs", "rank": 54, "score": 120891.60822805986 }, { "content": "fn main() {\n\n // let x = 5;\n\n let mut x = 5;\n\n println!(\"The value of x is: {}\", x);\n\n x = 6;\n\n println!(\"The value of x is: {}\", x);\n\n\n\n println!(\"The value of MAX_POINTS is: {}\", MAX_POINTS);\n\n\n\n /* Shadowing is different from marking a variable as mut, because we’ll \n\n get a compile-time error if we accidentally try to reassign to this \n\n variable without using the let keyword. By using let, we can perform a few\n\n transformations on a value but have the variable be immutable after those \n\n transformations have been completed. */\n\n let y = 5;\n\n \n\n let y = y + 1;\n\n\n\n let y = y * 2;\n\n\n\n println!(\"The value of y is: {}\", y);\n\n}\n", "file_path": "3_common_programming_concepts/variables/src/main.rs", "rank": 55, "score": 120891.60822805986 }, { "content": "fn main() {\n\n box_syntax();\n\n recursive_types();\n\n}\n\n\n\n/*\n\nJust like any owned value, when a box goes out of scope, as b does at the end \n\nof main, it will be deallocated.\n\n\n\nPutting a single value on the heap isn’t very useful, so you won’t use boxes by \n\nthemselves in this way very often. Having values like a single i32 on the stack, \n\nwhere they’re stored by default, is more appropriate in the majority of \n\nsituations. 
\n\n*/\n", "file_path": "15_smart_pointers/box_pointer/src/main.rs", "rank": 56, "score": 120891.60822805986 }, { "content": "fn bind_value_match() {\n\n #[derive(Debug)] // so we can inspect the state in a minute\n\n enum UsState {\n\n Alabama,\n\n Alaska,\n\n // --snip--\n\n }\n\n\n\n enum Coin {\n\n Penny,\n\n Nickel,\n\n Dime,\n\n Quarter(UsState),\n\n }\n\n fn value_in_cents(coin: Coin) -> u8 {\n\n match coin {\n\n Coin::Penny => 1,\n\n Coin::Nickel => 5,\n\n Coin::Dime => 10,\n\n Coin::Quarter(state) => {\n", "file_path": "6_enums_and_pattern_matching/enums_and_match/src/main.rs", "rank": 57, "score": 120514.87137333909 }, { "content": "fn match_guards() {\n\n let num = Some(4);\n\n\n\n match num {\n\n Some(x) if x < 5 => println!(\"match_guards: num less than five: {}\", x),\n\n Some(x) => println!(\"match_guards: num {}\", x),\n\n None => (),\n\n } \n\n\n\n let x = Some(5);\n\n let y = 10;\n\n\n\n match x {\n\n Some(50) => println!(\"match_guards: Got 50\"),\n\n Some(n) if n == y => println!(\"match_guards: Matched, n = {:?}\", n),\n\n _ => println!(\"match_guards: Default case, x = {:?}\", x),\n\n }\n\n\n\n println!(\"match_guards: at the end: x = {:?}, y = {:?}\", x, y);\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 58, "score": 120123.10549800012 }, { "content": "fn matching_literals() {\n\n let x = 1;\n\n\n\n match x {\n\n 1 => println!(\"matching_literals: one\"),\n\n 2 => println!(\"matching_literals: two\"),\n\n 3 => println!(\"matching_literals: three\"),\n\n _ => println!(\"matching_literals: anything\"),\n\n } \n\n}\n\n\n\n/*\n\nNamed variables are irrefutable patterns that match any value, and we’ve used \n\nthem many times in the book. However, there is a complication when you use \n\nnamed variables in match expressions. 
Because match starts a new scope, \n\nvariables declared as part of a pattern inside the match expression will shadow \n\nthose with the same name outside the match construct, as is the case with all \n\nvariables. \n\n*/\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 59, "score": 120123.10549800012 }, { "content": "fn matching_ranges() {\n\n let x = 5;\n\n match x {\n\n 1...5 => println!(\"matching_ranges: one through five\"),\n\n _ => println!(\"matching_ranges: something else\"),\n\n } \n\n\n\n let x = 'c';\n\n match x {\n\n 'a'...'j' => println!(\"matching_ranges: early ASCII letter\"),\n\n 'k'...'z' => println!(\"matching_ranges: late ASCII letter\"),\n\n _ => println!(\"matching_ranges: something else\"),\n\n } \n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 60, "score": 120123.10549800012 }, { "content": "fn main() {\n\n dangling_references();\n\n lifetime_annotation_syntax();\n\n function_signature_lifetime_annotations();\n\n lifetime_annotation_restrictions();\n\n struct_lifetime_annotations();\n\n lifetime_elision();\n\n lifetime_annotations_in_methods();\n\n static_lifetime();\n\n generic_trait_params_trait_bounds_and_lifetimes();\n\n}\n\n\n", "file_path": "10_generic_types_traits_lifetimes/lifetimes/src/main.rs", "rank": 61, "score": 119385.32934188428 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let config = Config::new(&args).unwrap_or_else(|err| {\n\n eprintln!(\"Problem parsing arguments: {}\", err);\n\n process::exit(1);\n\n });\n\n\n\n if let Err(e) = minigrep::run(config) {\n\n eprintln!(\"Application error: {}\", e);\n\n\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "12_building_a_command_line_program/minigrep/src/main.rs", "rank": 62, "score": 119385.32934188428 }, { "content": "fn main() {\n\n implement_trait();\n\n default_implementation();\n\n trait_as_parameter();\n\n bound_syntax_parameters();\n\n 
multiple_trait_bounds();\n\n where_clause();\n\n return_implementation();\n\n get_largest();\n\n trait_bond_conditional_methods();\n\n}\n\n\n", "file_path": "10_generic_types_traits_lifetimes/traits/src/main.rs", "rank": 63, "score": 119385.32934188428 }, { "content": "fn main() {\n\n sharing_data();\n\n cloning_increases_reference_count();\n\n}\n\n\n", "file_path": "15_smart_pointers/reference_counted_pointer/src/main.rs", "rank": 64, "score": 119385.32934188428 }, { "content": "fn main() {\n\n\n\n let min = 1;\n\n let max = 10;\n\n\n\n println!(\"Guess the number from {} to {}!\", min, max);\n\n\n\n let secret_number = rand::thread_rng().gen_range(min, max + 1);\n\n\n\n // println!(\"The secret number is: {}\", secret_number);\n\n\n\n loop {\n\n\n\n println!(\"Please input your guess.\");\n\n\n\n let mut guess = String::new();\n\n\n\n io::stdin().read_line(&mut guess)\n\n .expect(\"Failed to read line\");\n\n\n", "file_path": "2_programming_a_guessing_game/guessing_game/src/main.rs", "rank": 65, "score": 119385.32934188428 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "16_fearless_concurrency/sync_and_send_traits/src/main.rs", "rank": 66, "score": 119385.32934188428 }, { "content": "fn main() {\n\n let number = 3;\n\n\n\n if number < 5 {\n\n println!(\"condition was true\");\n\n } else {\n\n println!(\"condition was false\");\n\n }\n\n\n\n let number = 3;\n\n\n\n if number != 0 {\n\n println!(\"number was something other than zero\");\n\n }\n\n\n\n let number = 6;\n\n\n\n if number % 4 == 0 {\n\n println!(\"number is divisible by 4\");\n\n } else if number % 3 == 0 {\n", "file_path": "3_common_programming_concepts/control_flow/src/main.rs", "rank": 67, "score": 119385.32934188428 }, { "content": "fn main() {\n\n let x = 2.0; // f64\n\n\n\n let y: f32 = 3.0; // f32\n\n\n\n // addition\n\n let sum = 5 + 10;\n\n\n\n // subtraction\n\n let difference = 95.5 - 4.3;\n\n\n\n // multiplication\n\n let product = 4 * 30;\n\n\n\n // 
division\n\n let quotient = 56.7 / 32.2;\n\n\n\n // remainder\n\n let remainder = 43 % 5;\n\n\n", "file_path": "3_common_programming_concepts/data_types/src/main.rs", "rank": 68, "score": 119385.32934188428 }, { "content": "fn main() {\n\n duplicate_code();\n\n abstracted_code();\n\n}\n\n\n", "file_path": "10_generic_types_traits_lifetimes/removing_duplication/src/main.rs", "rank": 69, "score": 117953.35114536333 }, { "content": "fn main() {\n\n oo_blog();\n\n states_as_types_blog();\n\n}\n\n\n", "file_path": "17_object_oriented_programming/state_design_pattern/src/main.rs", "rank": 70, "score": 117953.35114536333 }, { "content": "fn main() {\n\n create_modify();\n\n field_init_shorthand();\n\n struct_update_syntax();\n\n tuple_structs();\n\n}\n\n\n", "file_path": "5_structs_and_structure_related_data/defining_structs/src/main.rs", "rank": 71, "score": 117953.35114536333 }, { "content": "fn matching_named_variables() {\n\n let x = Some(5);\n\n let y = 10;\n\n\n\n match x {\n\n Some(50) => println!(\"matching_named_variables: Got 50\"),\n\n /*\n\n Because we’re in a new scope inside the match expression, this is a new \n\n y variable, not the y we declared at the beginning with the value 10. 
\n\n This new y binding will match any value inside a Some, which is what we \n\n have in x.\n\n */\n\n Some(y) => println!(\"matching_named_variables: matched, y = {:?}\", y),\n\n _ => println!(\"matching_named_variables: default case, x = {:?}\", x),\n\n }\n\n\n\n println!(\"matching_named_variables: at the end: x = {:?}, y = {:?}\", x, y);\n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 72, "score": 117778.3263048122 }, { "content": "fn underscore_placeholder() {\n\n // The _ pattern will match any value.\n\n let some_u8_value = 0u8;\n\n \n\n match some_u8_value {\n\n 1 => println!(\"one\"),\n\n 3 => println!(\"three\"),\n\n 5 => println!(\"five\"),\n\n 7 => println!(\"seven\"),\n\n _ => (),\n\n }\n\n}\n", "file_path": "6_enums_and_pattern_matching/enums_and_match/src/main.rs", "rank": 73, "score": 117778.3263048122 }, { "content": "fn main() {\n\n same_functions_different_types();\n\n struct_definition();\n\n mixed_type_struct();\n\n method_definition();\n\n mixup_struct_types();\n\n}\n\n\n", "file_path": "10_generic_types_traits_lifetimes/generic_data_types/src/main.rs", "rank": 74, "score": 116590.10533839746 }, { "content": "fn main() {\n\n let rect1 = Rectangle { width: 30, height: 50 };\n\n\n\n println!(\n\n \"The area of the rectangle is {} square pixels.\",\n\n rect1.area()\n\n );\n\n\n\n let rect2 = Rectangle { width: 10, height: 40 };\n\n let rect3 = Rectangle { width: 60, height: 45 };\n\n\n\n println!(\"Can rect1 hold rect2? {}\", rect1.can_hold(&rect2));\n\n println!(\"Can rect1 hold rect3? 
{}\", rect1.can_hold(&rect3));\n\n\n\n let sq = Rectangle::square(3);\n\n println!(\"sq is {:?}\", sq);\n\n\n\n}", "file_path": "5_structs_and_structure_related_data/struct_method_syntax/src/main.rs", "rank": 75, "score": 116590.10533839746 }, { "content": "fn main () {\n\n creating_reference_cycle();\n\n tree_structure();\n\n vizualize_strong_and_weak_count();\n\n}\n\n\n", "file_path": "15_smart_pointers/reference_cycles_can_leak_memory/src/main.rs", "rank": 76, "score": 116590.10533839746 }, { "content": "fn main() {\n\n calculate_area();\n\n calculate_tuple_area();\n\n calculate_struct_area();\n\n}\n\n\n", "file_path": "5_structs_and_structure_related_data/example_structs_program/src/main.rs", "rank": 77, "score": 116590.10533839746 }, { "content": "fn for_loop() {\n\n let v1 = vec![1, 2, 3];\n\n\n\n let v1_iter = v1.iter();\n\n\n\n for val in v1_iter {\n\n println!(\"In for_loop, got: {}\", val);\n\n }\n\n}\n\n\n", "file_path": "13_iterators_and_closures/iterators/src/main.rs", "rank": 78, "score": 116439.82753357623 }, { "content": "fn main() {\n\n let screen = Screen {\n\n components: vec![\n\n Box::new(SelectBox {\n\n width: 75,\n\n height: 10,\n\n options: vec![\n\n String::from(\"Yes\"),\n\n String::from(\"Maybe\"),\n\n String::from(\"No\")\n\n ],\n\n }),\n\n Box::new(Button {\n\n width: 50,\n\n height: 10,\n\n label: String::from(\"OK\"),\n\n }),\n\n ],\n\n };\n\n\n\n screen.run();\n\n}\n\n\n\n/*\n\nYou can only make object-safe traits into trait objects. Some complex rules \n\ngovern all the properties that make a trait object safe, but in practice, only \n\ntwo rules are relevant. 
A trait is object safe if all the methods defined in the \n", "file_path": "17_object_oriented_programming/trait_objects_allow_different_types/src/main.rs", "rank": 79, "score": 115290.58286670884 }, { "content": "fn main() {\n\n let listener = TcpListener::bind(\"127.0.0.1:7878\").unwrap();\n\n\n\n for stream in listener.incoming() {\n\n let stream = stream.unwrap();\n\n\n\n handle_connection(stream);\n\n }\n\n}\n\n\n", "file_path": "20_multi_threaded_web_server/single_threaded_web_server/src/main.rs", "rank": 80, "score": 115290.58286670884 }, { "content": "fn validation() {\n\n #[derive(Debug)]\n\n pub struct Guess {\n\n value: i32,\n\n }\n\n\n\n impl Guess {\n\n pub fn new(value: i32) -> Guess {\n\n if value < 1 || value > 100 {\n\n panic!(\"Guess value must be between 1 and 100, got {}.\", value);\n\n }\n\n\n\n Guess {\n\n value\n\n }\n\n }\n\n\n\n pub fn value(&self) -> i32 {\n\n self.value\n\n }\n\n }\n\n\n\n // The next line causes compilation to fail, because the value fails validation:\n\n // let guess = Guess::new(101);\n\n let guess = Guess::new(100);\n\n println!(\"Guess is {:?}\", guess.value());\n\n}", "file_path": "9_error_handling/panic_or_no_panic/src/main.rs", "rank": 81, "score": 113279.22298282951 }, { "content": "fn buffer_overread() {\n\n let v = vec![1, 2, 3];\n\n let overread = v[99];\n\n println!(\"I will crash before this is output: {}.\", overread);\n\n}\n", "file_path": "9_error_handling/panic/src/main.rs", "rank": 82, "score": 113279.22298282951 }, { "content": "fn basic_panic() {\n\n panic!(\"crash and burn\");\n\n}\n\n\n", "file_path": "9_error_handling/panic/src/main.rs", "rank": 83, "score": 113279.22298282951 }, { "content": "fn bindings() {\n\n enum Message {\n\n Hello { id: i32 },\n\n }\n\n\n\n let msg = Message::Hello { id: 5 };\n\n\n\n match msg {\n\n Message::Hello { id: id_variable @ 3...7 } => {\n\n println!(\"Found an id in range: {}\", id_variable)\n\n },\n\n Message::Hello { id: 10...12 } => {\n\n println!(\"Found an id 
in another range\")\n\n },\n\n Message::Hello { id } => {\n\n println!(\"Found some other id: {}\", id)\n\n },\n\n }\n\n}", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 84, "score": 112988.23126592871 }, { "content": "fn using_mutexes() {\n\n let m = Mutex::new(5);\n\n\n\n {\n\n /*\n\n To access the data inside the mutex, we use the lock method to acquire \n\n the lock. This call will block the current thread so it can’t do any \n\n work until it’s our turn to have the lock.\n\n\n\n The call to lock returns a smart pointer called MutexGuard, wrapped in a \n\n LockResult that we handled with the call to unwrap. The MutexGuard smart \n\n pointer implements Deref to point at our inner data; the smart pointer \n\n also has a Drop implementation that releases the lock automatically when \n\n a MutexGuard goes out of scope.\n\n */\n\n let mut num = m.lock().unwrap();\n\n *num = 6;\n\n }\n\n\n\n println!(\"using_mutexes: m = {:?}\", m);\n\n}\n\n\n", "file_path": "16_fearless_concurrency/shared_state/src/main.rs", "rank": 85, "score": 110809.47539957966 }, { "content": "fn destructuring_structs() {\n\n struct Point {\n\n x: i32,\n\n y: i32,\n\n }\n\n\n\n let p = Point { x: 0, y: 7 };\n\n\n\n let Point { x: a, y: b } = p;\n\n assert_eq!(0, a);\n\n assert_eq!(7, b);\n\n println!(\"destructuring_structs: a={}, b={}.\", a, b);\n\n\n\n /*\n\n Because having variable names match the fields is common and because writing \n\n let Point { x: x, y: y } = p; contains a lot of duplication, there is a \n\n shorthand for patterns that match struct fields: you only need to list the \n\n name of the struct field, and the variables created from the pattern will \n\n have the same names.\n\n */ \n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 86, "score": 110350.52749701148 }, { "content": "fn multiple_patterns() {\n\n let x = 1;\n\n\n\n match x {\n\n 1 | 2 => println!(\"multiple_patterns: one or two\"),\n\n 3 => 
println!(\"multiple_patterns: three\"),\n\n _ => println!(\"multiple_patterns: anything\"),\n\n } \n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 87, "score": 110350.52749701148 }, { "content": "fn destructuring_enums() {\n\n \n\n #[allow(dead_code)]\n\n enum Message {\n\n Quit,\n\n Move { x: i32, y: i32 },\n\n Write(String),\n\n ChangeColor(i32, i32, i32),\n\n } \n\n\n\n let msg = Message::ChangeColor(0, 160, 255);\n\n\n\n match msg {\n\n Message::Quit => {\n\n println!(\"destructuring_enums: The Quit variant has no data to destructure.\")\n\n },\n\n Message::Move { x, y } => {\n\n println!(\n\n \"destructuring_enums: Move in the x direction {} and in the y direction {}\",\n\n x,\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 88, "score": 110350.52749701148 }, { "content": "fn variety_of_types() {\n\n #[derive(Debug)]\n\n struct QuitMessage; // unit struct\n\n #[derive(Debug)]\n\n struct MoveMessage {\n\n x: i32,\n\n y: i32,\n\n }\n\n #[derive(Debug)]\n\n struct WriteMessage(String); // tuple struct\n\n #[derive(Debug)]\n\n struct ChangeColorMessage(i32, i32, i32); // tuple struct\n\n\n\n let quit_struct = QuitMessage;\n\n println!(\"In variety_of_types, quit_struct is {:?}.\", quit_struct);\n\n let move_struct = MoveMessage { x: 0, y: 0 };\n\n println!(\"In variety_of_types, move_struct is {:?}.\", move_struct);\n\n let write_struct = WriteMessage(String::from(\"Hello!\"));\n\n println!(\"In variety_of_types, write_struct is {:?}.\", write_struct);\n\n let color_struct = ChangeColorMessage(0, 0, 0);\n", "file_path": "6_enums_and_pattern_matching/defining_enums/src/main.rs", "rank": 89, "score": 107865.17522199696 }, { "content": "fn nested_structs_and_enums() {\n\n #[allow(dead_code)]\n\n enum Color {\n\n Rgb(i32, i32, i32),\n\n Hsv(i32, i32, i32),\n\n }\n\n #[allow(dead_code)]\n\n enum Message {\n\n Quit,\n\n Move { x: i32, y: i32 },\n\n Write(String),\n\n ChangeColor(Color),\n\n } \n\n 
let msg = Message::ChangeColor(Color::Hsv(0, 160, 255));\n\n\n\n match msg {\n\n Message::ChangeColor(Color::Rgb(r, g, b)) => {\n\n println!(\n\n \"nested_structs_and_enums: Change the color to red {}, green {}, and blue {}\",\n\n r,\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 90, "score": 107865.17522199696 }, { "content": "fn ignoring_an_entire_value() {\n\n foo(3, 4);\n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 91, "score": 107865.17522199696 }, { "content": "fn ignoring_remaining_parts() {\n\n #[allow(dead_code)]\n\n struct Point {\n\n x: i32,\n\n y: i32,\n\n z: i32,\n\n }\n\n\n\n let origin = Point { x: 0, y: 0, z: 0 };\n\n\n\n match origin {\n\n Point { x, .. } => println!(\"ignoring_remaining_parts: x is {}\", x),\n\n } \n\n\n\n let numbers = (2, 4, 8, 16, 32);\n\n\n\n match numbers {\n\n (first, .., last) => {\n\n println!(\"ignoring_remaining_parts: Some numbers: {}, {}\", first, last);\n\n },\n\n }\n\n}\n\n\n\n/*\n\nA match guard is an additional if condition specified after the pattern in a \n\nmatch arm that must also match, along with the pattern matching, for that arm to \n\nbe chosen. 
\n\n*/\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 92, "score": 107865.17522199696 }, { "content": "fn option_enum() {\n\n // This enum is Option<T>, and it is defined by the standard library as follows:\n\n // enum Option<T> {\n\n // Some(T),\n\n // None,\n\n // }\n\n let some_number = Some(5);\n\n let some_string = Some(\"a string\");\n\n let absent_number: Option<i32> = None;\n\n\n\n println!(\"In option_enum, some_number is {:?}.\", some_number);\n\n println!(\"In option_enum, some_string is {:?}.\", some_string);\n\n println!(\"In option_enum, absent_number is {:?}.\", absent_number);\n\n\n\n /*\n\n Because Option<T> and T (where T can be any type) are different types, the\n\n compiler won’t let us use an Option<T> value as if it were definitely a \n\n valid value. For example, this code won’t compile because it’s trying to \n\n add an i8 to an Option<i8> (Generally, this helps catch one of the most \n\n common issues with null: assuming that something isn’t null when it \n\n actually is.):\n\n */\n\n // let x: i8 = 5;\n\n // let y: Option<i8> = Some(5);\n\n // let sum = x + y;\n\n\n\n}", "file_path": "6_enums_and_pattern_matching/defining_enums/src/main.rs", "rank": 93, "score": 107865.17522199696 }, { "content": "fn destructuring_structs_and_tuples() {\n\n struct Point {\n\n x: i32,\n\n y: i32,\n\n }\n\n let ((feet, inches), Point {x, y}) = ((3, 10), Point { x: 3, y: -10 });\n\n println!(\"destructuring_structs_and_tuples: feet={}, inches={}, x={}, y={}.\", feet, inches, x, y);\n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 94, "score": 107865.17522199696 }, { "content": "fn ignoring_parts_of_a_value() {\n\n let mut setting_value = Some(5);\n\n let new_setting_value = Some(10);\n\n\n\n match (setting_value, new_setting_value) {\n\n (Some(_), Some(_)) => {\n\n println!(\"ignoring_parts_of_a_value: Can't overwrite an existing customized value\");\n\n }\n\n _ => {\n\n setting_value = 
new_setting_value;\n\n }\n\n }\n\n println!(\"ignoring_parts_of_a_value: setting is {:?}\", setting_value); \n\n\n\n let numbers = (2, 4, 8, 16, 32);\n\n\n\n match numbers {\n\n (first, _, third, _, fifth) => {\n\n println!(\"ignoring_parts_of_a_value: Some numbers: {}, {}, {}\", first, third, fifth)\n\n },\n\n } \n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 95, "score": 107865.17522199696 }, { "content": "fn multi_threaded_mutex_error() {\n\n // The commented code fails to compile with error: \"capture of moved value: \n\n //`counter`\". See the simpler_multi_threaded_mutex_error function for\n\n // a more specific compiler error.\n\n // let counter = Mutex::new(0);\n\n // let mut handles = vec![];\n\n // for _ in 0..10 {\n\n // let handle = thread::spawn(move || {\n\n // let mut num = counter.lock().unwrap();\n\n\n\n // *num += 1;\n\n // });\n\n // handles.push(handle);\n\n // }\n\n // for handle in handles {\n\n // handle.join().unwrap();\n\n // }\n\n // println!(\"multi_threaded_mutex_error result: {}\", *counter.lock().unwrap());\n\n}\n\n\n", "file_path": "16_fearless_concurrency/shared_state/src/main.rs", "rank": 96, "score": 105785.83362326339 }, { "content": "// We attach data to each variant of the enum directly, so there is no need for \n\n// an extra struct.\n\nfn ip_addr_enum() {\n\n #[derive(Debug)]\n\n enum IpAddr {\n\n V4(String),\n\n V6(String),\n\n }\n\n\n\n let home = IpAddr::V4(String::from(\"127.0.0.1\"));\n\n println!(\"In ip_addr_enum, home is {:?}.\", home);\n\n\n\n let loopback = IpAddr::V6(String::from(\"::1\"));\n\n println!(\"In ip_addr_enum, loopback is {:?}.\", loopback);\n\n}\n\n\n", "file_path": "6_enums_and_pattern_matching/defining_enums/src/main.rs", "rank": 97, "score": 105519.34540174103 }, { "content": "fn ignore_named_var_with_underscore() {\n\n let s = Some(String::from(\"Hello!\"));\n\n /*\n\n An unused variable starting with an underscore still binds the value, which \n\n might 
take ownership of the value. The next line causes compiler error: \"\n\n borrow of moved value: `s`\". because the s value will still be moved \n\n into _s, which prevents us from using s again. \n\n */\n\n // if let Some(_s) = s {\n\n /*\n\n Using the underscore by itself doesn’t ever bind to the value. The next line\n\n will compile without any errors because s doesn’t get moved into \"_\".\n\n */\n\n if let Some(_) = s {\n\n println!(\"ignore_named_var_with_underscore: found a string\");\n\n }\n\n\n\n println!(\"ignore_named_var_with_underscore: s={:?}\", s); \n\n}\n\n\n", "file_path": "18_patterns_and_matching/pattern_syntax/src/main.rs", "rank": 98, "score": 105519.34540174103 }, { "content": "fn ip_addr_struct() {\n\n\n\n #[derive(Debug)]\n\n struct IpAddr {\n\n kind: IpAddrKind,\n\n address: String,\n\n }\n\n\n\n let four = IpAddrKind::V4;\n\n let six = IpAddrKind::V6;\n\n\n\n route(four);\n\n route(six);\n\n\n\n let home = IpAddr {\n\n kind: IpAddrKind::V4,\n\n address: String::from(\"127.0.0.1\"),\n\n };\n\n println!(\"In ip_addr_struct, home is {:?}.\", home);\n\n\n\n let loopback = IpAddr {\n\n kind: IpAddrKind::V6,\n\n address: String::from(\"::1\"),\n\n };\n\n println!(\"In ip_addr_struct, loopback is {:?}.\", loopback);\n\n}\n\n\n", "file_path": "6_enums_and_pattern_matching/defining_enums/src/main.rs", "rank": 99, "score": 105519.34540174103 } ]
Rust
src/number.rs
terrynsun/js.rs
702c162b7ad8e59c5e9b9ce0c0ec0ec43b532804
use std::cell::RefCell; use std::rc::Rc; use french_press::ScopeManager; use jsrs_common::backend::Backend; use jsrs_common::ast::*; use jsrs_common::ast::BinOp::*; use jsrs_common::types::coerce::{AsBool, AsNumber, AsString}; use jsrs_common::types::js_var::{JsVar, JsType}; use jsrs_common::types::js_var::JsPtrEnum::*; use jsrs_common::types::js_var::JsType::*; use jsrs_common::types::js_var::JsPtrTag; use jsrs_common::js_error::{self, JsError}; use eval::eval_exp; macro_rules! b { ($e: expr) => { $e.as_bool() } } macro_rules! n { ($e: expr) => { $e.as_number() } } macro_rules! ni64 { ($e: expr) => { $e.as_number() as i64 } } macro_rules! nu64 { ($e: expr) => { $e.as_number() as u64 } } macro_rules! ni32 { ($e: expr) => { { let n = $e.as_number(); if n.is_nan() { 0i32 } else { n as i32 } } } } macro_rules! nu32 { ($e: expr) => { $e.as_number() as u32 } } pub fn eval_binop(op: &BinOp, e1: &Exp, e2: &Exp, state: Rc<RefCell<ScopeManager>>) -> js_error::Result<JsType> { if let &And = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == false { JsBool(false) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } else if let &Or = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == true { JsBool(true) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } let val1_is_instance_var = match e1 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) => true, _ => false }; let val2_is_instance_var = match e2 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) 
=> true, _ => false }; let (val1, ptr1) = try!(eval_exp(e1, state.clone())); let (val2, ptr2) = try!(eval_exp(e2, state.clone())); if let Err(e) = state.borrow_mut().alloc(val1.clone(), ptr1) { return Err(JsError::from(e)); } if let Err(e) = state.borrow_mut().alloc(val2.clone(), ptr2) { return Err(JsError::from(e)); } let v = match *op { And => { println!("{:?}", val1); if b!(val1) == false { JsBool(false) } else { JsBool(b!(val2)) } } Or => JsBool(b!(val1) || b!(val2)), Ge => JsBool(b!(val1) >= b!(val2)), Gt => JsBool(b!(val1) > b!(val2)), Le => JsBool(b!(val1) <= b!(val2)), Lt => JsBool(b!(val1) < b!(val2)), Neq => { if let Ok(JsBool(b)) = eval_binop(&Eql, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } Eql => { let b = match (&val1.t, &val2.t) { (&JsNull, &JsNull) => false, (&JsUndef, &JsNull) => false, (&JsNull, &JsUndef) => false, (&JsUndef, &JsUndef) => false, (&JsNum(ref n1), &JsNum(ref n2)) => n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } }, (&JsNum(ref n), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsPtr(_), &JsNum(ref n)) => try_load!(state, &val2,val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsBool(_), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val1) == n!(ptr)), (&JsPtr(_), &JsBool(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val2) == n!(ptr)), _ => false, }; JsBool(b) } EqlStrict => { let b = match (&val1.t, &val2.t) { (&JsNull, &JsNull) => true, (&JsUndef, &JsUndef) => true, (&JsNum(ref n1), &JsNum(ref n2)) 
=> n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } } _ => false, }; JsBool(b) } NeqStrict => { if let Ok(JsBool(b)) = eval_binop(&EqlStrict, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } BitOr => JsNum((ni32!(val1) | ni32!(val2)) as f64), BitXor => JsNum((ni32!(val1) ^ ni32!(val2)) as f64), BitAnd => JsNum((ni32!(val1) & ni32!(val2)) as f64), ShiftLeft => JsNum(0.0), ShiftRight => JsNum(0.0), ShiftRightUnsigned => JsNum(0.0), Minus => JsNum(n!(val1) - n!(val2)), Plus => JsNum(n!(val1) + n!(val2)), Slash => JsNum(n!(val1) / n!(val2)), Star => JsNum(n!(val1) * n!(val2)), Mod => JsNum(n!(val1) % n!(val2)), Exponent => JsNum(n!(val1) % n!(val2)), InstanceOf => { let ptr = try_load!(state, &val1, val1_is_instance_var); let b = match (ptr, &val2.t) { (Some(JsObj(ref obj)), &JsPtr(JsPtrTag::NativeFn { ref name})) => &obj.name == name, (_, &JsPtr(JsPtrTag::NativeFn {..})) => false, (_, &JsPtr(JsPtrTag::JsFn{..})) => false, _ => { let ptr2 = try_load!(state, &val2, val2_is_instance_var); let err_str = ptr2.map(|p| p.as_string()).unwrap_or(val2.t.as_string()); return Err(JsError::TypeError(format!("Expecting a function in instanceof check, but got {}", err_str))); } }; JsBool(b) } }; Ok(v) }
use std::cell::RefCell; use std::rc::Rc; use french_press::ScopeManager; use jsrs_common::backend::Backend; use jsrs_common::ast::*; use jsrs_common::ast::BinOp::*; use jsrs_common::types::coerce::{AsBool, AsNumber, AsString}; use jsrs_common::types::js_var::{JsVar, JsType}; use jsrs_common::types::js_var::JsPtrEnum::*; use jsrs_common::types::js_var::JsType::*; use jsrs_common::types::js_var::JsPtrTag; use jsrs_common::js_error::{self, JsError}; use eval::eval_exp; macro_rules! b { ($e: expr) => { $e.as_bool() } } macro_rules! n { ($e: expr) => { $e.as_number() } } macro_rules! ni64 { ($e: expr) => { $e.as_number() as i64 } } macro_rules! nu64 { ($e: expr) => { $e.as_number() as u64 } } macro_rules! ni32 { ($e: expr) => { { let n = $e.as_number(); if n.is_nan() { 0i32 } else { n as i32 } } } } macro_rules! nu32 { ($e: expr) => { $e.as_number() as u32 } } pub fn eval_binop(op: &BinOp, e1: &Exp, e2: &Exp, state: Rc<RefCell<ScopeManager>>) -> js_error::Result<JsType> { if let &And = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == false { JsBool(false) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } else if let &Or = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == true { JsBool(true) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } let val1_is_instance_var = match e1 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) => true, _ => false }; let val2_is_instance_var = match e2 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) 
=> true, _ => false }; let (val1, ptr1) = try!(eval_exp(e1, state.clone())); let (val2, ptr2) = try!(eval_exp(e2, state.clone())); if let Err(e) = state.borrow_mut().alloc(val1.clone(), ptr1) { return Err(JsError::from(e)); } if let Err(e) = state.borrow_mut().alloc(val2.clone(), ptr2) { return Err(JsError::from(e)); } let v = match *op { And => { println!("{:?}", val1); if b!(val1) == false { JsBool(false) } else { JsBool(b!(val2)) } } Or => JsBool(b!(val1) || b!(val2)), Ge => JsBool(b!(val1) >= b!(val2)), Gt => JsBool(b!(val1) > b!(val2)), Le => JsBool(b!(val1) <= b!(val2)), Lt => JsBool(b!(val1) < b!(val2)), Neq => { if let Ok(JsBool(b)) = eval_binop(&Eql, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } Eql => { let b = match (&val1.t, &val2.t) { (&JsNull, &JsNull) => false, (&JsUndef, &JsNull) => false, (&JsNull, &JsUndef) => false, (&JsUndef, &JsUndef) => false, (&JsNum(ref n1), &JsNum(ref n2)) => n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } }, (&JsNum(ref n), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsPtr(_), &JsNum(ref n)) => try_load!(state, &val2,val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsBool(_), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val1) == n!(ptr)), (&JsPtr(_), &JsBool(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val2) == n!(ptr)), _ => false, }; JsBool(b) } EqlStrict => { let b =
; JsBool(b) } NeqStrict => { if let Ok(JsBool(b)) = eval_binop(&EqlStrict, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } BitOr => JsNum((ni32!(val1) | ni32!(val2)) as f64), BitXor => JsNum((ni32!(val1) ^ ni32!(val2)) as f64), BitAnd => JsNum((ni32!(val1) & ni32!(val2)) as f64), ShiftLeft => JsNum(0.0), ShiftRight => JsNum(0.0), ShiftRightUnsigned => JsNum(0.0), Minus => JsNum(n!(val1) - n!(val2)), Plus => JsNum(n!(val1) + n!(val2)), Slash => JsNum(n!(val1) / n!(val2)), Star => JsNum(n!(val1) * n!(val2)), Mod => JsNum(n!(val1) % n!(val2)), Exponent => JsNum(n!(val1) % n!(val2)), InstanceOf => { let ptr = try_load!(state, &val1, val1_is_instance_var); let b = match (ptr, &val2.t) { (Some(JsObj(ref obj)), &JsPtr(JsPtrTag::NativeFn { ref name})) => &obj.name == name, (_, &JsPtr(JsPtrTag::NativeFn {..})) => false, (_, &JsPtr(JsPtrTag::JsFn{..})) => false, _ => { let ptr2 = try_load!(state, &val2, val2_is_instance_var); let err_str = ptr2.map(|p| p.as_string()).unwrap_or(val2.t.as_string()); return Err(JsError::TypeError(format!("Expecting a function in instanceof check, but got {}", err_str))); } }; JsBool(b) } }; Ok(v) }
match (&val1.t, &val2.t) { (&JsNull, &JsNull) => true, (&JsUndef, &JsUndef) => true, (&JsNum(ref n1), &JsNum(ref n2)) => n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } } _ => false, }
if_condition
[ { "content": "/// Evaluate an expression into a JsVar.\n\npub fn eval_exp(e: &Exp, state: Rc<RefCell<ScopeManager>>) -> js_error::Result<JsVarValue> {\n\n match e {\n\n // [ e1, e2, ... ]\n\n &Array(ref elems) => {\n\n let proto = Some(Box::new(get_array_proto(elems.len() as f64, state.clone())));\n\n\n\n let mut kv_tuples = Vec::new();\n\n for (i, elem) in elems.iter().enumerate() {\n\n let i_str = format!(\"{}\", i);\n\n let (mut var, ptr) = try!(eval_exp(elem, state.clone()));\n\n let key = js_str_key(&i_str);\n\n var.mangle(&i_str);\n\n kv_tuples.push((key, var, ptr));\n\n }\n\n\n\n let mut state_ref = state.borrow_mut();\n\n let obj = JsObjStruct::new(proto, \"array\", kv_tuples,\n\n &mut *(state_ref.alloc_box.borrow_mut()));\n\n\n\n Ok((JsVar::new(JsType::JsPtr(JsPtrTag::JsObj)), Some(JsPtrEnum::JsObj(obj))))\n", "file_path": "src/eval/mod.rs", "rank": 1, "score": 273686.880368861 }, { "content": "pub fn is_nan(state: Rc<RefCell<Backend>>, _this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>) -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n let number = match args.first() {\n\n Some(&(ref var, Some(JsPtrEnum::JsObj(ref obj)))) if obj.proto.is_some() && obj.name == \"array\" => {\n\n let string = try!(array_to_string_helper(state.clone(), var.clone(), obj.clone()));\n\n JsPtrEnum::JsStr(JsStrStruct::new(&string)).as_number()\n\n }\n\n Some(&(_, Some(ref ptr))) => ptr.as_number(),\n\n Some(&(ref var, None)) => var.as_number(),\n\n None => 0.0\n\n };\n\n\n\n Ok((JsVar::new(JsType::JsBool(number.is_nan())), None))\n\n}\n\n\n", "file_path": "src/native/stdlib.rs", "rank": 2, "score": 257093.23312617675 }, { "content": "pub fn object(state: Rc<RefCell<Backend>>, _this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>) -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n let state_ref = state.borrow_mut();\n\n let alloc_box = state_ref.get_alloc_box();\n\n let (var, ptr) = args.first().map(|&(ref var, ref ptr)| 
(var.clone(), ptr.clone())).unwrap_or((\n\n JsVar::new(JsType::JsPtr(JsPtrTag::JsObj)),\n\n Some(JsPtrEnum::JsObj(JsObjStruct::new(None, \"Object\", Vec::new(), &mut *(alloc_box.borrow_mut()))))\n\n ));\n\n\n\n Ok((var, ptr))\n\n}\n\n\n", "file_path": "src/native/types.rs", "rank": 3, "score": 257093.23312617672 }, { "content": "pub fn number(state: Rc<RefCell<Backend>>, _this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>) -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n let number = match args.first() {\n\n Some(&(ref var, Some(JsPtrEnum::JsObj(ref obj)))) if obj.proto.is_some() && obj.name == \"array\" => {\n\n let string = try!(array_to_string_helper(state.clone(), var.clone(), obj.clone()));\n\n JsPtrEnum::JsStr(JsStrStruct::new(&string)).as_number()\n\n }\n\n Some(&(_, Some(ref ptr))) => ptr.as_number(),\n\n Some(&(ref var, None)) => var.as_number(),\n\n None => 0.0\n\n };\n\n\n\n Ok((JsVar::new(JsType::JsNum(number)), None))\n\n}\n\n\n", "file_path": "src/native/types.rs", "rank": 4, "score": 257093.23312617675 }, { "content": "pub fn string(state: Rc<RefCell<Backend>>, _this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>) -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n let string = match args.first() {\n\n Some(&(ref var, Some(JsPtrEnum::JsObj(ref obj)))) if obj.proto.is_some() && obj.name == \"array\" =>\n\n try!(array_to_string_helper(state.clone(), var.clone(), obj.clone())),\n\n Some(&(_, Some(ref ptr))) => ptr.as_string(),\n\n Some(&(ref var, None)) => var.t.as_string(),\n\n None => String::new()\n\n };\n\n\n\n\n\n Ok((JsVar::new(JsType::JsPtr(JsPtrTag::JsStr)), Some(JsPtrEnum::JsStr(JsStrStruct::new(&string)))))\n\n}\n", "file_path": "src/native/types.rs", "rank": 5, "score": 257093.23312617672 }, { "content": "// Helper to avoid repeating this everywhere\n\npub fn scalar(v: JsType) -> (JsVar, Option<JsPtrEnum>) {\n\n (JsVar::new(v), None)\n\n}\n\n\n", "file_path": "src/var.rs", "rank": 6, 
"score": 255106.99781559868 }, { "content": "pub fn array_to_string(state: Rc<RefCell<Backend>>, this: Option<(JsVar, JsPtrEnum)>,\n\n _args: Vec<(JsVar, Option<JsPtrEnum>)>)\n\n -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n let this_obj = match this.clone() {\n\n Some((_, JsPtrEnum::JsObj(obj))) => obj,\n\n Some(_) => panic!(\"Trying to push onto array, but `this` is not an object\"),\n\n None => panic!(\"Trying to push onto array, but `this` is None\")\n\n };\n\n\n\n let length_ptr = match this_obj.dict.get(&js_str_key(\"length\")) {\n\n Some(js_var) => {\n\n let state_ref = state.borrow_mut();\n\n let alloc_box = state_ref.get_alloc_box();\n\n let alloc_ref = alloc_box.borrow_mut();\n\n match alloc_ref.find_id(&js_var.unique).map(|p| p.borrow().clone()) {\n\n Some(JsPtrEnum::NativeVar(native_var)) => native_var,\n\n Some(_) => panic!(\"Array length pointer is not a native variable\"),\n\n None => panic!(\"No pointer for array length\"),\n\n }\n\n }\n", "file_path": "src/native/array.rs", "rank": 7, "score": 254302.632138652 }, { "content": "pub fn array_push(state: Rc<RefCell<Backend>>, this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>)\n\n -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n // TODO: Change panics to actual errors\n\n let (this_var, mut this_obj) = match this.clone() {\n\n Some((v, JsPtrEnum::JsObj(obj))) => (v, obj),\n\n Some(_) => panic!(\"Trying to push onto array, but `this` is not an object\"),\n\n None => panic!(\"Trying to push onto array, but `this` is None\")\n\n };\n\n\n\n let mut length_ptr = match this_obj.dict.get(&js_str_key(\"length\")) {\n\n Some(js_var) => {\n\n let state_ref = state.borrow_mut();\n\n let alloc_box = state_ref.get_alloc_box();\n\n let alloc_ref = alloc_box.borrow_mut();\n\n match alloc_ref.find_id(&js_var.unique).map(|p| p.borrow().clone()) {\n\n Some(JsPtrEnum::NativeVar(native_var)) => native_var,\n\n Some(_) => panic!(\"Array length pointer is not a native variable\"),\n\n 
None => panic!(\"No pointer for array length\"),\n\n }\n", "file_path": "src/native/array.rs", "rank": 8, "score": 254302.632138652 }, { "content": "pub fn array_length_setter(state: Rc<RefCell<Backend>>, old_var: JsVar, old_ptr: Option<JsPtrEnum>,\n\n this: Option<(JsVar, JsPtrEnum)>, new_var: JsVar, new_ptr: Option<JsPtrEnum>) -> JsVarValue {\n\n let new_len = var_type_as_number(&new_var, new_ptr.as_ref());\n\n let old_len = var_type_as_number(&old_var, old_ptr.as_ref());\n\n\n\n if !(new_len.is_normal() || new_len == 0.0) || new_len.is_sign_negative() || new_len != new_len.trunc() {\n\n // TODO: Return a `RangeError` instead of panicking.\n\n panic!(\"Invalid array length:\\n var: {:#?}\\nptr: {:#?}\", new_var, new_ptr);\n\n }\n\n\n\n let (this_var, mut this_obj) = match this.clone() {\n\n Some((v, JsPtrEnum::JsObj(obj))) => (v, obj),\n\n Some(_) => panic!(\"Trying to set array length, but `this` is not an object\"),\n\n None => panic!(\"Trying to set array length, but `this` is None\")\n\n };\n\n\n\n let new_len_int = new_len as i32;\n\n let old_len_int = old_len as i32;\n\n\n\n let state_ref = state.borrow_mut();\n", "file_path": "src/native/array.rs", "rank": 9, "score": 251313.6514296953 }, { "content": "pub fn boolean(_state: Rc<RefCell<Backend>>, _this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>) -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n let boolean = args.first().map(|&(ref var, ref ptr)| ptr.as_ref().map(|p| p.as_bool()).unwrap_or(var.as_bool()));\n\n Ok((JsVar::new(JsType::JsBool(boolean.unwrap_or(false))), None))\n\n}\n\n\n", "file_path": "src/native/types.rs", "rank": 10, "score": 240909.63636797963 }, { "content": "pub fn add_pervasives(state: Rc<RefCell<ScopeManager>>) {\n\n add_native_fn!(log::log, state, \"log\");\n\n add_native_fn!(log::error, state, \"$ERROR\");\n\n add_native_fn!(types::boolean, state, \"Boolean\");\n\n add_native_fn!(types::number, state, \"Number\");\n\n add_native_fn!(types::string, 
state, \"String\");\n\n add_native_fn!(types::object, state, \"Object\");\n\n\n\n add_native_fn!(stdlib::is_nan, state, \"isNaN\");\n\n add_array(state)\n\n}\n\n\n", "file_path": "src/native/mod.rs", "rank": 11, "score": 220114.60440648985 }, { "content": "/// Evaluate a single JS statement (which may be a block or sequence of statements).\n\n/// Returns tuple of (evaluated final value, return value), where return value requires that\n\n/// `return` be used to generate it.\n\npub fn eval_stmt(s: &Stmt, state: Rc<RefCell<ScopeManager>>)\n\n -> js_error::Result<(JsVarValue, JsReturnValue)> {\n\n match *s {\n\n // var_string = exp;\n\n Assign(ref lhs, ref exp) => {\n\n let (rhs_var, rhs_ptr) = try!(eval_exp(exp, state.clone()));\n\n\n\n let var = match lhs {\n\n &Var(ref string) => {\n\n let result = state.borrow_mut().load(&Binding::new(string.to_owned()));\n\n let (mut var, ptr) = match result {\n\n Ok((v, p)) => (v, p),\n\n Err(GcError::Load(_)) => return eval_stmt(&Decl(string.to_owned(), exp.clone()), state.clone()),\n\n Err(e) => return Err(JsError::from(e)),\n\n };\n\n\n\n match ptr {\n\n Some(JsPtrEnum::NativeVar(mut native_var)) => {\n\n native_var.set(state.clone(), None, rhs_var, rhs_ptr);\n\n return Ok(((native_var.var, native_var.ptr.map(|x| *x)), None));\n", "file_path": "src/eval/mod.rs", "rank": 12, "score": 207176.80837059213 }, { "content": "pub fn eval_stmt_block(block: &Vec<Stmt>, state: Rc<RefCell<ScopeManager>>)\n\n -> js_error::Result<(JsVarValue, JsReturnValue)> {\n\n let mut ret = (scalar(JsUndef), None);\n\n for stmt in &*block {\n\n ret = try!(eval_stmt(stmt, state.clone()));\n\n if let Some(..) 
= ret.1 {\n\n return Ok(ret);\n\n }\n\n }\n\n Ok(ret)\n\n}\n\n\n", "file_path": "src/eval/mod.rs", "rank": 13, "score": 196371.98082249617 }, { "content": "pub fn log(_scope: Rc<RefCell<Backend>>, _this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>) -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n match args.first() {\n\n Some(&(_, Some(ref var))) => println!(\"{}\", var.as_string()),\n\n Some(&(ref var, _)) => println!(\"{}\", var.t.as_string()),\n\n None => println!(\"\")\n\n };\n\n\n\n Ok((JsVar::new(JsType::JsNull), None))\n\n}\n\n\n", "file_path": "src/native/log.rs", "rank": 14, "score": 192813.77022124952 }, { "content": "pub fn get_array_proto(len: f64, state: Rc<RefCell<ScopeManager>>) -> JsObjStruct {\n\n let (zero, undef) = scalar(JsType::JsNum(len));\n\n let array_length = NativeVar::new(zero, undef, \"length\", default_getter, array::array_length_setter);\n\n let array_push = NativeFn::new(array::array_push);\n\n let array_to_string = NativeFn::new(array::array_to_string);\n\n\n\n let length_var = JsVar::new(JsType::JsPtr(JsPtrTag::NativeVar { type_string: String::from(\"number\") }));\n\n let length_ptr = JsPtrEnum::NativeVar(array_length);\n\n\n\n let push_var = JsVar::new(JsType::JsPtr(JsPtrTag::NativeFn { name: String::from(\"push\") }));\n\n let push_ptr = JsPtrEnum::NativeFn(array_push);\n\n\n\n let to_string_var = JsVar::new(JsType::JsPtr(JsPtrTag::NativeFn { name: String::from(\"toString\") }));\n\n let to_string_ptr = JsPtrEnum::NativeFn(array_to_string);\n\n\n\n\n\n let mut state_ref = state.borrow_mut();\n\n\n\n // Not really sure what the `name` argument is for, but okay\n\n let mut array_proto = JsObjStruct::new(\n", "file_path": "src/native/mod.rs", "rank": 15, "score": 192246.18859075 }, { "content": "pub fn array_to_string_helper(state: Rc<RefCell<Backend>>, var: JsVar, obj: JsObjStruct) -> js_error::Result<String> {\n\n let o_this = Some((var, JsPtrEnum::JsObj(obj)));\n\n let (o_var, o_ptr) = 
try!(array_to_string(state.clone(), o_this, Vec::new()));\n\n Ok(o_ptr.map(|p| p.as_string()).unwrap_or(o_var.t.as_string()))\n\n}\n\n\n", "file_path": "src/native/types.rs", "rank": 16, "score": 184496.74334176505 }, { "content": "/// Evaluate a string containing some JavaScript statements (or sequences of statements).\n\n/// Returns a JsVar which is the return value of those statements.\n\npub fn eval_string(string: &str, state: Rc<RefCell<ScopeManager>>) -> js_error::Result<JsVarValue> {\n\n match parse_Stmt(string) {\n\n Ok(stmt) => {\n\n Ok(try!(eval_stmt(&stmt, state)).0)\n\n }\n\n Err(e) => Err(JsError::ParseError(format!(\"{:?}\", e))),\n\n }\n\n}\n\n\n", "file_path": "src/eval/mod.rs", "rank": 17, "score": 181685.07531957692 }, { "content": "fn var_type_as_number(var: &JsVar, ptr: Option<&JsPtrEnum>) -> f64 {\n\n match ptr {\n\n Some(ref ptr) => ptr.as_number(),\n\n None => var.as_number()\n\n }\n\n}\n\n\n", "file_path": "src/native/array.rs", "rank": 18, "score": 171920.6597682719 }, { "content": "#[inline]\n\npub fn js_str_key(key: &str) -> JsKey {\n\n JsKey::JsStr(JsStrStruct::new(key))\n\n}\n\n\n\n/// Loads a pointer from the scope, and returns JsError::undefined if not found.\n\nmacro_rules! 
try_load {\n\n ($state:ident, $var:expr, $is_instance_var:expr) => {{\n\n let mut state_ref = $state.borrow_mut();\n\n\n\n if $is_instance_var {\n\n state_ref.alloc_box.borrow_mut().find_id(&$var.unique).map(|p| p.borrow().clone())\n\n } else { \n\n match state_ref.load(&$var.binding) {\n\n Ok((_, ptr)) => ptr,\n\n Err(_) => return Err(JsError::undefined(&$var.binding.0)),\n\n }\n\n }\n\n }}\n\n}\n", "file_path": "src/var.rs", "rank": 19, "score": 154661.00110430483 }, { "content": "fn add_array(state: Rc<RefCell<ScopeManager>>) {\n\n let array_var = JsVar::bind(\"Array\", JsType::JsPtr(JsPtrTag::JsObj));\n\n let array_ptr = Some(JsPtrEnum::JsObj(get_array_proto(0.0, state.clone())));\n\n let mut state_ref = state.borrow_mut();\n\n state_ref.alloc(array_var, array_ptr).expect(\"Unable to alloc Array prototype\");\n\n}\n", "file_path": "src/native/mod.rs", "rank": 20, "score": 152996.42152526445 }, { "content": "pub fn add_semicolon(mut input: String) -> String {\n\n if !input.ends_with(';') && !input.ends_with(\"*/\") && !input.ends_with('}') {\n\n input.push_str(\";\")\n\n }\n\n input\n\n}\n", "file_path": "src/preprocess.rs", "rank": 21, "score": 151762.64525056104 }, { "content": "pub fn clean_string(mut input: String) -> String {\n\n input = String::from(input.trim());\n\n input = unescape(&input).unwrap_or(String::from(\"\"));\n\n\n\n if input == \"\" {\n\n return input;\n\n }\n\n\n\n // remove line-comments\n\n let mut last = '\\0';\n\n let mut len = input.len();\n\n for (i, c) in input.chars().enumerate() {\n\n if last == '/' && c == '/' {\n\n len = i-1;\n\n break;\n\n }\n\n last = c;\n\n }\n\n\n\n input.truncate(len);\n\n\n\n input\n\n}\n\n\n", "file_path": "src/preprocess.rs", "rank": 22, "score": 151762.64525056104 }, { "content": "pub fn error(_scope: Rc<RefCell<Backend>>,\n\n _this: Option<(JsVar, JsPtrEnum)>,\n\n args: Vec<(JsVar, Option<JsPtrEnum>)>) -> js_error::Result<(JsVar, Option<JsPtrEnum>)> {\n\n let s = match args.first() {\n\n Some(&(_, 
Some(ref var))) => var.as_string(),\n\n Some(&(ref var, _)) => var.t.as_string(),\n\n None => String::from(\"\"),\n\n };\n\n\n\n println!(\"{}\", s);\n\n\n\n //let (var, ptr) = try!(eval_exp(exp, state));\n\n Err(JsError::TestError(s))\n\n //(JsVar::new(JsType::JsNull), None)\n\n}\n", "file_path": "src/native/log.rs", "rank": 23, "score": 150031.47162587394 }, { "content": "fn repl(scope_manager: Rc<RefCell<ScopeManager>>) -> i32 {\n\n let mut rl = Editor::new();\n\n let mut stderr = io::stderr();\n\n\n\n if metadata(\".history\").is_ok() && rl.load_history(\".history\").is_err() {\n\n writeln!(stderr, \"Error: unable to load history on startup\").unwrap();\n\n }\n\n\n\n loop {\n\n // prompt\n\n let readline = rl.readline(\">> \");\n\n\n\n match readline {\n\n Ok(line) => {\n\n rl.add_history_entry(&line);\n\n let input = add_semicolon(clean_string(String::from(line)));\n\n if input == \"\" {\n\n continue;\n\n }\n\n rl.add_history_entry(&input);\n", "file_path": "src/main.rs", "rank": 24, "score": 148393.02565814907 }, { "content": "fn default_getter(_backend: Rc<RefCell<Backend>>, var: JsVar, ptr: Option<JsPtrEnum>, _this: Option<JsPtrEnum>) -> JsVarValue {\n\n (var, ptr)\n\n}\n\n\n", "file_path": "src/native/mod.rs", "rank": 25, "score": 144335.5210396747 }, { "content": "fn main() {\n\n let args: Args = Args::docopt().decode().unwrap_or_else(|e| e.exit());\n\n if args.flag_test {\n\n let dir_name = \"sputnik\";\n\n\n\n test_dir(String::from(dir_name))\n\n } else {\n\n let scope_manager = Rc::new(RefCell::new(init_gc()));\n\n add_pervasives(scope_manager.clone());\n\n\n\n if args.arg_file == \"\" {\n\n let ret = repl(scope_manager.clone());\n\n exit(ret)\n\n } else {\n\n eval_file(args.arg_file, true, true, scope_manager.clone())\n\n .expect(\"Error evaluating file\");\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 26, "score": 101536.78173185134 }, { "content": "fn test_dir(dir_name: String) {\n\n for entry in WalkDir::new(dir_name) {\n\n let entry = 
entry.unwrap();\n\n if !entry.path().is_dir() {\n\n let entry_path = entry.path().display().to_string();\n\n let scope_manager = Rc::new(RefCell::new(init_gc()));\n\n add_pervasives(scope_manager.clone());\n\n println!(\"file: {}\", entry_path.clone());\n\n match eval_file(entry_path.clone(), false, false, scope_manager.clone()) {\n\n Ok(_) => {\n\n println!(\"{}: {}\", entry_path, \"OK\");\n\n }\n\n Err(e) => {\n\n println!(\"{}: {}\", entry_path, e);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 27, "score": 86459.86370821694 }, { "content": "fn eval_file(filename: String, debug: bool, should_repl: bool,\n\n scope_manager: Rc<RefCell<ScopeManager>>) -> js_error::Result<()> {\n\n if debug {\n\n println!(\"Reading from \\\"{}\\\"\", filename);\n\n }\n\n\n\n let path = Path::new(&filename);\n\n let file = File::open(&path)\n\n .expect(&format!(\"Cannot open \\\"{}\\\": no such file or directory\", filename));\n\n let file_buffer = BufReader::new(file);\n\n\n\n // Build lines\n\n let mut line_builder = String::new();\n\n let mut braces = Vec::new();\n\n let mut negative_test = false;\n\n\n\n let mut file_iter = file_buffer.lines();\n\n while let Some(line) = file_iter.next() {\n\n let input = String::from(line.expect(&format!(\"Cannot read from {}\", filename)));\n\n let input = clean_string(input);\n", "file_path": "src/main.rs", "rank": 28, "score": 73093.19343689963 }, { "content": "var x1=1;\n", "file_path": "sputnik/12_Statement/12.9_The_return_Statement/S12.9_A5.js", "rank": 29, "score": 51910.841702543985 }, { "content": "var x=1;\n", "file_path": "sputnik/12_Statement/12.9_The_return_Statement/S12.9_A1_T2.js", "rank": 30, "score": 51910.841702543985 }, { "content": "var x2=1;\n", "file_path": "sputnik/12_Statement/12.9_The_return_Statement/S12.9_A5.js", "rank": 31, "score": 51910.841702543985 }, { "content": "var y=2;\n", "file_path": "sputnik/12_Statement/12.9_The_return_Statement/S12.9_A1_T1.js", "rank": 32, "score": 
51910.841702543985 }, { "content": "var x3=1;\n", "file_path": "sputnik/12_Statement/12.9_The_return_Statement/S12.9_A5.js", "rank": 33, "score": 51910.841702543985 }, { "content": "var x=1;\n", "file_path": "sputnik/12_Statement/12.9_The_return_Statement/S12.9_A1_T1.js", "rank": 34, "score": 51910.841702543985 }, { "content": "var y=2;\n", "file_path": "sputnik/12_Statement/12.9_The_return_Statement/S12.9_A1_T2.js", "rank": 35, "score": 51910.841702543985 }, { "content": "var _ = 1;\n", "file_path": "sputnik/07_Lexical_Conventions/7.6_Identifiers/S7.6_A1.3_T1.js", "rank": 36, "score": 51216.42704283511 }, { "content": "var n = 1;\n", "file_path": "sputnik/08_Types/8.7_The_Reference_Type/S8.7_A6.js", "rank": 37, "score": 51215.03189501628 }, { "content": "var a=1,b=2,d=4,e=5;\n", "file_path": "sputnik/07_Lexical_Conventions/7.9_Automatic_Semicolon_Insertion/7.9.2_Examples_of_Automatic_Semicolon_Insertion/S7.9.2_A1_T7.js", "rank": 38, "score": 51215.03189501628 }, { "content": "var n = {};\n", "file_path": "sputnik/08_Types/8.7_The_Reference_Type/S8.7_A7.js", "rank": 39, "score": 51215.03189501628 }, { "content": "var callee=0, b;\n", "file_path": "sputnik/13_Function_Definition/13.2_Creating_Function_Objects/S13.2.2_A18_T1.js", "rank": 40, "score": 51191.083455297616 }, { "content": "var b=false;\n", "file_path": "sputnik/12_Statement/12.13_The_throw_statement/S12.13_A3_T5.js", "rank": 41, "score": 51186.42546336667 }, { "content": "var b=10;\n", "file_path": "sputnik/12_Statement/12.13_The_throw_statement/S12.13_A3_T3.js", "rank": 42, "score": 51186.42546336667 }, { "content": "var b=13;\n", "file_path": "sputnik/12_Statement/12.13_The_throw_statement/S12.13_A2_T5.js", "rank": 43, "score": 51186.42546336667 }, { "content": "var a=1,b=2,d=4,e=5;\n", "file_path": "sputnik/07_Lexical_Conventions/7.9_Automatic_Semicolon_Insertion/7.9.2_Examples_of_Automatic_Semicolon_Insertion/S7.9.2_A1_T7.js", "rank": 44, "score": 51186.42546336667 }, { "content": "var b=\"exception 
#1\";\n", "file_path": "sputnik/12_Statement/12.13_The_throw_statement/S12.13_A2_T4.js", "rank": 45, "score": 51186.42546336667 }, { "content": "var b;\n", "file_path": "sputnik/13_Function_Definition/13.2_Creating_Function_Objects/S13.2.2_A18_T2.js", "rank": 46, "score": 51186.42546336667 }, { "content": "var b=\"exception\"\n", "file_path": "sputnik/12_Statement/12.13_The_throw_statement/S12.13_A3_T2.js", "rank": 47, "score": 51186.42546336667 }, { "content": "var b=true;\n", "file_path": "sputnik/12_Statement/12.14_The_try_Statement/S12.14_A18_T3.js", "rank": 48, "score": 51186.42546336667 }, { "content": "var a=1,b=2;\n", "file_path": "sputnik/07_Lexical_Conventions/7.9_Automatic_Semicolon_Insertion/7.9.2_Examples_of_Automatic_Semicolon_Insertion/S7.9.2_A1_T4.js", "rank": 49, "score": 51186.42546336667 }, { "content": "var a=1,b=2,c=3,d;\n", "file_path": "sputnik/07_Lexical_Conventions/7.9_Automatic_Semicolon_Insertion/7.9.2_Examples_of_Automatic_Semicolon_Insertion/S7.9.2_A1_T6.js", "rank": 50, "score": 51186.42546336667 }, { "content": "var b=10;\n", "file_path": "sputnik/12_Statement/12.14_The_try_Statement/S12.14_A18_T5.js", "rank": 51, "score": 51186.42546336667 }, { "content": "var a = 1, b = \"a\";\n", "file_path": "sputnik/13_Function_Definition/13.2_Creating_Function_Objects/S13.2.2_A19_T8.js", "rank": 52, "score": 51186.42546336667 }, { "content": "var b=true;\n", "file_path": "sputnik/12_Statement/12.13_The_throw_statement/S12.13_A2_T3.js", "rank": 53, "score": 51186.42546336667 }, { "content": "var a=1,b=2,c=3;\n", "file_path": "sputnik/07_Lexical_Conventions/7.9_Automatic_Semicolon_Insertion/7.9.2_Examples_of_Automatic_Semicolon_Insertion/S7.9.2_A1_T5.js", "rank": 54, "score": 51186.42546336667 }, { "content": "var b=\" #1\";\n", "file_path": "sputnik/12_Statement/12.14_The_try_Statement/S12.14_A18_T4.js", "rank": 55, "score": 51186.42546336667 }, { "content": "var b=true;\n", "file_path": 
"sputnik/12_Statement/12.13_The_throw_statement/S12.13_A3_T1.js", "rank": 56, "score": 51186.42546336667 }, { "content": "var NaN=1.0;\n", "file_path": "sputnik/08_Types/8.5_The_Number_Type/S8.5_A4.js", "rank": 57, "score": 50997.22998999568 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.10_T4.js", "rank": 58, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T5.js", "rank": 59, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T5.js", "rank": 60, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.12_T1.js", "rank": 61, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.11_T2.js", "rank": 62, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T1.js", "rank": 63, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.11_T3.js", "rank": 64, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T1.js", "rank": 65, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.11_T2.js", "rank": 66, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T3.js", "rank": 67, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": 
"sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T1.js", "rank": 68, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.12_T2.js", "rank": 69, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T4.js", "rank": 70, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T3.js", "rank": 71, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T4.js", "rank": 72, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.10_T5.js", "rank": 73, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T1.js", "rank": 74, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T4.js", "rank": 75, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T2.js", "rank": 76, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T3.js", "rank": 77, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T4.js", "rank": 78, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.10_T4.js", "rank": 79, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": 
"sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.12_T2.js", "rank": 80, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T2.js", "rank": 81, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T2.js", "rank": 82, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.12_T1.js", "rank": 83, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T5.js", "rank": 84, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.11_T3.js", "rank": 85, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T5.js", "rank": 86, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.3_T3.js", "rank": 87, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.10_T5.js", "rank": 88, "score": 50205.627645443106 }, { "content": "var st_NaN = \"NaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.11_T1.js", "rank": 89, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.11_T1.js", "rank": 90, "score": 50205.627645443106 }, { "content": "var st_isNaN = \"isNaN\";\n", "file_path": "sputnik/12_Statement/12.10_The_with_Statement/S12.10_A1.2_T2.js", "rank": 91, "score": 50205.627645443106 }, { "content": " }\n\n &BitNot(ref exp) => {\n\n let i = 
try!(eval_exp(exp, state.clone())).0.as_number() as i32;\n\n Ok(scalar(JsNum((!i) as f64)))\n\n }\n\n // e1 [op] e2\n\n &BinExp(ref e1, ref op, ref e2) => {\n\n let result = try!(eval_binop(op, e1, e2, state.clone()));\n\n Ok(scalar(result))\n\n }\n\n &Bool(b) => Ok(scalar(JsBool(b))),\n\n\n\n // fun_name([arg_exp1, arg_exps])\n\n &Call(ref fun_name, ref arg_exps) => {\n\n let ((fun_binding, fun_ptr), this) = match **fun_name {\n\n InstanceVar(ref lhs, ref name) => {\n\n let (obj_var, obj_ptr) = try!(eval_exp(lhs, state.clone()));\n\n let state_clone = state.clone();\n\n (try!(instance_var_eval!(obj_var.clone(), obj_ptr.clone(), name, state_clone)), obj_ptr.map(|x| (obj_var, x)))\n\n }\n", "file_path": "src/eval/mod.rs", "rank": 99, "score": 39.38699336008507 } ]
Rust
crates/zoon/src/routing/router.rs
afidegnum/MoonZoon
e8be4f08eedb295b01aa55295fe1ea2c2e24384a
use crate::{routing::decode_uri_component, *}; use futures_signals::signal::{channel, Sender}; use std::marker::PhantomData; use web_sys::MouseEvent; type UrlChangeSender = Sender<Option<Vec<String>>>; pub struct Router<R> { popstate_listener: SendWrapper<Closure<dyn Fn()>>, link_interceptor: SendWrapper<Closure<dyn Fn(MouseEvent)>>, url_change_sender: UrlChangeSender, _url_change_handle: TaskHandle, _route_type: PhantomData<R>, } impl<R: FromRouteSegments> Router<R> { pub fn new(on_route_change: impl FnOnce(Option<R>) + Clone + 'static) -> Self { let (url_change_sender, _url_change_handle) = setup_url_change_handler(on_route_change); Router { popstate_listener: setup_popstate_listener(url_change_sender.clone()), link_interceptor: setup_link_interceptor(url_change_sender.clone()), url_change_sender, _url_change_handle, _route_type: PhantomData, } } pub fn go<'a>(&self, to: impl IntoCowStr<'a>) { go(&self.url_change_sender, to); } pub fn replace<'a>(&self, with: impl IntoCowStr<'a>) { replace(&self.url_change_sender, with); } } impl<R> Drop for Router<R> { fn drop(&mut self) { window() .remove_event_listener_with_callback( "popstate", self.popstate_listener.as_ref().unchecked_ref(), ) .unwrap_throw(); document() .remove_event_listener_with_callback( "click", self.link_interceptor.as_ref().unchecked_ref(), ) .unwrap_throw(); } } fn setup_url_change_handler<R: FromRouteSegments>( on_route_change: impl FnOnce(Option<R>) + Clone + 'static, ) -> (UrlChangeSender, TaskHandle) { let on_route_change = move |route: Option<R>| on_route_change.clone()(route); let (url_change_sender, url_change_receiver) = channel(current_url_segments()); let url_change_handler = url_change_receiver.for_each(move |segments| { let route = segments.and_then(R::from_route_segments); on_route_change(route); async {} }); let url_change_handle = Task::start_droppable(url_change_handler); (url_change_sender, url_change_handle) } fn go<'a>(url_change_sender: &UrlChangeSender, to: impl IntoCowStr<'a>) { 
let to = to.into_cow_str(); if !to.starts_with('/') { return window().location().assign(&to).unwrap_throw(); } history() .push_state_with_url(&JsValue::NULL, "", Some(&to)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); } fn replace<'a>(url_change_sender: &UrlChangeSender, with: impl IntoCowStr<'a>) { let with = with.into_cow_str(); if !with.starts_with('/') { return window().location().replace(&with).unwrap_throw(); } history() .replace_state_with_url(&JsValue::NULL, "", Some(&with)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); } fn current_url_segments() -> Option<Vec<String>> { let path = window().location().pathname().unwrap_throw(); let mut segments = Vec::new(); for segment in path.trim_start_matches('/').split_terminator('/') { match decode_uri_component(segment) { Ok(segment) => segments.push(segment), Err(error) => { crate::eprintln!( "Cannot decode the URL segment '{}'. Error: {:#?}", segment, error ); None? } } } Some(segments) } fn setup_popstate_listener(url_change_sender: UrlChangeSender) -> SendWrapper<Closure<dyn Fn()>> { let closure = Closure::wrap(Box::new(move || { url_change_sender .send(current_url_segments()) .unwrap_throw(); }) as Box<dyn Fn()>); window() .add_event_listener_with_callback("popstate", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn setup_link_interceptor( url_change_sender: UrlChangeSender, ) -> SendWrapper<Closure<dyn Fn(MouseEvent)>> { let closure = Closure::wrap(Box::new(move |event| { link_click_handler(event, &url_change_sender); }) as Box<dyn Fn(MouseEvent)>); document() .add_event_listener_with_callback("click", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn link_click_handler(event: MouseEvent, url_change_sender: &UrlChangeSender) -> Option<()> { if event.ctrl_key() || event.meta_key() || event.shift_key() || event.button() != 0 { None? 
} let ws_element: web_sys::Element = event.target()?.dyn_into().ok()?; let a: web_sys::Element = ws_element .closest(r#"a[href^="/"]:not([download], [target="_blank"])"#) .ok()??; let href = a.get_attribute("href")?; event.prevent_default(); go(url_change_sender, href); Some(()) }
use crate::{routing::decode_uri_component, *}; use futures_signals::signal::{channel, Sender}; use std::marker::PhantomData; use web_sys::MouseEvent; type UrlChangeSender = Sender<Option<Vec<String>>>; pub struct Router<R> { popstate_listener: SendWrapper<Closure<dyn Fn()>>, link_interceptor: SendWrapper<Closure<dyn Fn(MouseEvent)>>, url_change_sender: UrlChangeSender, _url_change_handle: TaskHandle, _route_type: PhantomData<R>, } impl<R: FromRouteSegments> Router<R> { pub fn new(on_route_change: impl FnOnce(Option<R>) + Clone + 'static) -> Self { let (url_change_sender, _url_change_handle) = setup_url_change_handler(on_route_change); Router { popstate_listener: setup_popstate_listener(url_change_sender.clone()), link_interceptor: setup_link_interceptor(url_change_sender.clone()), url_change_sender, _url_change_handle, _route_type: PhantomData, } } pub fn go<'a>(&self, to: impl IntoCowStr<'a>) { go(&self.url_change_sender, to); } pub fn replace<'a>(&self, with: impl IntoCowStr<'a>) { replace(&self.url_change_sender, with); } } impl<R> Drop for Router<R> { fn drop(&mut self) { window() .remove_event_listener_with_callback( "popstate", self.popstate_listener.as_ref().unchecked_ref(), ) .unwrap_throw(); document() .remove_event_listener_with_callback( "click", self.link_interceptor.as_ref().unchecked_ref(), ) .unwrap_throw(); } } fn setup_url_change_handler<R: FromRouteSegments>( on_route_change: impl FnOnce(Option<R>) + Clone + 'static, ) -> (UrlChangeSender, TaskHandle) { let on_route_change = move |route: Option<R>| on_route_change.clone()(route); let (url_change_sender, url_change_receiver) = channel(current_url_segments()); let url_change_handler = url_change_receiver.for_each(move |segments| { let route = segments.and_then(R::from_route_segments); on_route_change(route); async {} }); let url_change_handle = Task::start_droppable(url_change_handler); (url_change_sender, url_change_handle) } fn go<'a>(url_change_sender: &UrlChangeSender, to: impl IntoCowStr<'a>) { 
let to = to.into_cow_str(); if !to.starts_with('/') { return window().location().assign(&to).unwrap_throw(); } history() .push_state_with_url(&JsValue::NULL, "", Some(&to)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); } fn replace<'a>(url_change_sender: &UrlChangeSender, with: impl IntoCowStr<'a>) { let with = with.into_cow_str(); if !with.starts_with('/') { return window().location().replace(&with).unwrap_throw(); } history() .replace_state_with_url(&JsValue::NULL, "", Some(&with)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); }
fn setup_popstate_listener(url_change_sender: UrlChangeSender) -> SendWrapper<Closure<dyn Fn()>> { let closure = Closure::wrap(Box::new(move || { url_change_sender .send(current_url_segments()) .unwrap_throw(); }) as Box<dyn Fn()>); window() .add_event_listener_with_callback("popstate", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn setup_link_interceptor( url_change_sender: UrlChangeSender, ) -> SendWrapper<Closure<dyn Fn(MouseEvent)>> { let closure = Closure::wrap(Box::new(move |event| { link_click_handler(event, &url_change_sender); }) as Box<dyn Fn(MouseEvent)>); document() .add_event_listener_with_callback("click", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn link_click_handler(event: MouseEvent, url_change_sender: &UrlChangeSender) -> Option<()> { if event.ctrl_key() || event.meta_key() || event.shift_key() || event.button() != 0 { None? } let ws_element: web_sys::Element = event.target()?.dyn_into().ok()?; let a: web_sys::Element = ws_element .closest(r#"a[href^="/"]:not([download], [target="_blank"])"#) .ok()??; let href = a.get_attribute("href")?; event.prevent_default(); go(url_change_sender, href); Some(()) }
/// Splits the current `location.pathname` into percent-decoded segments.
/// Returns `None` (after logging) as soon as any segment fails to decode;
/// an empty path yields `Some(vec![])`.
fn current_url_segments() -> Option<Vec<String>> {
    let path = window().location().pathname().unwrap_throw();
    path.trim_start_matches('/')
        .split_terminator('/')
        .map(|segment| match decode_uri_component(segment) {
            Ok(decoded) => Some(decoded),
            Err(error) => {
                crate::eprintln!(
                    "Cannot decode the URL segment '{}'. Error: {:#?}",
                    segment,
                    error
                );
                None
            }
        })
        // `collect` into `Option<Vec<_>>` short-circuits on the first `None`,
        // matching the original early return.
        .collect()
}
function_block-full_function
[ { "content": "#[static_ref]\n\npub fn router() -> &'static Router<Route> {\n\n Router::new(|route| match route {\n\n Some(Route::Active) => app::select_filter(app::Filter::Active),\n\n Some(Route::Completed) => app::select_filter(app::Filter::Completed),\n\n Some(Route::Root) | None => app::select_filter(app::Filter::All),\n\n })\n\n}\n\n\n\n// ------ Route -------\n\n\n\n#[route]\n\n#[derive(Clone, Copy)]\n\npub enum Route {\n\n #[route(\"active\")]\n\n Active,\n\n #[route(\"completed\")]\n\n Completed,\n\n #[route()]\n\n Root,\n\n}\n", "file_path": "examples/todomvc/frontend/src/router.rs", "rank": 0, "score": 360594.9110697749 }, { "content": "#[static_ref]\n\npub fn router() -> &'static Router<Route> {\n\n Router::new(|route: Option<Route>| {\n\n println!(\"{}\", routing::current_url());\n\n\n\n let route = match route {\n\n Some(route) => {\n\n push_to_route_history(route.clone());\n\n route\n\n }\n\n None => {\n\n return app::set_page_id(PageId::Unknown);\n\n }\n\n };\n\n\n\n match route {\n\n Route::ReportRoot => {\n\n if not(app::is_user_logged()) {\n\n return router().replace(Route::Login);\n\n }\n\n app::set_page_id(PageId::Report);\n", "file_path": "examples/pages/frontend/src/router.rs", "rank": 1, "score": 360594.91106977494 }, { "content": "#[static_ref]\n\nfn route_history() -> &'static Mutable<VecDeque<Route>> {\n\n Mutable::new(VecDeque::new())\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/router.rs", "rank": 2, "score": 280603.88916411577 }, { "content": "fn data_type(return_type: &ReturnType) -> Option<&Box<Type>> {\n\n let type_ = match return_type {\n\n ReturnType::Type(_, type_) => type_,\n\n _ => None?,\n\n };\n\n let type_reference = match type_.as_ref() {\n\n Type::Reference(type_reference) => type_reference,\n\n _ => None?,\n\n };\n\n if type_reference.mutability.is_some() {\n\n None?\n\n }\n\n if type_reference.lifetime.as_ref()?.ident != \"static\" {\n\n None?\n\n }\n\n Some(&type_reference.elem)\n\n}\n", "file_path": 
"crates/static_ref_macro/src/lib.rs", "rank": 7, "score": 246905.51100998052 }, { "content": "pub fn encode_uri_component(component: impl AsRef<str>) -> String {\n\n let encoded = js_sys::encode_uri_component(component.as_ref());\n\n String::from(encoded)\n\n}\n", "file_path": "crates/zoon/src/routing.rs", "rank": 8, "score": 245189.13154246815 }, { "content": " pub trait FrontBuilder<FRBO: FrontBuilderOutput> = FnOnce() -> FRBO + Clone + Send + 'static;\n\n\n", "file_path": "crates/moon/src/lib.rs", "rank": 9, "score": 242239.71925099043 }, { "content": "pub fn previous_route() -> Option<Route> {\n\n route_history().lock_ref().get(1).cloned()\n\n}\n\n\n\n// ------ router ------\n\n\n", "file_path": "examples/pages/frontend/src/router.rs", "rank": 10, "score": 242183.8425892689 }, { "content": "pub fn document() -> web_sys::Document {\n\n window().document().unwrap_throw()\n\n}\n\n\n", "file_path": "crates/zoon/src/dom.rs", "rank": 11, "score": 241595.95727575017 }, { "content": "pub fn window() -> web_sys::Window {\n\n web_sys::window().unwrap_throw()\n\n}\n\n\n", "file_path": "crates/zoon/src/dom.rs", "rank": 12, "score": 241595.95727575017 }, { "content": "fn route_fields(segments: &[RouteSegment]) -> impl Iterator<Item = FieldValue> + '_ {\n\n segments.iter().enumerate().filter_map(|(index, segment)| {\n\n if let RouteSegment::Ident(ident) = segment {\n\n Some(parse_quote!(\n\n #ident: RouteSegment::from_string_segment(&segments[#index])?\n\n ))\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n\n// ------ generate_impl_from_route_segments ------\n\n\n", "file_path": "crates/route_macro/src/lib.rs", "rank": 13, "score": 237302.28461695876 }, { "content": "pub fn decode_uri_component(component: impl AsRef<str>) -> Result<String, JsValue> {\n\n let decoded = js_sys::decode_uri_component(component.as_ref())?;\n\n Ok(String::from(decoded))\n\n}\n\n\n", "file_path": "crates/zoon/src/routing.rs", "rank": 15, "score": 232933.25628825347 }, { "content": "fn 
lit_str_validations(segments: &[RouteSegment]) -> impl Iterator<Item = ExprIf> + '_ {\n\n segments.iter().enumerate().filter_map(|(index, segment)| {\n\n if let RouteSegment::LitStr(lit_str) = segment {\n\n Some(parse_quote!(\n\n if segments[#index] != #lit_str { None? }\n\n ))\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "crates/route_macro/src/lib.rs", "rank": 16, "score": 232694.80878895347 }, { "content": " pub trait UpHandler<UPHO: UpHandlerOutput, UMsg> = FnOnce(UpMsgRequest<UMsg>) -> UPHO + Clone + Send + 'static;\n\n}\n\n\n\n// ------ ------\n\n// Start\n\n// ------ ------\n\n\n\npub async fn start<'de, FRB, FRBO, UPH, UPHO, UMsg>(\n\n frontend: FRB,\n\n up_msg_handler: UPH,\n\n service_config: impl FnOnce(&mut web::ServiceConfig) + Clone + Send + 'static,\n\n) -> io::Result<()>\n\nwhere\n\n FRB: FrontBuilder<FRBO>,\n\n FRBO: FrontBuilderOutput,\n\n UPH: UpHandler<UPHO, UMsg>,\n\n UPHO: UpHandlerOutput,\n\n UMsg: 'static + Deserialize,\n\n{\n\n // ------ Init ------\n", "file_path": "crates/moon/src/lib.rs", "rank": 17, "score": 226228.52267828916 }, { "content": "pub fn global_styles() -> &'static GlobalStyles {\n\n static GLOBAL_STYLES: OnceBox<GlobalStyles> = OnceBox::new();\n\n GLOBAL_STYLES.get_or_init(|| Box::new(GlobalStyles::new()))\n\n}\n\n\n\npub struct GlobalStyles {\n\n sheet: SendWrapper<CssStyleSheet>,\n\n rule_ids: MonotonicIds,\n\n}\n\n\n\nimpl GlobalStyles {\n\n fn new() -> Self {\n\n let style_element: HtmlStyleElement = document()\n\n .create_element(\"style\")\n\n .unwrap_throw()\n\n .unchecked_into();\n\n document()\n\n .head()\n\n .unwrap_throw()\n\n .append_child(&style_element)\n", "file_path": "crates/zoon/src/style.rs", "rank": 18, "score": 220276.36806733592 }, { "content": "pub fn root() -> impl Element {\n\n Column::new()\n\n .s(Padding::all(20))\n\n .s(Spacing::new(20))\n\n .item(header())\n\n .item(page())\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/app.rs", "rank": 19, "score": 220233.39805389906 }, 
{ "content": "pub fn header() -> impl Element {\n\n Row::new()\n\n .s(Spacing::new(20))\n\n .item(back_button())\n\n .item(link(\"Home\", Route::Root))\n\n .item(link(\"Report\", Route::ReportRoot))\n\n .item(link(\"Calc\", Route::CalcRoot))\n\n .item_signal(app::logged_user().signal_ref(|name| {\n\n if let Some(name) = name {\n\n log_out_button(name).left_either()\n\n } else {\n\n link(\"Log in\", Route::Login).right_either()\n\n }\n\n }))\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/header.rs", "rank": 20, "score": 220233.39805389906 }, { "content": "pub fn session_storage() -> &'static SessionStorage {\n\n static SESSION_STORAGE: OnceBox<SendWrapper<SessionStorage>> = OnceBox::new();\n\n SESSION_STORAGE.get_or_init(|| {\n\n let storage = SessionStorage::try_new().unwrap_throw();\n\n Box::new(SendWrapper::new(storage))\n\n })\n\n}\n\n\n\n// ------ Error ------\n\n\n\n#[derive(thiserror::Error, Debug)]\n\npub enum Error {\n\n #[error(\"the platform does not support the required WebStorage\")]\n\n StorageNotFoundError,\n\n #[error(\"cannot get access to the required WebStorage\")]\n\n GetStorageError(JsValue),\n\n #[error(\"cannot insert or update the given key-value pair (error: `{0:?}`)\")]\n\n InsertError(JsValue),\n\n #[error(\"(de)serialization failed (error: `{0}`)\")]\n\n SerdeError(serde_lite::Error),\n", "file_path": "crates/zoon/src/web_storage.rs", "rank": 21, "score": 218570.96661981917 }, { "content": "pub fn local_storage() -> &'static LocalStorage {\n\n static LOCAL_STORAGE: OnceBox<SendWrapper<LocalStorage>> = OnceBox::new();\n\n LOCAL_STORAGE.get_or_init(|| {\n\n let storage = LocalStorage::try_new().unwrap_throw();\n\n Box::new(SendWrapper::new(storage))\n\n })\n\n}\n\n\n\n// ------ session_storage ------\n\n\n", "file_path": "crates/zoon/src/web_storage.rs", "rank": 22, "score": 218570.96661981917 }, { "content": "pub fn page() -> impl Element {\n\n Column::new()\n\n .s(Spacing::new(20))\n\n .item(greeting())\n\n 
.item(switch_frequency_link())\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/report_page.rs", "rank": 23, "score": 218488.39869166096 }, { "content": "pub fn root() -> impl Element {\n\n Column::new()\n\n .s(Width::fill())\n\n .s(Height::fill().min_screen())\n\n .s(Font::new()\n\n .size(14)\n\n .color(hsl(0, 0, 5.1))\n\n .weight(NamedWeight::Light)\n\n .family(vec![\n\n FontFamily::new(\"Helvetica Neue\"),\n\n FontFamily::new(\"Helvetica\"),\n\n FontFamily::new(\"Arial\"),\n\n FontFamily::SansSerif,\n\n ]))\n\n .s(Background::new().color(hsl(0, 0, 96.5)))\n\n .item(content())\n\n}\n\n\n", "file_path": "examples/todomvc/frontend/src/app/view.rs", "rank": 24, "score": 218488.39869166093 }, { "content": "pub fn root() -> impl Element {\n\n Column::new().item(control_counters()).item(counters())\n\n}\n\n\n", "file_path": "examples/counters/frontend/src/app/view.rs", "rank": 25, "score": 218488.39869166096 }, { "content": "pub fn page() -> impl Element {\n\n Column::new()\n\n .s(Spacing::new(20))\n\n .item(result())\n\n .item(expressions())\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/calc_page.rs", "rank": 26, "score": 218488.39869166096 }, { "content": "pub fn page() -> impl Element {\n\n Row::new().item(name_input()).item(log_in_button())\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/login_page.rs", "rank": 27, "score": 218488.39869166093 }, { "content": "fn generate_impl_from_route_segments(route_count: usize) -> ItemImpl {\n\n let route_fn_idents =\n\n (0..route_count).map(|index| format_ident!(\"route_{}_from_route_segments\", index));\n\n parse_quote!(\n\n impl FromRouteSegments for Route {\n\n fn from_route_segments(segments: Vec<String>) -> Option<Self> {\n\n let route_fns = [\n\n #(Self::#route_fn_idents),*\n\n ];\n\n for route_fn in route_fns {\n\n let this = route_fn(&segments);\n\n if this.is_some() {\n\n return this\n\n }\n\n }\n\n None\n\n }\n\n }\n\n )\n\n}\n\n\n\n// ------ generate_impl_into_cow_str ------\n\n\n", 
"file_path": "crates/route_macro/src/lib.rs", "rank": 28, "score": 216962.52716232033 }, { "content": "fn push_to_route_history(route: Route) {\n\n let mut history = route_history().lock_mut();\n\n if history.len() == 2 {\n\n history.pop_back();\n\n }\n\n history.push_front(route);\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/router.rs", "rank": 29, "score": 216752.93909069296 }, { "content": "#[static_ref]\n\npub fn expression() -> &'static Mutable<Option<Expression>> {\n\n Mutable::new(None)\n\n}\n\n\n\n// ------ ------\n\n// Signals\n\n// ------ ------\n\n\n", "file_path": "examples/pages/frontend/src/calc_page.rs", "rank": 30, "score": 208815.77379154396 }, { "content": "#[static_ref]\n\npub fn logged_user() -> &'static Mutable<Option<String>> {\n\n Mutable::new(None)\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/app.rs", "rank": 31, "score": 208815.773791544 }, { "content": "pub fn hsl(h: impl Into<f64>, s: impl Into<f64>, l: impl Into<f64>) -> HSLuv {\n\n hsla(h, s, l, 100)\n\n}\n\n\n", "file_path": "crates/zoon/src/style/color.rs", "rank": 32, "score": 208660.04200220085 }, { "content": "pub fn counter_count() -> impl Signal<Item = usize> {\n\n map_ref! 
{\n\n let column_count = column_count(),\n\n let row_count = row_count() =>\n\n column_count * row_count\n\n }\n\n}\n\n\n", "file_path": "examples/counters/frontend/src/app.rs", "rank": 34, "score": 207159.80307779385 }, { "content": "fn cy_attr_signal() -> impl Signal<Item = &'static str> {\n\n light_state().signal_ref(|light_state| match light_state[0] {\n\n Stop => \"50\",\n\n Ready => \"150\",\n\n Go => \"250\",\n\n })\n\n}\n\n\n\n// ------ ------\n\n// Commands\n\n// ------ ------\n\n\n", "file_path": "examples/svg/frontend/src/lib.rs", "rank": 35, "score": 206882.44197796404 }, { "content": "fn color_attr_signal() -> impl Signal<Item = &'static str> {\n\n light_state().signal_ref(|light_state| match light_state[0] {\n\n Stop => \"red\",\n\n Ready => \"yellow\",\n\n Go => \"green\",\n\n })\n\n}\n\n\n", "file_path": "examples/svg/frontend/src/lib.rs", "rank": 36, "score": 206882.44197796404 }, { "content": "pub fn hsla(h: impl Into<f64>, s: impl Into<f64>, l: impl Into<f64>, a: impl Into<f64>) -> HSLuv {\n\n HSLuv {\n\n h: h.into().clamp(0., 360.),\n\n s: s.into().clamp(0., 100.),\n\n l: l.into().clamp(0., 100.),\n\n a: a.into().clamp(0., 100.),\n\n }\n\n}\n\n\n\n/// https://www.hsluv.org/\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]\n\npub struct HSLuv {\n\n h: f64,\n\n s: f64,\n\n l: f64,\n\n a: f64,\n\n}\n\n\n\nimpl Color<'_> for HSLuv {}\n\n\n", "file_path": "crates/zoon/src/style/color.rs", "rank": 37, "score": 205933.57483710558 }, { "content": "pub fn counter_count_hundreds() -> impl Signal<Item = String> {\n\n counter_count().map(|count| format!(\"{:.2}\", count as f64 / 1_000.))\n\n}\n\n\n\n// ------ ------\n\n// Handlers\n\n// ------ ------\n\n\n", "file_path": "examples/counters/frontend/src/app.rs", "rank": 38, "score": 205544.67120615442 }, { "content": "pub fn back() {\n\n history().back().unwrap_throw();\n\n}\n\n\n", "file_path": "crates/zoon/src/routing.rs", "rank": 39, "score": 201232.66828280242 }, { "content": "pub fn history() -> 
web_sys::History {\n\n window().history().unwrap_throw()\n\n}\n", "file_path": "crates/zoon/src/dom.rs", "rank": 41, "score": 199157.30267769162 }, { "content": "pub fn default_icon(checked_signal: MutableSignal<bool>) -> impl Element {\n\n // @TODO replace with better custom icons\n\n // Icons from https://github.com/tastejs/todomvc\n\n static ACTIVE_ICON: &str = \"data:image/svg+xml;utf8,%3Csvg%20xmlns%3D%22http%3A//www.w3.org/2000/svg%22%20width%3D%2240%22%20height%3D%2240%22%20viewBox%3D%22-10%20-18%20100%20135%22%3E%3Ccircle%20cx%3D%2250%22%20cy%3D%2250%22%20r%3D%2250%22%20fill%3D%22none%22%20stroke%3D%22%23ededed%22%20stroke-width%3D%223%22/%3E%3C/svg%3E\";\n\n static COMPLETED_ICON: &str = \"data:image/svg+xml;utf8,%3Csvg%20xmlns%3D%22http%3A//www.w3.org/2000/svg%22%20width%3D%2240%22%20height%3D%2240%22%20viewBox%3D%22-10%20-18%20100%20135%22%3E%3Ccircle%20cx%3D%2250%22%20cy%3D%2250%22%20r%3D%2250%22%20fill%3D%22none%22%20stroke%3D%22%23bddad5%22%20stroke-width%3D%223%22/%3E%3Cpath%20fill%3D%22%235dc2af%22%20d%3D%22M72%2025L42%2071%2027%2056l-4%204%2020%2020%2034-52z%22/%3E%3C/svg%3E\";\n\n\n\n El::new().s(Width::new(40)).s(Height::new(40)).s(\n\n Background::new().url_signal(checked_signal.map_bool(|| COMPLETED_ICON, || ACTIVE_ICON))\n\n )\n\n}\n", "file_path": "crates/zoon/src/element/checkbox.rs", "rank": 42, "score": 198973.56759797968 }, { "content": "fn action_button(id: &'static str, title: &'static str, on_click: fn()) -> RawHtmlEl {\n\n RawHtmlEl::new(\"div\")\n\n .attr(\"class\", \"col-sm-6 smallpad\")\n\n .child(\n\n RawHtmlEl::new(\"button\")\n\n .attr(\"id\", id)\n\n .attr(\"class\", \"btn btn-primary btn-block\")\n\n .attr(\"type\", \"button\")\n\n .event_handler(move |_: events::Click| on_click())\n\n .child(title),\n\n )\n\n}\n\n\n", "file_path": "examples/js-framework-benchmark/keyed/frontend/src/lib.rs", "rank": 43, "score": 197593.74491908835 }, { "content": "pub fn px<'a>(px: impl IntoCowStr<'a>) -> Cow<'a, str> {\n\n 
[&px.into_cow_str(), \"px\"].concat().into()\n\n}\n\n\n\n// ------ Style ------\n\n\n", "file_path": "crates/zoon/src/style.rs", "rank": 44, "score": 194935.47438858036 }, { "content": "pub fn current_url() -> String {\n\n window().location().href().unwrap_throw()\n\n}\n\n\n", "file_path": "crates/zoon/src/routing.rs", "rank": 45, "score": 194198.45383659465 }, { "content": "pub trait FromRouteSegments: Sized {\n\n fn from_route_segments(segments: Vec<String>) -> Option<Self>;\n\n}\n", "file_path": "crates/zoon/src/routing/from_route_segments.rs", "rank": 46, "score": 190684.99400196385 }, { "content": "pub trait RouteSegment: Sized {\n\n fn from_string_segment(segment: &str) -> Option<Self>;\n\n\n\n fn into_string_segment(self) -> Cow<'static, str>;\n\n}\n\n\n\n//-- impls --\n\n\n\nimpl RouteSegment for String {\n\n fn from_string_segment(segment: &str) -> Option<Self> {\n\n Some(segment.to_owned())\n\n }\n\n\n\n fn into_string_segment(self) -> Cow<'static, str> {\n\n self.into()\n\n }\n\n}\n\n\n\nimpl RouteSegment for Cow<'static, str> {\n\n fn from_string_segment(segment: &str) -> Option<Self> {\n", "file_path": "crates/zoon/src/routing/route_segment.rs", "rank": 47, "score": 190684.99400196385 }, { "content": "fn click_me_button() -> impl Element {\n\n let click_count = Mutable::new(0);\n\n let title = click_count.signal().map(|count| {\n\n if count == 0 {\n\n return Cow::from(\"Click me!\");\n\n }\n\n Cow::from(format!(\"Clicked {}x\", count))\n\n });\n\n Row::new().item(\n\n Button::new()\n\n .label_signal(title)\n\n .on_press(move || click_count.update(|count| count + 1)),\n\n )\n\n}\n\n\n", "file_path": "examples/counters/frontend/src/app/view.rs", "rank": 48, "score": 190571.42418647444 }, { "content": "fn link(label: &str, route: Route) -> impl Element {\n\n Link::new()\n\n .s(Font::new().underline().color(NamedColor::Blue7))\n\n .label(label)\n\n .to(route)\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/header.rs", "rank": 49, "score": 
186554.07056838722 }, { "content": "#[proc_macro_attribute]\n\npub fn route(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let mut input_enum: ItemEnum = syn::parse(input)\n\n .expect(\"'route' attribute is applicable only to enums and their variants\");\n\n\n\n let routes = extract_routes(&mut input_enum);\n\n\n\n let route_fns = generate_route_fns(&routes);\n\n let impl_from_route_segments = generate_impl_from_route_segments(routes.len());\n\n let impl_into_cow_str = generate_impl_into_cow_str(&routes);\n\n\n\n quote::quote_spanned!(input_enum.span()=>\n\n #input_enum\n\n #route_fns\n\n #impl_from_route_segments\n\n #impl_into_cow_str\n\n )\n\n .into()\n\n}\n\n\n\n// ------ extract_routes ------\n\n\n", "file_path": "crates/route_macro/src/lib.rs", "rank": 50, "score": 182494.2971500251 }, { "content": "#[proc_macro_attribute]\n\npub fn static_ref(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let mut input_fn: ItemFn = syn::parse(input).unwrap();\n\n\n\n let data_type =\n\n data_type(&input_fn.sig.output).expect(\"the function has to return &'static MyType\");\n\n\n\n let inner_block = input_fn.block;\n\n input_fn.block = parse_quote!({\n\n use once_cell::race::OnceBox;\n\n static INSTANCE: OnceBox<#data_type> = OnceBox::new();\n\n INSTANCE.get_or_init(move || Box::new(#inner_block))\n\n });\n\n\n\n quote::quote_spanned!(input_fn.span()=>\n\n #input_fn\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "crates/static_ref_macro/src/lib.rs", "rank": 51, "score": 179728.73300104355 }, { "content": "fn row_label(id: ID, label: impl Signal<Item = String> + Unpin + 'static) -> RawHtmlEl {\n\n RawHtmlEl::new(\"td\").attr(\"class\", \"col-md-4\").child(\n\n RawHtmlEl::new(\"a\")\n\n .event_handler(move |_: events::Click| select_row(id))\n\n .child(Text::with_signal(label)),\n\n )\n\n}\n\n\n", "file_path": "examples/js-framework-benchmark/keyed/frontend/src/lib.rs", "rank": 52, "score": 179055.0972836424 }, { "content": "fn 
get_route_segments(route_attr: &Attribute) -> Vec<RouteSegment> {\n\n let parser = Punctuated::<RouteSegment, Token![,]>::parse_terminated;\n\n route_attr.parse_args_with(parser)\n\n .expect(\"only parentheses with zero or more string literals and variant field names are allowed in the 'route' attribute\")\n\n .into_pairs()\n\n .map(Pair::into_value)\n\n .collect()\n\n}\n\n\n\n// ------ generate_route_fns ------\n\n\n", "file_path": "crates/route_macro/src/lib.rs", "rank": 53, "score": 176360.0141072205 }, { "content": "fn generate_impl_into_cow_str(routes: &[Route]) -> ItemImpl {\n\n let match_arms = routes.iter().map(match_arm);\n\n parse_quote!(\n\n impl<'a> IntoCowStr<'a> for Route {\n\n fn into_cow_str(self) -> std::borrow::Cow<'a, str> {\n\n match self {\n\n #(#match_arms),*\n\n }\n\n }\n\n fn take_into_cow_str(&mut self) -> std::borrow::Cow<'a, str> {\n\n unimplemented!()\n\n }\n\n }\n\n )\n\n}\n\n\n", "file_path": "crates/route_macro/src/lib.rs", "rank": 54, "score": 175429.97996720838 }, { "content": "fn assemble_url_template(segments: &[RouteSegment]) -> LitStr {\n\n let mut url_template = String::new();\n\n for segment in segments {\n\n url_template.push('/');\n\n match segment {\n\n RouteSegment::LitStr(lit_str) => url_template.push_str(&lit_str.value()),\n\n RouteSegment::Ident(ident) => {\n\n url_template.push('{');\n\n url_template.push_str(&ident.to_string());\n\n url_template.push('}');\n\n }\n\n }\n\n }\n\n if url_template.is_empty() {\n\n url_template.push('/');\n\n }\n\n LitStr::new(&url_template, Span::call_site())\n\n}\n", "file_path": "crates/route_macro/src/lib.rs", "rank": 55, "score": 171756.04652418467 }, { "content": "fn generate_route_fns(routes: &[Route]) -> ItemImpl {\n\n let route_fns = routes.iter().enumerate().map(route_fn);\n\n parse_quote!(\n\n impl Route {\n\n #(#route_fns)*\n\n }\n\n )\n\n}\n\n\n", "file_path": "crates/route_macro/src/lib.rs", "rank": 56, "score": 169396.45411930623 }, { "content": "fn stop_button(on_press: 
fn()) -> impl Element {\n\n button(\"Stop\", NamedColor::Red2, NamedColor::Red5, on_press)\n\n}\n\n\n", "file_path": "examples/timer/frontend/src/lib.rs", "rank": 58, "score": 154773.0351281587 }, { "content": "fn start_button(on_press: fn()) -> impl Element {\n\n button(\"Start\", NamedColor::Green2, NamedColor::Green5, on_press)\n\n}\n\n\n", "file_path": "examples/timer/frontend/src/lib.rs", "rank": 59, "score": 154773.0351281587 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"app\", root);\n\n connection();\n\n}\n", "file_path": "examples/chat/frontend/src/lib.rs", "rank": 60, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"app\", root);\n\n}\n", "file_path": "examples/timer/frontend/src/lib.rs", "rank": 61, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"app\", app::view::root);\n\n}\n", "file_path": "examples/counters/frontend/src/lib.rs", "rank": 62, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"app\", root);\n\n}\n", "file_path": "examples/canvas/frontend/src/lib.rs", "rank": 63, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"app\", root);\n\n}\n", "file_path": "examples/viewport/frontend/src/lib.rs", "rank": 64, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"app\", root);\n\n}\n", "file_path": "examples/counter/frontend/src/lib.rs", "rank": 65, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n router::router();\n\n start_app(\"app\", app::root);\n\n}\n", "file_path": "examples/pages/frontend/src/lib.rs", "rank": 66, "score": 152717.08053256996 }, { "content": "pub fn log_out() {\n\n logged_user().take();\n\n router().go(Route::Root);\n\n}\n\n\n\n// ------ ------\n\n// View\n\n// ------ ------\n\n\n", 
"file_path": "examples/pages/frontend/src/app.rs", "rank": 67, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n app::load_todos();\n\n router::router();\n\n start_app(\"app\", app::view::root);\n\n}\n", "file_path": "examples/todomvc/frontend/src/lib.rs", "rank": 68, "score": 152717.08053256996 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"app\", root);\n\n}\n", "file_path": "examples/svg/frontend/src/lib.rs", "rank": 69, "score": 152717.08053256996 }, { "content": "pub fn box_css_signal(\n\n signal: impl Signal<Item = impl IntoOptionCowStr<'static> + 'static> + Unpin + 'static,\n\n) -> BoxedCssSignal {\n\n Box::new(signal.map(|value| Box::new(value) as Box<dyn IntoOptionCowStr<'static>>))\n\n}\n\n\n", "file_path": "crates/zoon/src/style.rs", "rank": 70, "score": 151495.54891145363 }, { "content": "pub fn load_todos() {\n\n if let Some(Ok(todos)) = local_storage().get(STORAGE_KEY) {\n\n replace_todos(todos);\n\n println!(\"Todos loaded\");\n\n }\n\n}\n\n\n", "file_path": "examples/todomvc/frontend/src/app.rs", "rank": 71, "score": 151495.54891145363 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start!(app)\n\n}\n", "file_path": "examples/time_tracker/frontend/src/lib.rs", "rank": 72, "score": 151495.54891145363 }, { "content": "// https://stackoverflow.com/a/58825638\n\npub fn visit_files(\n\n path: impl Into<PathBuf>,\n\n) -> impl Stream<Item = Result<DirEntry>> + Send + 'static {\n\n #[throws]\n\n async fn one_level(path: PathBuf, to_visit: &mut Vec<PathBuf>) -> Vec<DirEntry> {\n\n let mut dir = fs::read_dir(path).await?;\n\n let mut files = Vec::new();\n\n\n\n while let Some(child) = dir.next_entry().await? 
{\n\n if child.metadata().await?.is_dir() {\n\n to_visit.push(child.path());\n\n } else {\n\n files.push(child)\n\n }\n\n }\n\n files\n\n }\n\n\n\n stream::unfold(vec![path.into()], |mut to_visit| async {\n\n let path = to_visit.pop()?;\n\n let file_stream = match one_level(path, &mut to_visit).await {\n\n Ok(files) => stream::iter(files).map(Ok).left_stream(),\n\n Err(error) => stream::once(async { Err(error) }).right_stream(),\n\n };\n\n Some((file_stream, to_visit))\n\n })\n\n .flatten()\n\n}\n", "file_path": "crates/mzoon/src/helper/visit_files.rs", "rank": 73, "score": 150306.33515006502 }, { "content": " pub trait UpHandlerOutput = Future<Output = ()> + 'static;\n", "file_path": "crates/moon/src/lib.rs", "rank": 74, "score": 150204.75199450384 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n start_app(\"main\", root);\n\n}\n", "file_path": "examples/js-framework-benchmark/keyed/frontend/src/lib.rs", "rank": 75, "score": 149148.17345051566 }, { "content": "#[static_ref]\n\nfn columns() -> &'static MutableVec<()> {\n\n MutableVec::new_with_values(vec![(); 5])\n\n}\n\n\n", "file_path": "examples/counters/frontend/src/app.rs", "rank": 76, "score": 148539.09355649355 }, { "content": "#[static_ref]\n\nfn rows() -> &'static MutableVec<()> {\n\n MutableVec::new_with_values(vec![(); 5])\n\n}\n\n\n", "file_path": "examples/counters/frontend/src/app.rs", "rank": 77, "score": 148539.09355649355 }, { "content": "fn jump_button(label: &str, on_press: fn()) -> impl Element {\n\n let (hovered, hovered_signal) = Mutable::new_and_signal(false);\n\n Button::new()\n\n .s(Padding::all(5))\n\n .s(Background::new()\n\n .color_signal(hovered_signal.map_bool(|| NamedColor::Green5, || NamedColor::Green2)))\n\n .on_hovered_change(move |is_hovered| hovered.set(is_hovered))\n\n .label(label)\n\n .on_press(on_press)\n\n}\n\n\n\n// ------ ------\n\n// Start\n\n// ------ ------\n\n\n", "file_path": "examples/viewport/frontend/src/lib.rs", "rank": 78, "score": 
148527.47969829754 }, { "content": "fn root() -> impl Element {\n\n Column::new()\n\n .s(Spacing::new(20))\n\n .s(Padding::all(20))\n\n .item(rectangles())\n\n .item(viewport_info())\n\n .item(jump_to_top_button())\n\n .item(jump_to_bottom_button())\n\n}\n\n\n\n// -- rectangles --\n\n\n", "file_path": "examples/viewport/frontend/src/lib.rs", "rank": 79, "score": 147855.56095478853 }, { "content": "// ------ Alternative ------\n\nfn _root() -> impl Element {\n\n let (counter, counter_signal) = Mutable::new_and_signal(0);\n\n let on_press = move |step: i32| *counter.lock_mut() += step;\n\n Column::new()\n\n .item(\n\n Button::new()\n\n .label(\"-\")\n\n .on_press(clone!((on_press) move || on_press(-1))),\n\n )\n\n .item_signal(counter_signal)\n\n .item(Button::new().label(\"+\").on_press(move || on_press(1)))\n\n}\n\n// ---------- // -----------\n\n\n", "file_path": "examples/counter/frontend/src/lib.rs", "rank": 80, "score": 147855.56095478853 }, { "content": "fn canvas() -> impl Element {\n\n Canvas::new()\n\n .width(300)\n\n .height(300)\n\n .after_insert(set_canvas_context)\n\n .after_remove(|_| remove_canvas_context())\n\n}\n\n\n", "file_path": "examples/canvas/frontend/src/lib.rs", "rank": 81, "score": 147855.56095478853 }, { "content": "fn root() -> impl Element {\n\n Column::new()\n\n .s(Align::center())\n\n .s(Borders::all(Border::new().color(hsl(0, 0, 20))))\n\n .s(RoundedCorners::all(30))\n\n .s(Clip::both())\n\n .item(canvas())\n\n .item(change_color_button())\n\n}\n\n\n", "file_path": "examples/canvas/frontend/src/lib.rs", "rank": 82, "score": 147855.56095478853 }, { "content": "fn rectangles() -> impl Element {\n\n Column::new()\n\n .s(Width::new(150))\n\n .s(Height::new(200))\n\n .s(Spacing::new(20))\n\n .s(Padding::all(15))\n\n .s(Background::new().color(NamedColor::Gray5))\n\n .s(Scrollbars::both())\n\n .on_viewport_location_change(on_viewport_change)\n\n .viewport_x_signal(viewport_x().signal())\n\n .viewport_y_signal(viewport_y().signal())\n\n 
.items(iter::repeat_with(rectangle).take(5))\n\n}\n\n\n", "file_path": "examples/viewport/frontend/src/lib.rs", "rank": 83, "score": 147855.56095478853 }, { "content": "fn root() -> impl Element {\n\n Column::new()\n\n .item(Button::new().label(\"-\").on_press(decrement))\n\n .item(Text::with_signal(counter().signal()))\n\n .item(Button::new().label(\"+\").on_press(increment))\n\n}\n\n\n", "file_path": "examples/counter/frontend/src/lib.rs", "rank": 84, "score": 147855.56095478853 }, { "content": "fn root() -> impl Element {\n\n Column::new()\n\n .s(Align::center())\n\n .s(Spacing::new(30))\n\n .item(stopwatch_panel())\n\n .item(timeout_panel())\n\n}\n\n\n", "file_path": "examples/timer/frontend/src/lib.rs", "rank": 85, "score": 147855.56095478853 }, { "content": "fn root() -> impl Element {\n\n El::new()\n\n .s(Padding::new().y(20))\n\n .s(Scrollbars::y_and_clip_x())\n\n .s(Height::screen())\n\n .viewport_y_signal(viewport_y().signal())\n\n .child(content())\n\n}\n\n\n", "file_path": "examples/chat/frontend/src/lib.rs", "rank": 86, "score": 147855.56095478853 }, { "content": "fn rectangle() -> impl Element {\n\n El::new()\n\n .s(Width::new(150))\n\n .s(Height::new(50))\n\n .s(Background::new().color(NamedColor::Red2))\n\n}\n\n\n\n// -- viewport_info --\n\n\n", "file_path": "examples/viewport/frontend/src/lib.rs", "rank": 87, "score": 147855.56095478853 }, { "content": "fn page() -> impl Element {\n\n El::new().child_signal(page_id().signal().map(|page_id| match page_id {\n\n PageId::Report => report_page::page().into_raw_element(),\n\n PageId::Login => login_page::page().into_raw_element(),\n\n PageId::Calc => calc_page::page().into_raw_element(),\n\n PageId::Home => El::new().child(\"Welcome Home!\").into_raw_element(),\n\n PageId::Unknown => El::new().child(\"404\").into_raw_element(),\n\n }))\n\n}\n", "file_path": "examples/pages/frontend/src/app.rs", "rank": 88, "score": 147855.56095478853 }, { "content": "fn root() -> impl Element {\n\n let width = 
\"100\";\n\n let height = \"300\";\n\n\n\n RawSvgEl::new(\"svg\")\n\n .attr(\"width\", width)\n\n .attr(\"height\", height)\n\n .event_handler(move |_: events::Click| next_light_state())\n\n .child(\n\n RawSvgEl::new(\"rect\")\n\n .attr(\"width\", width)\n\n .attr(\"height\", height)\n\n .attr(\"fill\", \"black\"),\n\n )\n\n .child(\n\n RawSvgEl::new(\"circle\")\n\n .attr(\"cx\", \"50\")\n\n .attr_signal(\"cy\", cy_attr_signal())\n\n .attr(\"r\", \"40\")\n\n .attr_signal(\"fill\", color_attr_signal()),\n\n )\n\n}\n\n\n\n// ------ ------\n\n// Start\n\n// ------ ------\n\n\n", "file_path": "examples/svg/frontend/src/lib.rs", "rank": 89, "score": 147855.56095478853 }, { "content": "fn content() -> impl Element {\n\n Column::new()\n\n .s(Width::new(300))\n\n .s(Align::new().center_x())\n\n .s(Spacing::new(20))\n\n .item(received_messages())\n\n .item(new_message_panel())\n\n .item(username_panel())\n\n}\n\n\n\n// ------ received_messages ------\n\n\n", "file_path": "examples/chat/frontend/src/lib.rs", "rank": 90, "score": 147855.56095478853 }, { "content": "pub fn is_user_logged() -> bool {\n\n logged_user().map(Option::is_some)\n\n}\n\n\n\n// ------ ------\n\n// Commands\n\n// ------ ------\n\n\n", "file_path": "examples/pages/frontend/src/app.rs", "rank": 91, "score": 147015.96002215444 }, { "content": "fn greeting() -> impl Element {\n\n let greeting = move |frequency: Frequency| {\n\n format!(\n\n \"Hello {}! 
This is your {} report.\",\n\n app::logged_user().lock_ref().as_ref().unwrap_throw(),\n\n frequency.as_str()\n\n )\n\n };\n\n Text::with_signal(frequency().signal().map(greeting))\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/report_page.rs", "rank": 92, "score": 146667.5665349111 }, { "content": "fn counters() -> impl Element {\n\n Row::new().items_signal_vec(super::columns().signal_vec().map(|_| counter_column()))\n\n}\n\n\n", "file_path": "examples/counters/frontend/src/app/view.rs", "rank": 93, "score": 146667.5665349111 }, { "content": "fn username_panel() -> impl Element {\n\n let id = \"username_input\";\n\n Row::new()\n\n .s(Spacing::new(15))\n\n .item(username_input_label(id))\n\n .item(username_input(id))\n\n}\n\n\n", "file_path": "examples/chat/frontend/src/lib.rs", "rank": 94, "score": 146667.5665349111 }, { "content": "fn send_button() -> impl Element {\n\n let (hovered, hovered_signal) = Mutable::new_and_signal(false);\n\n Button::new()\n\n .s(Padding::all(10))\n\n .s(RoundedCorners::new().right(5))\n\n .s(Background::new()\n\n .color_signal(hovered_signal.map_bool(|| NamedColor::Green5, || NamedColor::Green2)))\n\n .s(Font::new().color(NamedColor::Gray10).size(17))\n\n .on_hovered_change(move |is_hovered| hovered.set(is_hovered))\n\n .on_press(send_message)\n\n .label(\"Send\")\n\n}\n\n\n\n// ------ username_panel ------\n\n\n", "file_path": "examples/chat/frontend/src/lib.rs", "rank": 95, "score": 146667.5665349111 }, { "content": "fn back_button() -> impl Element {\n\n let (hovered, hovered_signal) = Mutable::new_and_signal(false);\n\n Button::new()\n\n .s(Background::new()\n\n .color_signal(hovered_signal.map_bool(|| NamedColor::Green5, || NamedColor::Green2)))\n\n .s(Padding::new().x(7))\n\n .on_hovered_change(move |is_hovered| hovered.set(is_hovered))\n\n .label(\"< Back\")\n\n .on_press(routing::back)\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/header.rs", "rank": 96, "score": 146667.5665349111 }, { "content": "fn 
expressions() -> impl Element {\n\n Row::new()\n\n .s(Spacing::new(35))\n\n .item(expression_link(Expression::new(3, '+', 7)))\n\n .item(expression_link(Expression::new(2, '^', 8)))\n\n .item(expression_link(Expression::new(10, '%', 3)))\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/calc_page.rs", "rank": 97, "score": 146667.5665349111 }, { "content": "fn received_messages() -> impl Element {\n\n Column::new().items_signal_vec(messages().signal_vec_cloned().map(received_message))\n\n}\n\n\n", "file_path": "examples/chat/frontend/src/lib.rs", "rank": 98, "score": 146667.5665349111 }, { "content": "fn result() -> impl Element {\n\n Row::new()\n\n .s(Spacing::new(15))\n\n .item(El::new().child(\"Result:\"))\n\n .item_signal(result_signal())\n\n}\n\n\n", "file_path": "examples/pages/frontend/src/calc_page.rs", "rank": 99, "score": 146667.5665349111 } ]
Rust
roman-numeral/roman-numeral/src/lib.rs
Emilgardis/problems
9d1e8f917f78621b66816d624009bbd12861f075
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] extern crate num; #[macro_use] extern crate num_derive; extern crate itertools; #[macro_use] extern crate error_chain; mod errors; use errors::*; use itertools::Itertools; use std::str::FromStr; use num::traits::{CheckedSub, CheckedMul, CheckedDiv}; use std::ops::{Add, Sub}; #[repr(u16)] #[derive(FromPrimitive, Ord, Eq, PartialOrd, PartialEq, Debug, Clone)] pub enum RomanNumeral { I = 1, V = 5, X = 10, L = 50, C = 100, D = 500, M = 1000, } impl RomanNumeral { fn biggest(u: &usize) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match () { _ if u >= &1000 => M, _ if u >= &500 => D, _ if u >= &100 => C, _ if u >= &50 => L, _ if u >= &10 => X, _ if u >= &5 => V, _ if u >= &1 => I, _ => return Err(ErrorKind::NoZeroNumeral.into()), }) } fn step_up(&self) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match self { &I | &X | &C | &M => return Err(format!("Cannot step_up from {:?}", self).into()), &V => X, &L => C, &D => M, }) } } impl FromStr for RomanNumeral { type Err = Error; fn from_str(s: &str) -> Result<Self> { use self::RomanNumeral::*; Ok(match s.to_uppercase().as_str() { "I" => I, "V" => V, "X" => X, "L" => L, "C" => C, "D" => D, "M" => M, _ => return Err(format!("No roman numeral corresponding to {}", s).into()), }) } } #[derive(Clone)] pub struct Roman(Vec<RomanNumeral>); impl FromStr for Roman { type Err = Error; fn from_str(s: &str) -> Result<Self> { let mut vec = vec![]; if s.len() == 0 { return Err(ErrorKind::NoZeroNumeral.into()); } for ch in s.chars() { vec.push(ch.to_string().parse().chain_err(|| "While parsing roman numeral")?) } Ok(Roman::new(vec).chain_err(|| "While making new roman numeral after parsing")?) 
} } impl Add for Roman { type Output = Roman; fn add(self, v: Roman) -> Roman { unimplemented!() } } impl Sub for Roman { type Output = Roman; fn sub(self, v: Roman) -> Roman { let k = self.as_usize().unwrap().sub(v.as_usize().unwrap()); Roman::from_usize(k).unwrap() } } impl CheckedSub for Roman { fn checked_sub(&self, v: &Roman) -> Option<Roman> { let k = self.as_usize().unwrap().checked_sub(if let Ok(v_) = v.as_usize() { v_ } else { return None; }); match k { Some(0) | None => None, Some(res) => Roman::from_usize(res).ok(), } } } impl Roman { pub fn new(content: Vec<RomanNumeral>) -> Result<Roman> { Roman::validate(content).chain_err(|| "While validating the sequence of roman numerals") } #[doc(hidden)] pub fn _inner(&self) -> &Vec<RomanNumeral> { &self.0 } pub fn as_usize(&self) -> Result<usize> { let mut sum = 0; let mut iter = self.0 .clone() .into_iter() .map(|e| (e, 1)) .coalesce(|x, y| if x.0 == y.0 { Ok((x.0, x.1 + y.1)) } else { Err((x, y)) }) .peekable(); while let Some((numeral, reps)) = iter.next() { if iter.peek().is_none() || iter.peek().unwrap().0 < numeral { sum += numeral as usize * reps; } else { let (n_next, n_reps) = iter.next().unwrap(); sum += (n_next as usize * n_reps).checked_sub((numeral as usize * reps)).ok_or::<Error>("Underflow error".into())?; } } Ok(sum) } fn validate(vec: Vec<RomanNumeral>) -> Result<Roman> { let ro = Roman(vec); ro.as_usize()?; let stred = ro.as_string()?; if stred.contains("VV") { Err(ErrorKind::InvalidSequence("VV").into()) } else if stred.contains("LL") { Err(ErrorKind::InvalidSequence("LL").into()) } else if stred.contains("DD") { Err(ErrorKind::InvalidSequence("DD").into()) } else { Ok(ro) } } pub fn as_string(&self) -> Result<String> { use std::fmt::Write; let mut buf = String::new(); for numeral in &self.0 { write!(buf, "{:?}", numeral)?; } Ok(buf) } pub fn condense(&mut self) -> Result<()> { let stred = self.as_string().unwrap(); let res = stred .replace("IIIIII", "VI") .replace("VIIII", "IX") 
.replace("DCCCC", "CM") .replace("LXXXX", "XC") .replace("CCCC", "CD") .replace("XXXX", "XL") .replace("IIII", "IV"); self.0 = res.parse::<Roman>().chain_err(|| "After condensing")?.0; Ok(()) } pub fn expand(&mut self) -> Result<()> { let stred = self.as_string()?; println!("{}", stred); let res = stred .replace("IV", "IIII") .replace("XL", "XXXX") .replace("CD", "CCCC") .replace("XC", "LXXXX") .replace("CM", "DCCCC") .replace("IX", "VIIII"); self.0 = res.parse::<Roman>().chain_err(|| "After expanding")?.0; Ok(()) } pub fn from_usize(u: usize) -> Result<Roman> { let mut vec = vec![]; let mut rest = u; while rest != 0 { let res = RomanNumeral::biggest(&rest)?; rest -= res.clone() as usize; vec.push(res); } Ok(Roman::new(vec)?) } } #[cfg(test)] mod tests { use super::*; use self::RomanNumeral::*; #[test] fn iiii() { let n = Roman::new(vec![I, I, I, I]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn iv() { let n = Roman::new(vec![I, V]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn ordering() { assert!(I < X); assert!(X <= L); assert!(X != C); assert!(C < D); } #[test] fn cdxcix() { let n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn cccclxxxxviiii() { let n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn iix() { let n = Roman::new(vec![I, I, X]).unwrap(); assert_eq!(8, n.as_usize().unwrap()); } #[test] fn xiix() { let n = Roman::new(vec![X, I, I, X]).unwrap(); assert_eq!(18, n.as_usize().unwrap()); } #[test] fn from_str() { let n = Roman::from_str("MMXVII").unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn from_usize() { let n = Roman::from_usize(2017).unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn condense_499() { let mut n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(499, n.as_usize().unwrap()); assert_eq!(&vec![C, D, 
X, C, I, X], n._inner()); } #[test] fn condense_viiii() { let mut n = Roman::new(vec![V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![I, X], n._inner()); } #[test] fn condense_cccc() { let mut n = Roman::new(vec![C, C, C, C]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![C, D], n._inner()); } #[test] fn expand() { let mut n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); n.expand().unwrap(); assert_eq!(&vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I], n._inner()); } #[test] fn sixteen() { let mut n = Roman::new(vec![X, I, I, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![X, V, I], n._inner()); } }
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] extern crate num; #[macro_use] extern crate num_derive; extern crate itertools; #[macro_use] extern crate error_chain; mod errors; use errors::*; use itertools::Itertools; use std::str::FromStr; use num::traits::{CheckedSub, CheckedMul, CheckedDiv}; use std::ops::{Add, Sub}; #[repr(u16)] #[derive(FromPrimitive, Ord, Eq, PartialOrd, PartialEq, Debug, Clone)] pub enum RomanNumeral { I = 1, V = 5, X = 10, L = 50, C = 100, D = 500, M = 1000, } impl RomanNumeral { fn biggest(u: &usize) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match () { _ if u >= &1000 => M, _ if u >= &500 => D, _ if u >= &100 => C, _ if u >= &50 => L, _ if u >= &10 => X, _ if u >= &5 => V, _ if u >= &1 => I, _ => return Err(ErrorKind::NoZeroNumeral.into()), }) } fn step_up(&self) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match self { &I | &X | &C | &M => return Err(format!("Cannot step_up from {:?}", self).into()), &V => X, &L => C, &D => M, }) } } impl FromStr for RomanNumeral { type Err = Error; fn from_str(s: &str) -> Result<Self> { use self::RomanNumeral::*; Ok(match s.to_uppercase().as_str() { "I" => I, "V" => V, "X" => X, "L" => L, "C" => C, "D" => D, "M" => M, _ => return Err(format!("No roman numeral corresponding to {}", s).into()), }) } } #[derive(Clone)] pub struct Roman(Vec<RomanNumeral>); impl FromStr for Roman { type Err = Error; fn from_str(s: &str) -> Result<Self> { let mut vec = vec![]; if s.len() == 0 { return Err(ErrorKind::NoZeroNumeral.into()); } for ch in s.chars() { vec.push(ch.to_string().parse().chain_err(|| "While parsing roman numeral")?) } Ok(Roman::new(vec).chain_err(|| "While making new roman numeral after parsing")?) 
} } impl Add for Roman { type Output = Roman; fn add(self, v: Roman) -> Roman { unimplemented!() } } impl Sub for Roman { type Output = Roman; fn sub(self, v: Roman) -> Roman { let k = self.as_usize().unwrap().sub(v.as_usize().unwrap()); Roman::from_usize(k).unwrap() } } impl CheckedSub for Roman { fn checked_sub(&self, v: &Roman) -> Option<Roman> { let k = self.as_usize().unwrap().checked_sub(if let Ok(v_) = v.as_usize() { v_ } else { return None; }); match k { Some(0) | None => None, Some(res) => Roman::from_usize(res).ok(), } } } impl Roman { pub fn new(content: Vec<RomanNumeral>) -> Result<Roman> { Roman::validate(content).chain_err(|| "While validating the sequence of roman numerals") } #[doc(hidden)] pub fn _inner(&self) -> &Vec<RomanNumeral> { &self.0 } pub fn as_usize(&self) -> Result<usize> { let mut sum = 0; let mut iter = self.0 .clone() .into_iter() .map(|e| (e, 1)) .coalesce(|x, y| if x.0 == y.0 { Ok((x.0, x.1 + y.1)) } else { Err((x, y)) }) .peekable(); while let Some((numeral, reps)) = iter.next() { if iter.peek().is_none() || iter.peek().unwrap().0 < numeral { sum += numeral as usize * reps; } else { let (n_next, n_reps) = iter.next().unwrap(); sum += (n_next as usize * n_reps).checked_sub((numeral as usize * reps)).ok_or::<Error>("Underflow error".into())?; } } Ok(sum) } fn validate(vec: Vec<RomanNumeral>) -> Result<Roman> { let ro = Roman(vec); ro.as_usize()?; let stred = ro.as_string()?; if stred.contains("VV") { Err(ErrorKind::InvalidSequence("VV").into()) } else if stred.contains("LL") { Err(ErrorKind::InvalidSequence("LL").into()) } else if stred.contains("DD") { Err(ErrorKind::InvalidSequence("DD").into()) } else { Ok(ro) } } pub fn as_string(&self) -> Result<String> { use std::fmt::Write; let mut buf = String::new(); for numeral in &self.0 { write!(buf, "{:?}", numeral)?; } Ok(buf) }
pub fn expand(&mut self) -> Result<()> { let stred = self.as_string()?; println!("{}", stred); let res = stred .replace("IV", "IIII") .replace("XL", "XXXX") .replace("CD", "CCCC") .replace("XC", "LXXXX") .replace("CM", "DCCCC") .replace("IX", "VIIII"); self.0 = res.parse::<Roman>().chain_err(|| "After expanding")?.0; Ok(()) } pub fn from_usize(u: usize) -> Result<Roman> { let mut vec = vec![]; let mut rest = u; while rest != 0 { let res = RomanNumeral::biggest(&rest)?; rest -= res.clone() as usize; vec.push(res); } Ok(Roman::new(vec)?) } } #[cfg(test)] mod tests { use super::*; use self::RomanNumeral::*; #[test] fn iiii() { let n = Roman::new(vec![I, I, I, I]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn iv() { let n = Roman::new(vec![I, V]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn ordering() { assert!(I < X); assert!(X <= L); assert!(X != C); assert!(C < D); } #[test] fn cdxcix() { let n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn cccclxxxxviiii() { let n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn iix() { let n = Roman::new(vec![I, I, X]).unwrap(); assert_eq!(8, n.as_usize().unwrap()); } #[test] fn xiix() { let n = Roman::new(vec![X, I, I, X]).unwrap(); assert_eq!(18, n.as_usize().unwrap()); } #[test] fn from_str() { let n = Roman::from_str("MMXVII").unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn from_usize() { let n = Roman::from_usize(2017).unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn condense_499() { let mut n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(499, n.as_usize().unwrap()); assert_eq!(&vec![C, D, X, C, I, X], n._inner()); } #[test] fn condense_viiii() { let mut n = Roman::new(vec![V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![I, X], n._inner()); } #[test] fn 
condense_cccc() { let mut n = Roman::new(vec![C, C, C, C]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![C, D], n._inner()); } #[test] fn expand() { let mut n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); n.expand().unwrap(); assert_eq!(&vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I], n._inner()); } #[test] fn sixteen() { let mut n = Roman::new(vec![X, I, I, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![X, V, I], n._inner()); } }
pub fn condense(&mut self) -> Result<()> { let stred = self.as_string().unwrap(); let res = stred .replace("IIIIII", "VI") .replace("VIIII", "IX") .replace("DCCCC", "CM") .replace("LXXXX", "XC") .replace("CCCC", "CD") .replace("XXXX", "XL") .replace("IIII", "IV"); self.0 = res.parse::<Roman>().chain_err(|| "After condensing")?.0; Ok(()) }
function_block-full_function
[ { "content": "#[derive(Debug, PartialEq, PartialOrd, Eq, Ord)]\n\nstruct Name<'a>(pub &'a str);\n\n\n\nimpl<'a> Name<'a> {\n\n pub fn value(&self) -> usize {\n\n self.0.chars().fold(0, |v, ch| v + (ch as u8 - 64) as usize )\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests{\n\n\n\n use super::*;\n\n static NAMES: &'static str = r#\"\"MARY\",\"PATRICIA\",\"LINDA\",\"BARBARA\",\"ELIZABETH\"\"#;\n\n \n\n\n\n #[test]\n\n fn sort_names() {\n\n let names = NameList::new(NAMES).unwrap();\n\n println!(\"{}\", names.sorted.len());\n", "file_path": "namenames_score/src/main.rs", "rank": 0, "score": 123871.53971250614 }, { "content": "fn parse_nums(v: Values) -> Vec<u64> {\n\n v.map(|num| num.parse::<f64>().unwrap() as u64).collect()\n\n}\n\n\n", "file_path": "collatz/src/main.rs", "rank": 1, "score": 119216.91643226209 }, { "content": "#[bench]\n\nfn collatz_8500411_while_let(b: &mut Bencher) {\n\n b.iter(|| { let mut collatz = Collatz::new(8500411);\n\n while let Some(_) = collatz.next() {}\n\n })\n\n}\n", "file_path": "collatz/benches/bench.rs", "rank": 2, "score": 108700.40322904056 }, { "content": "pub trait ToNGram<Data> where Data: Ord + Clone {\n\n fn to_ngram(self, n: u8) -> NGram<Data>;\n\n}\n\n\n\nimpl ToNGram<String> for String {\n\n fn to_ngram(self, n: u8) -> NGram<String> {\n\n let n = n.clone();\n\n if n == 1 {\n\n let mut count = BTreeMap::new();\n\n let mut entries = 0;\n\n for c in self.chars() {\n\n if c.is_alphabetic() {\n\n let c_lc = c.to_lowercase().collect();\n\n entries += 1;\n\n *count.entry(c_lc).or_insert(0) += 1;\n\n }\n\n }\n\n NGram::new(count, entries, n)\n\n } else if n == 2 {\n\n let mut count = BTreeMap::new();\n", "file_path": "lang-interp/src/ngram.rs", "rank": 3, "score": 106721.4347834115 }, { "content": "fn validate_nums(v: String) -> Result<(), String> {\n\n for num in v.split_whitespace() {\n\n match num.parse::<f64>() {\n\n Ok(num_f64) => {\n\n if num_f64 == 0.0 {\n\n return Err(String::from(\"Number must be a natural number >= 
1\"))\n\n }\n\n if !num_f64.is_sign_positive() {\n\n return Err(String::from(format!(\"\\\"{}\\\" is not a positive number.\", num)));\n\n }\n\n if !num_f64.is_normal() || num_f64.fract() != 0.0 {\n\n return Err(String::from(format!(\"\\\"{}\\\" is not a natural number.\", num)));\n\n }\n\n },\n\n Err(e) => {\n\n return Err(String::from(format!(\"Couldn't parse number \\\"{}\\\": {:?}\", num, e.description())));\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "collatz/src/main.rs", "rank": 4, "score": 103145.77095375847 }, { "content": "pub fn primes(limit: u64) -> Vec<sieve::Prime> {\n\n let sieve = sieve::ArSieve::new(limit);\n\n let mut result = Vec::new();\n\n for prime in sieve {\n\n result.push(prime);\n\n }\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod bench {\n\n use super::*;\n\n use test::Bencher;\n\n\n\n #[bench]\n\n fn prime_to_1e6(b: &mut Bencher) {\n\n b.iter(|| primes(1e6 as u64))\n\n }\n\n}\n", "file_path": "rust_prime/benches/naive_sieve.rs", "rank": 5, "score": 102124.91544579736 }, { "content": "pub fn find_languages<T: AsRef<Path>>(dir: T, depth: u8) -> Result<Vec<Language>, io::Error> {\n\n if dir.as_ref().is_dir() && depth != 0 {\n\n let mut result: Vec<Language> = vec![];\n\n for entry in try!(fs::read_dir(dir.as_ref())) {\n\n let entry = try!(entry);\n\n let path = entry.path();\n\n if path.is_dir() {\n\n let mut subresult = try!(find_languages(&path, depth-1));\n\n result.append(&mut subresult);\n\n } else {\n\n match path.extension() {\n\n None => (),\n\n Some(ext) => {\n\n if ext == \"lang\" {\n\n result.push(Language::open_lang(&path));\n\n }\n\n }\n\n }\n\n }\n\n }\n\n Ok(result)\n\n } else {\n\n Err(io::Error::new(io::ErrorKind::InvalidInput, \"Not a directory\"))\n\n }\n\n}\n", "file_path": "lang-interp/src/find_lang.rs", "rank": 6, "score": 101685.7696049804 }, { "content": "fn reduce(every: &usize, i: &mut usize, curr: &usize) -> bool {\n\n let res = (*i+1) % every == 0;\n\n *i = *i+1;\n\n res\n\n}\n", "file_path": 
"lucky_numbers/src/main.rs", "rank": 7, "score": 99428.62941474962 }, { "content": "error_chain!{\n\n errors {\n\n NoZeroNumeral {\n\n description(\"There is no zero roman numeral.\")\n\n }\n\n InvalidSequence(seq: &'static str) {\n\n description(\"Invalid sequence\")\n\n display(\"Invalid sequence: {}\", seq)\n\n }\n\n }\n\n foreign_links {\n\n FmtErr(::std::fmt::Error); \n\n } \n\n}\n", "file_path": "roman-numeral/roman-numeral/src/errors.rs", "rank": 8, "score": 95166.84685398571 }, { "content": "pub fn primes(limit: u64) -> slow_primes::Primes {\n\n slow_primes::Primes::sieve(limit as usize)\n\n}\n\n\n\n#[cfg(test)]\n\nmod bench {\n\n use super::*;\n\n use test::Bencher;\n\n use slow_primes::PrimeIterator;\n\n\n\n #[bench]\n\n fn extern_slow_primes_to_1e6(b: &mut Bencher) {\n\n b.iter(|| primes(1e6 as u64))\n\n }\n\n}\n", "file_path": "rust_prime/benches/extern.rs", "rank": 9, "score": 92553.24141372062 }, { "content": "// TODO: Better name.\n\npub fn get_sum_(multipliers: &[u64], max: u64) -> u64 {\n\n let mut sum = 0;\n\n for mult in multipliers {\n\n sum += (max-1)/mult * (mult + max) / 2;\n\n println!(\"{}\", mult)\n\n }\n\n \n\n sum\n\n}\n\n\n\n#[cfg(test)]\n\nmod multiplier_test {\n\n use super::*;\n\n\n\n #[test]\n\n fn multiplers_of_3_5_to_1000() {\n\n assert_eq!(233168, get_sum_(&[3,5], 1000));\n\n }\n\n}\n", "file_path": "multiplier_of/src/lib.rs", "rank": 10, "score": 90358.65856351057 }, { "content": "#[bench]\n\nfn collatz_8500411_for(b: &mut Bencher) {\n\n b.iter(|| { let mut collatz = Collatz::new(8500411);\n\n for _ in collatz {}\n\n })\n\n}\n\n\n", "file_path": "collatz/benches/bench.rs", "rank": 11, "score": 85266.42502436963 }, { "content": "#[bench]\n\nfn collatz_1_to_1e6_with_sieve(b: &mut Bencher) {\n\n b.iter(|| gen_collatz_bound_with_sieve(1..(1e6 as u64)))\n\n}\n", "file_path": "collatz/benches/bench.rs", "rank": 12, "score": 81267.7238566797 }, { "content": "#[bench]\n\nfn collatz_1_to_1e6_without_sieve(b: &mut Bencher) {\n\n b.iter(|| 
gen_collatz_bound_without_sieve(1..(1e6 as u64)))\n\n}\n\n\n", "file_path": "collatz/benches/bench.rs", "rank": 13, "score": 79489.81915141089 }, { "content": "fn validate_txt(v: String) -> Result<(), String> {\n\n match File::open(&v) {\n\n Ok(_) => Ok(()),\n\n Err(e) =>\n\n Err(format!(\"{}{}\",\n\n match e.kind() {\n\n ErrorKind::NotFound =>\n\n \"File not found: \",\n\n ErrorKind::PermissionDenied =>\n\n \"No permission to open: \",\n\n _ => e.description(),\n\n },\n\n v)\n\n )\n\n }\n\n}\n", "file_path": "lang-interp/src/main.rs", "rank": 14, "score": 76861.38766725619 }, { "content": "fn main() {\n\n let limit = 28123;\n\n\n\n let abundant_numbers: Vec<usize> = (1usize..limit).filter(|v| v.is_abundant()).collect();\n\n println!(\"Got abundant numbers\");\n\n println!(\"All abundant numbers: {:?}\", abundant_numbers);\n\n //println!(\"{:?}\", abundant_numbers);\n\n // Find sum of all integers that cannot be written as the sum of two abundant_numbers.\n\n // Sum of the two numbers has to be under under our 'limit'\n\n \n\n let mut sums = BTreeSet::new();\n\n\n\n 'outer: for x in abundant_numbers.iter().rev() {\n\n for y in &abundant_numbers {\n\n if x+y > limit {\n\n continue 'outer;\n\n }\n\n sums.insert(x+y);\n\n }\n\n }\n\n println!(\"Calculated sums\");\n\n println!(\"all numbers that can be expressed as the sum of two abundant numbers under 28123: {:?}\", sums);\n\n \n\n let unchecked = (1usize..limit).collect::<BTreeSet<_>>();\n\n let all = unchecked.difference(&sums).collect::<Vec<_>>();\n\n println!(\"numbers that can't: {:?}\", all);\n\n println!(\"{}\", all.into_iter().fold(0, ::std::ops::Add::add))\n\n}\n\n\n", "file_path": "non-abundant-sums/src/main.rs", "rank": 15, "score": 73686.47619587548 }, { "content": "pub trait Divisors {\n\n //fn divisors_buffed(&self, buf: &mut [u64]) -> usize;\n\n fn divisors(&self) -> Vec<usize>;\n\n fn divisors_sum(&self) -> usize;\n\n}\n\n\n\n\n", "file_path": "non-abundant-sums/src/main.rs", "rank": 16, "score": 
71241.31556953867 }, { "content": "pub trait Aliquot {\n\n fn aliquot_sum(&self) -> AliquotSum;\n\n \n\n fn is_perfect(&self) -> bool {\n\n match self.aliquot_sum() {\n\n AliquotSum::Perfect => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n fn is_deficient(&self) -> bool {\n\n match self.aliquot_sum() {\n\n AliquotSum::Deficient(_) => true,\n\n _ => false\n\n }\n\n }\n\n\n\n fn is_abundant(&self) -> bool {\n\n match self.aliquot_sum() {\n\n AliquotSum::Abundant(_) => true,\n", "file_path": "non-abundant-sums/src/main.rs", "rank": 17, "score": 71241.31556953867 }, { "content": "#![no_main]\n\n#[macro_use] extern crate libfuzzer_sys;\n\nextern crate roman_numeral;\n\n\n\nuse std::str::{self, FromStr};\n\n\n\nfuzz_target!(|data: &[u8]| {\n\n // fuzzed code goes here\n\n if let Ok(s) = str::from_utf8(data) {\n\n if let Ok(mut num) = roman_numeral::Roman::from_str(s) {\n\n println!(\"{:?}\", num._inner());\n\n num.as_usize().unwrap();\n\n num.as_string().unwrap();\n\n assert_ne!(num._inner().len(), 0);\n\n num.clone().condense().unwrap();\n\n num.expand().unwrap();\n\n }\n\n }\n\n});\n", "file_path": "roman-numeral/roman-numeral/fuzz/fuzzers/fuzzer_script_1.rs", "rank": 36, "score": 65096.53167926503 }, { "content": "fn validate_integer(s: String) -> Result<(), String> {\n\n match s.parse::<usize>() {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(format!(\"{}\", e))\n\n }\n\n}\n\n\n", "file_path": "lucky_numbers/src/main.rs", "rank": 37, "score": 60790.24218875258 }, { "content": "struct Environment{\n\n a: usize,\n\n b: usize,\n\n s: usize,\n\n m: usize,\n\n n: usize,\n\n}\n\n\n\nconst PI: f32 = std::f32::consts::PI;\n\nimpl Environment {\n\n fn new(v: Vec<usize>) -> Environment {\n\n Environment {\n\n a: v[0],\n\n b: v[1],\n\n s: v[2],\n\n m: v[3],\n\n n: v[4],\n\n }\n\n }\n\n fn calculate(self) -> String {\n\n\n\n let width = self.a * self.m;\n\n let height = self.b * self.n;\n\n let dist = (width.pow(2) as f32 + height.pow(2) as f32).sqrt();\n\n let angle = 180.0/PI * (height 
as f32).atan2(width as f32);\n\n let vel = dist as f32 / self.s as f32;\n\n format!(\"{:.2} {:.2}\", angle, vel)\n\n }\n\n}\n\n\n", "file_path": "billiard/src/main.rs", "rank": 38, "score": 53061.0397376114 }, { "content": "fn main() {\n\n while let Some(env) = read_numbers() {\n\n println!(\"{}\", env.calculate())\n\n }\n\n}\n", "file_path": "billiard/src/main.rs", "rank": 39, "score": 49720.079514303805 }, { "content": "fn main() {\n\n let matches = App::new(\"Collatz Sequence Generator\")\n\n //.title(\"Collatz Sequence Generator\")\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(\"Collatz discovered this in 1937!\")\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .setting(AppSettings::UnifiedHelpMessage)\n\n .arg(Arg::with_name(\"enable-sieve\")\n\n .short(\"e\")\n\n .long(\"enable-sieve\")\n\n .help(\"Tells the program to enable the sieve.{n}\\\n\n Ignored with `get` as it doesn't need it.\")\n\n )\n\n .arg(Arg::with_name(\"verbose\")\n\n .short(\"v\")\n\n .help(\"Enables full output.{n}\\\n\n Use with `--do-twos` for more complete output.\")\n\n )\n\n .arg(Arg::with_name(\"do-twos\")\n", "file_path": "collatz/src/main.rs", "rank": 40, "score": 49720.079514303805 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "multiplier_of/src/main.rs", "rank": 41, "score": 49720.079514303805 }, { "content": "fn main() {\n\n let std = io::stdout();\n\n let handle = std.lock();\n\n let mut ack = AckermannGen::new(4,3, handle);\n\n let ack_res = ack.run();\n\n println!(\"(4,3): {:?}\", ack_res);\n\n println!(\"(4,3): {:?}\", ack.total);\n\n\n\n}\n\n\n", "file_path": "ackermann_rust/src/main.rs", "rank": 42, "score": 48505.67482584214 }, { "content": "fn main() {\n\n let limit: usize = ::std::env::args().nth(1).expect(\"Please provide a number for the limit\").parse::<f64>().unwrap() as usize;\n\n let mut n = 0;\n\n for i in 11..limit {\n\n if i % 10 == 0 {\n\n continue;\n\n }\n\n if (i + 
i.reverse().unwrap()).is_odd_digits().unwrap() {\n\n //print!(\"{},\", i);\n\n n += 1;\n\n }\n\n }\n\n //if !n.is_multiple_of(&2) {\n\n // panic!(\"Error, n can't be uneven\");\n\n //}\n\n println!(\"{}\", n);\n\n}\n\n\n", "file_path": "reversible-numbers/src/main.rs", "rank": 43, "score": 48505.67482584214 }, { "content": "fn main() {\n\n let matches = App::new(\"Language interpreter/guesser\")\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(\"Tries to guess what language your text is written in.\")\n\n .subcommand(SubCommand::with_name(\"learn\")\n\n .about(\"Teach the program about a new language\")\n\n .arg(Arg::with_name(\"language\")\n\n .required(true))\n\n .arg(Arg::with_name(\"file\")\n\n //.min_values(1)\n\n .validator(validate_txt)\n\n .required(true))\n\n )\n\n .subcommand(SubCommand::with_name(\"guess\")\n\n .about(\"The program will try to guess the language.\")\n\n .arg(Arg::with_name(\"file\")\n\n .validator(validate_txt)\n\n .required(true))\n\n )\n", "file_path": "lang-interp/src/main.rs", "rank": 44, "score": 48505.67482584214 }, { "content": "fn main() {\n\n let matches = App::new(\"Lucky integer\")\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(\"Computes nearest lucky integer\")\n\n .arg(Arg::with_name(\"near\")\n\n .required(true)\n\n .validator(validate_integer)\n\n .takes_value(true))\n\n .get_matches();\n\n let mut ln = LuckyIter::new(matches.value_of(\"near\").unwrap().parse().unwrap());\n\n println!();\n\n for n in ln {\n\n println!(\"{}\", n);\n\n }\n\n}\n\n\n\npub struct LuckyIter {\n\n list: Vec<usize>,\n\n current: usize,\n", "file_path": "lucky_numbers/src/main.rs", "rank": 45, "score": 48505.67482584214 }, { "content": "struct NameList<'a> {\n\n pub sorted: Vec<Name<'a>>,\n\n}\n\n\n\nimpl<'a> NameList<'a> {\n\n /// Takes a comma separated file with qouted names\n\n pub fn new(source: &'a str) -> Result<NameList<'a>> {\n\n lazy_static! 
{\n\n static ref RE: Regex = Regex::new(r#\"\"(.*?)\"\"#).unwrap(); // Cannot fail\n\n }\n\n let mut vec: Vec<_> = RE.captures_iter(source).map(|ca| Name(ca.get(1).ok_or(\"No match for a name found\").unwrap().as_str())).collect(); // handle error\n\n vec.sort_unstable();\n\n Ok(NameList {\n\n sorted: vec,\n\n })\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for NameList<'a> {\n\n type Item = Name<'a>;\n\n type IntoIter = ::std::vec::IntoIter<Self::Item>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.sorted.into_iter()\n\n }\n\n}\n\n\n", "file_path": "namenames_score/src/main.rs", "rank": 46, "score": 47927.02864740477 }, { "content": "fn open_file<T: AsRef<Path>>(file_path: T) -> Result<String, IoErr> {\n\n let mut buf = String::new();\n\n File::open(file_path.as_ref())?.read_to_string(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "lang-interp/src/main.rs", "rank": 47, "score": 47674.09396140948 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "morse/morse-playback/src/main.rs", "rank": 48, "score": 47392.10978020514 }, { "content": "pub trait Garland {\n\n /// Gives the \"garland\" degree.\n\n fn garland(&self) -> usize;\n\n}\n\n\n\nimpl<'a> Garland for &'a str {\n\n fn garland(&self) -> usize {\n\n let mut degree = 0;\n\n let chars: Vec<_> = self.graphemes(true).collect();\n\n let len = chars.len();\n\n for i in 0..len {\n\n if chars[0..i] == chars[len-i..len] {\n\n degree = i;\n\n }\n\n }\n\n degree\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "garland_words/src/lib.rs", "rank": 49, "score": 47040.82275071519 }, { "content": "pub trait Morsify {\n\n type Error;\n\n fn to_morse<M>(&self, dict: M, buf: &mut [MorseCode]) -> Result<usize, Self::Error>\n\n where\n\n M: MorseDict;\n\n fn to_morse_standard(&self, buf: &mut [MorseCode]) -> Result<usize, Self::Error> {\n\n Morsify::to_morse(self, MorseStandard, buf)\n\n }\n\n #[cfg(feature = \"std\")]\n\n fn to_morse_string<M>(&self, dict: M) -> Result<String, 
Self::Error>\n\n where\n\n M: MorseDict,\n\n {\n\n //TODO: Make this much better memory wise.\n\n let size = 100 * 10; // Generally we'd need 8 places for each char, but sometimes it's even more.\n\n let mut buf = vec![MorseCode::Dot; size];\n\n let mut char_buf = [0; 2];\n\n let mut string = String::new();\n\n let end = Morsify::to_morse(self, dict, &mut buf)?;\n\n\n", "file_path": "morse/morse/src/lib.rs", "rank": 50, "score": 47040.82275071519 }, { "content": "pub trait MorseDict {\n\n type Error;\n\n fn char_to_morse(&self, c: &char, buf: &mut [MorseCode]) -> Result<usize, Self::Error>;\n\n fn morse_to_char(&self, morse: &[MorseCode]) -> Result<char, Self::Error>;\n\n}\n\n\n\nstatic STANDARD_LOOKUP: &'static str = r##\"ETIANMSURWDKGOHVFÜLÄPJBXCYZQÖĤ54Ŝ3É Ð2 È+ ÞÀĴ16=/ Ç Ĥ 7 ĜÑ8 90\n\n ?_ \\\" . @ ' - ;! ( , :\"##;\n\npub struct MorseStandard;\n\n\n\nimpl MorseDict for MorseStandard {\n\n type Error = ();\n\n fn char_to_morse(&self, c: &char, buf: &mut [MorseCode]) -> Result<usize, Self::Error> {\n\n use self::MorseCode::*;\n\n use self::MorseCode::IntraGap as G;\n\n use self::MorseCode::LetterGap as L;\n\n if c == &' ' {\n\n return Err(());\n\n }\n\n debug_assert!(buf.len() >= 6 * 2); // The depth of the lookup table is 6. 
times 2 since we include the gaps.\n", "file_path": "morse/morse/src/lib.rs", "rank": 51, "score": 45995.12401803234 }, { "content": "fn run() -> Result<()> {\n\n let path = std::env::args().nth(1).ok_or(\"Please provide a file path\")?;\n\n let mut string = String::new();\n\n File::open(&path).chain_err(|| format!(\"while opening file '{}'\", &path))?.read_to_string(&mut string)?;\n\n let names = NameList::new(&string)?;\n\n let mut sum = 0;\n\n for (i, name) in names.into_iter().enumerate() {\n\n sum += (i+1)*name.value();\n\n }\n\n println!(\"{}\", sum);\n\n Ok(())\n\n}\n\n\n", "file_path": "namenames_score/src/main.rs", "rank": 52, "score": 44938.7814434323 }, { "content": "fn run() -> Result<()> {\n\n use std::fs::File;\n\n use std::io::Read;\n\n\n\n let words = vec![\"one\", \"two\", \"three\", \"four\", \"five\", \"six\", \"seven\"];\n\n //let mut buf = String::new();\n\n //File::open(\"words.txt\")?.read_to_string(&mut buf)?;\n\n //let words = buf.split_whitespace().collect::<Vec<_>>();\n\n let mut hist: Histogram<char> = Histogram::new();\n\n let mut repeats: Histogram<Letter> = Histogram::new();\n\n\n\n for word in words {\n\n hist.include_word(word).chain_err(|| format!(\"While proccessing \\\"{}\\\"\", word))?;\n\n repeats.include_repeats(word).unwrap();\n\n }\n\n // Make dies/blocks.\n\n println!(\"histogram: {:?}\\nrepeats: {:?}\", hist, repeats);\n\n println!(\"histogram max: {:?}\", hist.get_maximum());\n\n println!(\"repeats:\\n 1 {:?}\\n 2 {:?}\\n 3 {:?}\\n 4 {:?}\", repeats.get_maximum_of_grade(1), repeats.get_maximum_of_grade(2), repeats.get_maximum_of_grade(3), repeats.get_maximum_of_grade(4));\n\n println!(\"order: \\n 1 {:?}\\n 2 {:?}\\n 3 {:?}\\n 4 {:?}\", repeats.get_grade(1), repeats.get_grade(2),repeats.get_grade(3),repeats.get_grade(4));\n\n\n\n\n\n println!(\"result: {:?}\", BlockBuilder::new(hist, repeats).generate()); \n\n Ok(())\n\n}\n", "file_path": "dice_spell/src/main.rs", "rank": 53, "score": 44938.7814434323 }, { "content": 
"struct AckermannGen<T: Write> {\n\n handle: T,\n\n pub total: Rc<u64>,\n\n args: (u64, u64),\n\n}\n\n\n\nimpl<T: Write> AckermannGen<T> {\n\n fn new(m: u64, n: u64, write: T) -> AckermannGen<T> {\n\n AckermannGen {\n\n handle: write,\n\n total: 0.into(),\n\n args: (m, n),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn _ackermann(&mut self, m: &u64, n: &u64) -> Result<u64, ()> {\n\n // Do stuff\n\n match (m, n) {\n\n (m, n) if m == &0 => Ok(*n+1),\n", "file_path": "ackermann_rust/src/main.rs", "rank": 54, "score": 43716.90881359682 }, { "content": "pub trait IntegerExt: Integer {\n\n fn reverse(&self) -> Result<Self, &'static str>;\n\n fn push_back(&mut self, other: &Self) -> Result<(), &'static str>;\n\n fn is_odd_digits(&self) -> Result<bool, &'static str>;\n\n}\n\n\n\nimpl<T> IntegerExt for T where T: Integer + NumCast + CheckedMul + Copy {\n\n fn reverse(&self) -> Result<T, &'static str> {\n\n let mut i: usize = NumCast::from(*self).ok_or(\"Number is to large for usize\")?;\n\n let mut res;\n\n\n\n {\n\n let (qu, rem) = i.div_rem(&10);\n\n i = qu;\n\n res = rem;\n\n }\n\n //let mut place = 10;\n\n loop {\n\n match i.div_rem(&10) {\n\n (0, rem) => {\n", "file_path": "reversible-numbers/src/main.rs", "rank": 55, "score": 42914.65096389589 }, { "content": "fn read_numbers() -> Option<Environment> {\n\n let mut line = String::new();\n\n if let Ok(_) = io::stdin().read_line(&mut line) {\n\n let v: Vec<usize> = line.split_whitespace()\n\n .map(|n| n.parse::<usize>().unwrap())\n\n .collect();\n\n if v.iter().all(|v| *v==0) {\n\n return None;\n\n }\n\n return Some(Environment::new(v));\n\n }\n\n None\n\n}\n\n\n", "file_path": "billiard/src/main.rs", "rank": 56, "score": 41959.37261177586 }, { "content": "fn gen_collatz_bound_with_sieve(bound: Range<u64>) {\n\n let mut sieve = CollatzSieve::new(); \n\n for i in bound {\n\n let mut coll = Collatz::with_sieve(i, &mut sieve);\n\n while let Some(_) = coll.next() {\n\n // Stuff\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"collatz/benches/bench.rs", "rank": 57, "score": 37728.916460682434 }, { "content": "fn gen_collatz_bound_without_sieve(bound: Range<u64>) {\n\n for i in bound {\n\n let mut coll = Collatz::new(i);\n\n while let Some(_) = coll.next() {\n\n // stuff\n\n }\n\n }\n\n}\n\n\n", "file_path": "collatz/benches/bench.rs", "rank": 58, "score": 36970.94271979995 }, { "content": "#![feature(test)]\n\n\n\nextern crate test;\n\nextern crate slow_primes;\n\n\n", "file_path": "rust_prime/benches/extern.rs", "rank": 59, "score": 28949.84670802918 }, { "content": "error_chain!{\n\n foreign_links {\n\n Io(::std::io::Error);\n\n }\n\n}\n", "file_path": "dice_spell/src/errors.rs", "rank": 60, "score": 28912.30744644729 }, { "content": " _ => false,\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Debug)]\n\npub enum AliquotSum {\n\n Perfect,\n\n Deficient(usize),\n\n Abundant(usize),\n\n}\n\n\n\n//impl<T> Aliquot for T where T: Divisors {\n\nimpl Aliquot for usize {\n\n fn aliquot_sum(&self) -> AliquotSum {\n\n let sum = self.divisors_sum();\n\n match sum {\n\n _ if sum == *self => AliquotSum::Perfect,\n\n _ if sum < *self => AliquotSum::Deficient(sum),\n\n _ if sum > *self => AliquotSum::Abundant(sum),\n", "file_path": "non-abundant-sums/src/main.rs", "rank": 61, "score": 27450.17928518546 }, { "content": "\n\nuse num::complex::Complex64;\n\n\n\nuse std::fmt;\n\n\n\n/// Where is the complex number.\n\npub enum ComplexPlace {\n\n /// Max iterations reached, may not diverge.\n\n Max,\n\n // Period,\n\n /// Inside Cardioid, does not diverge.\n\n Cardioid,\n\n /// Inside Bulb, does not diverge.\n\n Bulb,\n\n /// Escaped after n iteration, diverges.\n\n Escapes(Complex64, usize),\n\n}\n\n\n\nimpl fmt::Debug for ComplexPlace {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 62, "score": 27448.45223411837 }, { "content": " _ => unreachable!()\n\n }\n\n }\n\n}\n\nimpl Divisors for usize {\n\n fn divisors(&self) -> 
Vec<usize> {\n\n if *self == 0 {\n\n return vec![];\n\n }\n\n let mut vec = vec![1];\n\n // TODO: Can we optimise this for even numbers\n\n // TODO: Use some kind of seive if number is lare enough\n\n // TODO: Use suggestions from https://stackoverflow.com/q/3545259/4284367\n\n // FIXME: Use sqrt\n\n for i in 2..(*self/2 + 1) {\n\n if self % i == 0 {\n\n vec.push(i);\n\n }\n\n }\n\n vec\n", "file_path": "non-abundant-sums/src/main.rs", "rank": 63, "score": 27446.8009593465 }, { "content": "/// EULER 23\n\n/// https://projecteuler.net/problem=23\n\n///\n\n/// Find the sum of all the positive integers which cannot be written as the sum of two abundant\n\n/// numbers.\n\n///\n\n/// TODO: \n\n/// * Make into library, could be useful.\n\n/// * Implement Aliquot for all positive integer types. One thing to notice though is if the\n\n/// value of an abundant number can overflow what is holding it. Actually I don't think it can.\n\n/// Maybe the values inside the enum should be the offset.\n\n/// * Add 'num' crate for traits.\n\n/// \n\nuse std::collections::BTreeSet;\n", "file_path": "non-abundant-sums/src/main.rs", "rank": 64, "score": 27445.052227993587 }, { "content": " }\n\n\n\n fn divisors_sum(&self) -> usize {\n\n if *self == 0 {\n\n return 0;\n\n }\n\n let mut sum = 1;\n\n // TODO: Can we optimise this for even numbers\n\n // TODO: Use some kind of seive if number is lare enough\n\n // TODO: Use suggestions from https://stackoverflow.com/q/3545259/4284367\n\n // FIXME: Use sqrt\n\n for i in 2..(*self/2 + 1) {\n\n if self % i == 0 {\n\n sum += i;\n\n }\n\n }\n\n sum\n\n\n\n }\n\n} \n", "file_path": "non-abundant-sums/src/main.rs", "rank": 65, "score": 27444.50620948641 }, { "content": " pub fn compute_iterations(&self, c: Complex64) -> ComplexPlace {\n\n let mut z = c.clone();\n\n let mut iter = 0;\n\n //let mut c = c.clone();\n\n if MandelbrotGenerator::is_in_p2_bulb(&c) {\n\n return ComplexPlace::Bulb;\n\n } else if MandelbrotGenerator::is_in_cardioid(&c) {\n\n 
return ComplexPlace::Cardioid;\n\n }\n\n while z.norm_sqr() <= self.escape {\n\n if iter > self.max_iterations {\n\n return ComplexPlace::Max;\n\n }\n\n z = z.powf(2.0) + c;\n\n iter += 1;\n\n }\n\n ComplexPlace::Escapes(z, iter)\n\n }\n\n}\n\n\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 66, "score": 27444.4388269547 }, { "content": " } \n\n}\n\n\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use num::complex::Complex64;\n\n #[test]\n\n fn basic_test() {\n\n let mut gen = MandelbrotGenerator::default();\n\n gen.max_iterations = 21;\n\n println!();\n\n gen.set_image_space(200, 100);\n\n //gen.set_image_space([0, 200], [0,100]);\n\n let (width, height) = gen.get_image_space();\n\n //let (width, height) = (200,100);\n\n for y in 0..height {\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 67, "score": 27443.33478978292 }, { "content": "/// ### Sources:\n\n/// * [1]: http://stackoverflow.com/questions/8381675/how-to-perform-simple-zoom-into-mandelbrot-set\n\n/// * [2]: https://en.wikibooks.org/wiki/Fractals/Iterations_in_the_complex_plane/Mandelbrot_set\n\n/// \n\npub struct MandelbrotGenerator {\n\n c_max: Complex64,\n\n c_min: Complex64,\n\n width: usize,\n\n height: usize,\n\n re_factor: f64,\n\n im_factor: f64,\n\n origin: Complex64,\n\n pub escape: f64,\n\n pub max_iterations: usize,\n\n}\n\n\n\nimpl MandelbrotGenerator {\n\n /// returns width, height\n\n pub fn get_image_space(&self) -> (usize, usize) {\n\n (self.width, self.height)\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 68, "score": 27443.23157630935 }, { "content": " use self::ComplexPlace::*;\n\n let val = match *self {\n\n Max => \"M\".into(),\n\n Cardioid => \"C\".into(),\n\n Bulb => \"B\".into(),\n\n Escapes(_, iter) => {\n\n if let Some(ref prec) = f.precision() {\n\n format!(\"{}\", \n\n (iter - 0) * (9 - 0)/ (prec - 0 ) + 0\n\n )\n\n } else {\n\n panic!(\"No prec specified\")\n\n }\n\n },\n\n };\n\n write!(f, \"{}\", 
val)\n\n }\n\n}\n\n\n\n// complex/image space = [-2,1]\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 69, "score": 27442.91520105496 }, { "content": "/// Mandelbrot\n\n/// \n\n/// Formula\n\n/// \n\n/// x_(k+1) = x^(2)_k - y^(2)_k + Re(c)\n\n/// \n\n/// y_(k+1) = 2 x_k * y_k + Im(c)\n\n/// \n\n/// Iterate until x*x + y*y > 2 or max iter has been reached\n\n/// \n\n/// Note that mandelbrot is symmetric on x plane, i.e height/2 is only needed.\n\n/// This means that \n\n/// ```math\n\n/// color(x,y) = color(x,y)\n\n/// ```\n\n/// \n\n/// # TODO:\n\n/// * impl perturbation (see [2]¤Perturbation_Theory)\n\n/// * use num::rational\n\n/// \n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 70, "score": 27442.85974455012 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn is_perfect() {\n\n println!(\"{:?}\", 28usize.divisors());\n\n assert!(28usize.is_perfect());\n\n }\n\n\n\n #[test]\n\n fn prime_is_deficient() {\n\n println!(\"{:?}\", 17usize.divisors());\n\n assert!(17usize.is_deficient());\n\n }\n\n #[test]\n\n fn is_abundant() {\n\n println!(\"{:?}\", 12usize.divisors());\n\n assert!(12usize.is_abundant());\n\n }\n\n\n\n}\n", "file_path": "non-abundant-sums/src/main.rs", "rank": 71, "score": 27441.303229538527 }, { "content": " pub fn y_to_im(&self, y: usize) -> f64 {\n\n self.c_min.im + y as f64 *self.im_factor + self.origin.im\n\n // ((y-self.y_space[0])/(self.y_space[1] - self.y_space[0]) * self.c_max.im - self.c_min.im) + self.c_min.im\n\n }\n\n\n\n pub fn x_to_re(&self, x: usize) -> f64 {\n\n self.c_min.re + x as f64 *self.re_factor + self.origin.re\n\n // ((x-self.x_space[0])/(self.x_space[1] - self.x_space[0]) * self.c_max.re - self.c_min.re) + self.c_min.re\n\n }\n\n \n\n pub fn is_in_p2_bulb(c: &Complex64) -> bool {\n\n (c.re + 1.0).powf(2.0) + c.im.powf(2.0) < 1.0/16.0 \n\n }\n\n\n\n pub fn is_in_cardioid(c: &Complex64) -> bool {\n\n let q = (c.re - 1.0/4.0).powf(2.0) + c.im.powf(2.0);\n\n q 
* (q + (c.re -1.0/4.0)) < 1.0/4.0 * c.im.powf(2.0) \n\n }\n\n\n\n /// Returns iterations of complex number\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 72, "score": 27441.27898907082 }, { "content": "impl Default for MandelbrotGenerator {\n\n fn default() -> Self {\n\n // Works like this\n\n let c_max = Complex64::new(0.5, 1.25); // (lrightx, ulefty )\n\n let c_min = Complex64::new(-2.0, -1.25); // (uleftx, lrighty)\n\n let width = 800;\n\n let height = 800;\n\n let re_factor = (c_max.re- c_min.re) / (width-1) as f64;\n\n let im_factor = (c_max.im- c_min.im) / (height-1) as f64;\n\n MandelbrotGenerator {\n\n c_max: c_max,\n\n c_min: c_min,\n\n width: width,\n\n height: height,\n\n origin: Complex64::new(0.,0.),\n\n re_factor: re_factor,\n\n im_factor: im_factor,\n\n escape: 4.0,\n\n max_iterations: 100,\n\n }\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 73, "score": 27440.54243561374 }, { "content": " let c_im = gen.y_to_im(y);\n\n for x in 0..width{\n\n let c_re = gen.x_to_re(x);\n\n let c = Complex64::new(c_re, c_im);\n\n let iterations = gen.compute_iterations(c);\n\n print!(\"{:.100?}\", iterations);\n\n }\n\n println!();\n\n }\n\n }\n\n}\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 74, "score": 27439.49407906207 }, { "content": " }\n\n\n\n ///\n\n ///\n\n ///# TODO\n\n pub fn set_image_space(&mut self, width: usize, height: usize) {\n\n self.width = width;\n\n self.height = height;\n\n self.re_factor = (self.c_max.re- self.c_min.re) / (width-1) as f64;\n\n self.im_factor = (self.c_max.im- self.c_min.im) / (height-1) as f64;\n\n //self.im_factor = (self.c_max.im- self.c_min.im) / (y_space[1]- y_space[0]-1) as f64;\n\n }\n\n pub fn zoom(&mut self, zoom_factor: f64, origin: Complex64) {\n\n self.origin = origin;\n\n self.c_max = self.c_max.unscale(zoom_factor);\n\n self.c_min = self.c_min.unscale(zoom_factor);\n\n self.re_factor = (self.c_max.re- self.c_min.re) / (self.width-1) as f64;\n\n self.im_factor 
= (self.c_max.im- self.c_min.im) / (self.height-1) as f64;\n\n\n\n }\n", "file_path": "mandelbrot/src/mandelbrot_gen/mod.rs", "rank": 75, "score": 27439.192250090946 }, { "content": "#[macro_use]\n\nextern crate proc_macro_hack;\n\nextern crate syn;\n\nextern crate quote;\n\nextern crate inflector;\n\n\n\n\n\nproc_macro_item_impl! {\n\n pub fn gen_elements(input: &str) -> String {\n\n let ast = syn::parse_\n\n }\n\n}\n", "file_path": "h2o/h2o_proc_impl/src/lib.rs", "rank": 76, "score": 26588.001948193545 }, { "content": "def sum_divisors(n):\n\n \"\"\"The sum of all proper divisors of n.\n\n \"\"\"\n\n if n == 1:\n\n return 1\n\n factors = prime_factors(n)\n\n count = Counter(factors)\n\n uniques = list(count.keys())\n\n denominator = reduce(mul, [n-1 for n in uniques])\n\n numerator = reduce(mul, [(n**(i+1))-1 for n, i in count.items()])\n\n result = (numerator/denominator) - n\n\n assert result.is_integer()\n", "file_path": "primes/primerelated.py", "rank": 77, "score": 25249.68943311545 }, { "content": "}\n\n\n\nimpl<T> Histogram<T> \n\n where T: std::cmp::Eq +\n\n std::hash::Hash +\n\n std::fmt::Debug\n\n{\n\n pub fn new() -> Histogram<T> {\n\n Histogram {\n\n map: HashMap::new()\n\n }\n\n }\n\n /// Returns the most occurring character(s).\n\n pub fn get_maximum(&self) -> Option<(Vec<&T>, u32)> {\n\n if self.map.is_empty() {\n\n return None\n\n }\n\n let mut max_vec: Vec<&T> = vec![];\n\n let mut max = 0;\n\n for (key, val) in self.map.iter() {\n", "file_path": "dice_spell/src/main.rs", "rank": 78, "score": 20.661375064584142 }, { "content": "extern crate num;\n\nextern crate image;\n\n\n\nmod mandelbrot_gen;\n\n\n\npub use mandelbrot_gen::{ComplexPlace, MandelbrotGenerator};\n\n\n\npub mod image_utils {\n\n use image;\n\n use num::complex::Complex64;\n\n use super::mandelbrot_gen::{MandelbrotGenerator, ComplexPlace};\n\n use image::ImageBuffer;\n\n pub fn generate_rgb(gen: &MandelbrotGenerator, x: u32,y: u32) -> image::Rgb<u8> {\n\n let c_re = gen.x_to_re(x as 
usize);\n\n let c_im = gen.y_to_im(y as usize);\n\n let c = Complex64::new(c_re, c_im);\n\n use self::ComplexPlace::*;\n\n match gen.compute_iterations(c) {\n\n Escapes(z,i) => {\n\n image::Rgb( match i % 16 {\n", "file_path": "mandelbrot/src/lib.rs", "rank": 79, "score": 20.6590963968606 }, { "content": " faces: Vec<char>,\n\n n: usize,\n\n}\n\n\n\nimpl Block {\n\n pub fn new(n: usize) -> Block {\n\n Block {\n\n faces: Vec::with_capacity(n),\n\n n,\n\n }\n\n }\n\n\n\n pub fn single(ch: char) -> Block {\n\n let mut block = Block::new(1);\n\n block.push(ch).expect(\"Cannot happen\");\n\n block\n\n }\n\n \n\n pub fn push(&mut self, ch: char) -> Result<()> {\n\n if self.faces.len() < self.n {\n", "file_path": "dice_spell/src/main.rs", "rank": 80, "score": 18.89417028505407 }, { "content": " pub fn as_ranking(&self) -> Ranking<Data> {\n\n let mut stack = BTreeMap::new();\n\n\n\n for (ch, count) in self.source.iter() {\n\n stack.insert(ch.clone(), (*count as f64 /self.total_entries as f64) * 100.0);\n\n }\n\n Ranking(stack)\n\n }\n\n\n\n pub fn append(&mut self, other: NGram<Data>) {\n\n for (ch, count) in other.source.iter() {\n\n *self.source.entry(ch.clone()).or_insert(1) += count.clone();\n\n }\n\n self.total_entries += other.total_entries;\n\n }\n\n \n\n pub fn cos_simularity(&self, other: &NGram<Data>) -> Option<f64> {\n\n let mut A = Vec::new();\n\n let mut B = Vec::new();\n\n for (cA, countA) in self.source.iter() {\n", "file_path": "lang-interp/src/ngram.rs", "rank": 81, "score": 18.625255837492823 }, { "content": "#![feature(question_mark)]\n\n//! 
Binary\n\n//!\n\n\n\nmod find_lang;\n\n\n\n#[macro_use]\n\nextern crate clap;\n\nuse clap::{App, Arg, SubCommand};\n\n\n\npub use lang_interp::{ToNGram, Language};\n\nextern crate lang_interp;\n\n\n\nuse std::fs::File;\n\nuse std::path::Path;\n\nuse std::error::Error;\n\nuse std::io::{ErrorKind, Read, Error as IoErr};\n\nuse std::collections::BTreeMap;\n\n\n", "file_path": "lang-interp/src/main.rs", "rank": 82, "score": 17.704725913831595 }, { "content": "// TODO: See if nom is valid for this\n\n#[macro_use]\n\nextern crate h2o_macros;\n\n\n\nelements_enum_impl\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n }\n\n}\n", "file_path": "h2o/h20/src/lib.rs", "rank": 83, "score": 17.62377766515522 }, { "content": " } else {\n\n unimplemented!()\n\n }\n\n }\n\n}\n\n\n\n// Not used currently\n\nimpl<T> ToNGram<T> for Vec<T> where T: Ord + Clone {\n\n fn to_ngram(self, n: u8) -> NGram<T> {\n\n unimplemented!();\n\n let mut count = BTreeMap::new();\n\n let mut entries = 0;\n\n for obj in self {\n\n entries += 1;\n\n *count.entry(obj).or_insert(0) += 1;\n\n }\n\n NGram::new(count, entries, n)\n\n }\n\n}\n", "file_path": "lang-interp/src/ngram.rs", "rank": 84, "score": 17.49935700261515 }, { "content": "\n\n\n\nimpl Morsify for str {\n\n type Error = ();\n\n fn to_morse<M>(&self, dict: M, buf: &mut [MorseCode]) -> Result<usize, Self::Error>\n\n where\n\n M: MorseDict,\n\n {\n\n let mut ind = 0;\n\n let mut inner_buffer = [MorseCode::Dot; 20];\n\n for ch in self.chars() {\n\n if ch == ' ' {\n\n if let Some(mut prev) = buf.get_mut(ind - 1) {\n\n if *prev == MorseCode::LetterGap {\n\n *prev = MorseCode::WordGap;\n\n continue;\n\n }\n\n }\n\n buf[ind] = MorseCode::WordGap;\n\n ind += 1;\n", "file_path": "morse/morse/src/lib.rs", "rank": 85, "score": 17.335651299216146 }, { "content": " Ok(buf_ind + 1)\n\n\n\n }\n\n\n\n fn morse_to_char(&self, morse: &[MorseCode]) -> Result<char, Self::Error> {\n\n let mut ind = 1usize;\n\n let mut iter = morse.iter();\n\n while 
let Some(code) = iter.next() {\n\n match *code {\n\n MorseCode::Dot => {\n\n ind = 2 * ind;\n\n }\n\n MorseCode::Dash => {\n\n ind = (ind * 2) + 1;\n\n }\n\n _ => return Err(()),\n\n }\n\n let next = iter.next();\n\n match *next.unwrap_or(&MorseCode::LetterGap) {\n\n MorseCode::Dot | MorseCode::Dash => return Err(()),\n", "file_path": "morse/morse/src/lib.rs", "rank": 86, "score": 16.90758463522444 }, { "content": " .max_values(1)\n\n .validator(validate_nums)\n\n )\n\n )\n\n .get_matches();\n\n \n\n let verbose = matches.is_present(\"verbose\");\n\n let do_twos = matches.is_present(\"do-twos\");\n\n match matches.subcommand() {\n\n (\"bound\", Some(sub_m)) => {\n\n let bound = parse_nums(sub_m.values_of(\"bound\").unwrap());\n\n println!(\"Got: {:?}\", bound);\n\n let range = if bound.len() == 2 {\n\n (bound[0]..(bound[1] + 1))\n\n } else {\n\n 1..(bound[0] + 1)\n\n };\n\n println!(\"Calculating collatz sequences between {:?}\", range);\n\n let mut sieve = CollatzSieve::new();\n\n let mut max = (1, 1); // Longest chain, length.\n", "file_path": "collatz/src/main.rs", "rank": 87, "score": 16.532743508258935 }, { "content": "\n\n pub fn get_grade(&self, n: usize) -> Option<Vec<(&Letter, u32)>> {\n\n if self.map.is_empty() || !self.map.keys().any(|f| f.repeat == n) {\n\n return None\n\n }\n\n\n\n let mut vec = vec![];\n\n\n\n for (key, val) in self.map.iter() {\n\n if key.repeat != n {\n\n continue;\n\n }\n\n vec.push((key, *val))\n\n }\n\n Some(vec)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Block {\n", "file_path": "dice_spell/src/main.rs", "rank": 88, "score": 16.483243506259434 }, { "content": "}\n\n\n\nimpl LuckyIter {\n\n fn new(max: usize) -> LuckyIter {\n\n LuckyIter {\n\n list: (1..max).collect(),\n\n current: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for LuckyIter {\n\n type Item = usize;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.current == 0 {\n\n let mut i = 0;\n\n self.list.drain_filter(|x| reduce(&2, &mut i, x));\n\n 
//println!(\"{:?}\", self.list);\n\n self.current = 1;\n\n Some(1)\n", "file_path": "lucky_numbers/src/main.rs", "rank": 89, "score": 16.1950777310133 }, { "content": " /// \n\n /// Make case agnostic\n\n pub fn is_base(&self, ch: char) -> bool {\n\n self.base == ch\n\n }\n\n\n\n pub fn new(base: char, repeat: usize) -> Letter {\n\n Letter {\n\n base,\n\n repeat,\n\n }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub struct Histogram<T>\n\n where T: std::cmp::Eq +\n\n std::hash::Hash + \n\n std::fmt::Debug\n\n{\n\n pub map: HashMap<T, u32>,\n", "file_path": "dice_spell/src/main.rs", "rank": 90, "score": 15.975469542339406 }, { "content": "//! EULER 22\n\n//! https://projecteuler.net/problem=22\n\n//!\n\n//! TODO:\n\n//! * Should my list store the names? Takes much memory\n\n#![feature(sort_unstable)]\n\nextern crate regex;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate error_chain;\n\n\n\nquick_main!(run);\n\n\n\npub mod errors {\n\n error_chain! {\n\n foreign_links {\n\n Io(::std::io::Error);\n\n } \n\n }\n\n}\n\n\n\nuse errors::*;\n\n\n\nuse regex::Regex;\n\n\n\nuse std::io::prelude::*;\n\nuse std::fs::File;\n", "file_path": "namenames_score/src/main.rs", "rank": 91, "score": 15.38925578805156 }, { "content": "#![feature(proc_macro)]\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde_yaml;\n\n\n\nmod ngram;\n\nmod language;\n\n\n\npub use ngram::{ToNGram, NGram};\n\npub use language::Language;\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n }\n\n}\n", "file_path": "lang-interp/src/lib.rs", "rank": 92, "score": 15.283544142244043 }, { "content": " .get_matches();\n\n\n\n match matches.subcommand() {\n\n (\"learn\", Some(sub_m)) => {\n\n let mut buf = open_file(sub_m.value_of(\"file\").unwrap()).unwrap();\n\n let mut ngrams = BTreeMap::new();\n\n ngrams.insert(1, buf.clone().to_ngram(1));\n\n ngrams.insert(2, buf.clone().to_ngram(2));\n\n {\n\n let lang = Language { \n\n language: 
sub_m.value_of(\"language\").unwrap().into(),\n\n ngrams: ngrams,\n\n };\n\n lang.write_lang(format!(\"{}.lang\", sub_m.value_of(\"language\").unwrap()));\n\n }\n\n },\n\n (\"guess\", Some(sub_m)) => {\n\n let mut buf = open_file(sub_m.value_of(\"file\").unwrap()).unwrap();\n\n \n\n let langs = find_lang::find_languages(format!(\n", "file_path": "lang-interp/src/main.rs", "rank": 93, "score": 15.19748438751596 }, { "content": " }\n\n Some(AB/(A2.sqrt() * B2.sqrt()))\n\n }\n\n pub fn cos_simularity_with_0(&self, other: NGram<Data>) -> f64 {\n\n unimplemented!()\n\n \n\n }\n\n}\n\n\n\nimpl<Data> fmt::Debug for NGram<Data> where Data: Ord + fmt::Debug + Clone {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{:?}\", self.source)\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct Ranking<Data: Ord + Sized>(BTreeMap<Data, f64>);\n\n\n\nimpl<Data> Ranking<Data> where Data: Ord {\n\n pub fn similarity(&self, other: &Ranking<Data>) -> f64 {\n", "file_path": "lang-interp/src/ngram.rs", "rank": 94, "score": 15.158444313613987 }, { "content": "//! An implementation of Ar* Sieve\n\n//!\n\n//! # TODO\n\n//! * Implement a future structure for the iterator.\n\n//! 
* Parallelize (if possible)\n\nuse std::iter;\n\nuse std::collections::BTreeMap;\n\n// TODO: Use proper name.\n\n/// An iterator for primes.\n\npub type Prime = u64;\n\npub type Composite = u64;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ArSieve {\n\n pub sieve: Box<BTreeMap<Composite, Vec<Prime>>>,\n\n _current: u64,\n\n limit: u64,\n\n}\n\n\n\nimpl ArSieve {\n", "file_path": "rust_prime/src/naive_sieve.rs", "rank": 95, "score": 14.989073087814742 }, { "content": " pub sieve_data: BTreeMap<u64, u64>,\n\n /// Unimplemented.\n\n ///\n\n /// FIXME\n\n /// This will be used as a way of seeing how the `BTreeMap`s are called.\n\n /// This way we can probably find a better solution.\n\n _access_debug: BTreeMap<u64, u64>,\n\n}// TODO: Make generic over all Sized and Eq\n\n\n\nimpl CollatzSieve {\n\n /// Make a new sieve.\n\n pub fn new() -> CollatzSieve {\n\n CollatzSieve {\n\n sieve: BTreeMap::new(),\n\n sieve_data: BTreeMap::new(),\n\n _access_debug: BTreeMap::new(),\n\n }\n\n }\n\n /// Insert data about a certain value.\n\n ///\n", "file_path": "collatz/src/lib/lib.rs", "rank": 96, "score": 14.936692322855745 }, { "content": " continue;\n\n }\n\n let size = dict.char_to_morse(&ch, &mut inner_buffer).map_err(|e| ())?;\n\n buf[ind..ind + size].clone_from_slice(&inner_buffer[0..size]);\n\n ind += size;\n\n }\n\n Ok(ind)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn it_works() {\n\n use self::MorseCode::*;\n\n let example = \"AAAAA\";\n\n let mut buf = [MorseCode::Dot; 30];\n\n let end = example.to_morse_standard(&mut buf).unwrap();\n\n assert!(end == 20);\n", "file_path": "morse/morse/src/lib.rs", "rank": 97, "score": 14.9185837348513 }, { "content": "//! Daily programmer challenge #326\n\n//! Multifaceted alphabet blocks\n\n//! \n\n//! https://redd.it/6t0zua\n\n\n\n//! 
Histogram\n\n\n\n\n\nextern crate clap;\n\n#[macro_use]\n\nextern crate error_chain;\n\n\n\nmod errors;\n\n\n\nuse errors::*;\n\n\n\nuse std::fmt;\n\n\n\nuse std::collections::HashMap;\n\n\n", "file_path": "dice_spell/src/main.rs", "rank": 98, "score": 14.762207977461662 }, { "content": "//! Our binary for collatz\n\nextern crate collatz;\n\nuse collatz::{CollatzSieve, Collatz};\n\n#[macro_use]\n\nextern crate clap;\n\nuse clap::{App, AppSettings, Arg, SubCommand, Values};\n\n\n\nuse std::error::Error;\n\n\n", "file_path": "collatz/src/main.rs", "rank": 99, "score": 14.761783436919309 } ]
Rust
src/main.rs
lang-import/rs-translate
60912ec66a6518a0436112b6825383d7bb90eb14
extern crate redis;
extern crate clap;
extern crate rouille;

use rouille::{Request, Response};
use std::{process, str};
use clap::{Arg, App};

/// Ask the translate-shell binary (`trans -S`) for its list of supported
/// engines. Entries starting with '*' mark the default engine and are
/// skipped (the plain name appears in the list as well).
///
/// Panics if the binary cannot be executed or prints non-UTF-8 output —
/// acceptable because this runs once at startup.
fn list_engines(command: &str) -> Vec<String> {
    let output = process::Command::new(command)
        .arg("-S")
        .output()
        .expect("list of supported engines")
        .stdout;
    str::from_utf8(&output)
        .expect("engine list must be valid UTF-8")
        .split_whitespace()
        .map(|x| x.trim())
        .filter(|x| !x.is_empty() && !x.starts_with("*"))
        .map(String::from)
        .collect()
}

/// Translate `word` into `lang` with one specific engine.
///
/// Returns `None` when the process cannot be spawned, produces no output,
/// or emits non-UTF-8 text.
fn translate_engine(command: &str, engine: &str, lang: &str, word: &str) -> Option<String> {
    // translate-shell expects the target language as ":<code>".
    let lang_arg = format!(":{}", lang);
    let output = match process::Command::new(command)
        .arg("-e").arg(engine)
        .arg("-b")
        .arg(lang_arg)
        .arg(word)
        .output()
    {
        Ok(out) => out.stdout,
        Err(_) => return None,
    };
    if output.is_empty() {
        return None;
    }
    String::from_utf8(output).ok()
}

/// Try every known engine in order; return the first successful
/// translation, or `None` when all engines fail.
fn translate(command: &str, engines: &[String], lang: &str, word: &str) -> Option<String> {
    for engine in engines {
        if let Some(v) = translate_engine(command, engine, lang, word) {
            return Some(v);
        }
    }
    None
}

/// Look the word up in the redis cache first; on a miss (or any cache
/// error) fall back to the live engines and, on success, write the result
/// back so later requests are served from the cache.
///
/// Cache layout: one redis hash per language, keyed by word.
/// Cache failures are logged but never prevent a translation attempt.
fn translate_cached(connection: &redis::Connection, command: &str, engines: &[String],
                    lang: &str, word: &str) -> Option<String> {
    let cached: Option<String> = match redis::cmd("HGET").arg(lang).arg(word).query(connection) {
        Ok(v) => v,
        Err(e) => {
            println!("failed access the cache: {}", e);
            None
        }
    };
    if let Some(v) = cached {
        return Some(v);
    }
    let fresh = match translate(command, engines, lang, word) {
        Some(v) => v,
        None => return None,
    };
    // Best effort: a failed write only costs a recomputation later.
    if let Err(e) = redis::cmd("HSET").arg(lang).arg(word).arg(fresh.as_str())
        .query::<()>(connection)
    {
        println!("failed save to cache word {} for lang {}: {}", word, lang, e);
    }
    Some(fresh)
}

/// Entry point: parse CLI flags, discover engines once at startup, and
/// serve `GET /translate/<word>/to/<lang>` forever.
fn main() {
    let matches = App::new("Translate API")
        .version("1.0")
        .author("Alexander Baryshnikov <[email protected]>")
        .about("exposes trans-shell to Web")
        .arg(Arg::with_name("binary")
            .short("b")
            .long("bin")
            .help("path to binary for translate-shell")
            .default_value("/usr/bin/trans")
            .takes_value(true))
        .arg(Arg::with_name("redis")
            .short("r")
            .long("redis")
            .help("redis URL")
            .default_value("redis://127.0.0.1/")
            .takes_value(true))
        .arg(Arg::with_name("address")
            .short("a")
            .long("address")
            .help("binding address")
            .default_value("127.0.0.1:8000")
            .takes_value(true))
        .get_matches();

    let binding_addr = matches.value_of("address").unwrap();
    let command: String = matches.value_of("binary").unwrap().to_string();
    let redis_url = matches.value_of("redis").unwrap();

    let client = redis::Client::open(redis_url).expect("connect to redis");

    let engines = list_engines(command.as_str());
    for engine in &engines {
        println!("found engine {}", engine)
    }

    println!("started server on {}", binding_addr);
    rouille::start_server(binding_addr, move |request: &Request| {
        let url = request.url();
        // Expected shape: /translate/<word>/to/<lang>
        let segments: Vec<&str> = url.as_str().split("/").collect();
        if segments.len() != 5 || !(segments[1] == "translate" && segments[3] == "to") {
            return Response::text("bad request").with_status_code(422);
        }
        let word = segments[2];
        let lang = segments[4];
        // A redis outage must not take the service down: serve an uncached
        // translation instead of panicking the worker thread.
        let answer = match client.get_connection() {
            Ok(connection) => {
                translate_cached(&connection, command.as_str(), &engines, lang, word)
            }
            Err(e) => {
                println!("failed to open redis connection: {}", e);
                translate(command.as_str(), &engines, lang, word)
            }
        };
        // Previously this was `.unwrap()`, which panicked the handler
        // whenever every engine failed; report upstream failure instead.
        match answer {
            Some(text) => Response::text(text),
            None => Response::text("translation failed").with_status_code(502),
        }
    });
}
extern crate redis; extern crate clap; extern crate rouille; use rouille::{Request, Response}; use std::{process, str}; use clap::{Arg, App};
fn translate_engine(command: &str, engine: &str, lang: &str, word: &str) -> Option<String> { let lang_arg: String = ":".to_string() + lang; let output = match process::Command::new(command) .arg("-e").arg(engine).arg("-b").arg(lang_arg).arg(word) .output() { Ok(v) => v.stdout, Err(_) => return None }; if output.is_empty() { return None; } match String::from_utf8(output) { Ok(v) => Some(v), Err(_) => None } } fn translate(command: &str, engines: &[String], lang: &str, word: &str) -> Option<String> { for engine in engines { match translate_engine(&command, &engine.as_str(), &lang, &word) { Some(v) => return Some(v), None => {} }; } None } fn translate_cached(connection: &redis::Connection, command: &str, engines: &[String], lang: &str, word: &str) -> Option<String> { let value = match redis::cmd("HGET").arg(lang).arg(word).query(connection) { Err(e) => { println!("failed access the cache: {}", e); translate(&command, &engines, &lang, &word) } Ok(value) => { match value { Some(v) => return v, None => translate(&command, &engines, &lang, &word) } } }; match value { Some(v) => { match redis::cmd("HSET").arg(lang).arg(word).arg(v.as_str()).query(connection) { Err(e) => println!("failed save to cache word {} for lang {}: {}", word, lang, e), Ok(()) => {} }; Some(v) } None => None } } fn main() { let matches = App::new("Translate API") .version("1.0") .author("Alexander Baryshnikov <[email protected]>") .about("exposes trans-shell to Web") .arg(Arg::with_name("binary") .short("b") .long("bin") .help("path to binary for translate-shell") .default_value("/usr/bin/trans") .takes_value(true)) .arg(Arg::with_name("redis") .short("r") .long("redis") .help("redis URL") .default_value("redis://127.0.0.1/") .takes_value(true)) .arg(Arg::with_name("address") .short("a") .long("address") .help("binding address") .default_value("127.0.0.1:8000") .takes_value(true)) .get_matches(); let binding_addr = matches.value_of("address").unwrap(); let command: String = 
matches.value_of("binary").unwrap().to_string(); let redis_url = matches.value_of("redis").unwrap(); let client = redis::Client::open(redis_url).expect("connect to redis"); let engines = list_engines(command.as_str()); for engine in &engines { println!("found engine {}", engine) } println!("started server on {}", binding_addr); rouille::start_server(binding_addr, move |request: &Request| { let u = request.url(); let segments: Vec<&str> = u.as_str().split("/").collect(); if segments.len() != 5 || !(segments[1] == "translate" && segments[3] == "to") { return Response::text("bad request").with_status_code(422); } let word = segments[2]; let lang = segments[4]; let connection = client.get_connection().expect("open connection to redis"); let ans = translate_cached(&connection, command.as_str(), &engines, lang, word).unwrap(); Response::text(ans) }); }
fn list_engines(command: &str) -> Vec<String> { let output = process::Command::new(command) .arg("-S") .output().expect("list of supported engines").stdout; str::from_utf8(&output) .unwrap() .split_whitespace() .map(|x| x.trim()) .filter(|x| !x.is_empty() && !x.starts_with("*")) .map(|x| String::from(x)) .collect() }
function_block-full_function
[]
Rust
src/usi/chiyuri.rs
muzudho/rust-kifuwarabe-wcsc30
e21fda4d648c8fa162ca3e59dd2d85dab6272fc5
use crate::entities::cosmic::playing::{Game, PosNums}; use crate::entities::cosmic::universe::Universe; use crate::entities::law::cryptographic::*; use crate::entities::law::usi::*; use crate::entities::spaceship::equipment::Beam; use crate::entities::spaceship::facility::{CommandRoom, GameRoom}; use crate::movegen::PseudoLegalMoves; use crate::position::Square; use crate::position::FILE_1; use crate::usi::Chiyuri; use crate::view::print_move_list; use rand::Rng; impl Chiyuri { pub fn do_(universe: &mut Universe, move_code: &str) { if read_move_code(&mut universe.game, move_code) { universe.game.history.decrease_moves_num(); let ply = universe.game.history.moves_num(); let move_ = universe.game.history.moves[ply as usize]; universe.game.do_move(move_); } } pub fn genmove(game: &Game) { let move_list = PseudoLegalMoves::generate(game.history.get_phase(), &game.position, true); print_move_list("genmove", &game.position, &move_list); } pub fn hash(universe: &Universe) { Beam::shoot("局面ハッシュ表示"); let s = universe.game.get_positions_hash_text(); Beam::shoot(&s); } pub fn how_much(tokens: &Vec<&str>) { let bestmove = tokens[1]; Beam::shoot(&format!("Debug | bestmove=|{}|", bestmove)); } pub fn record(universe: &Universe) { Beam::shoot("棋譜表示"); let s = universe.game.get_moves_history_debug_text(); Beam::shoot(&s); } /* TODO pub fn kiki(universe: &Universe) { // 利き数表示 let s = RestRoom::to_string(&universe.game, Phase::First); Beam::shoot(&s); let s = RestRoom::to_string(&universe.game, Phase::Second); Beam::shoot(&s); } */ pub fn list40(universe: &Universe) { Beam::shoot("----駒リスト40表示 ここから----"); universe .game .position .for_all_pieces_on_board(&mut |i, sq, pc_ex| { Beam::shoot(&format!( "[{}]{}{}", i, if let Some(sq) = sq { format!(" {:?}", sq) } else { " --".to_string() }, if let Some(piece_val) = pc_ex { format!(" {} {:?}", piece_val.piece, piece_val.num) } else { " --".to_string() } )); }); Beam::shoot("----駒リスト40表示 ここまで----"); } pub fn len0(universe: &mut Universe) { 
Beam::shoot("len==0"); if !&universe.dialogue_mode { universe.dialogue_mode = true; CommandRoom::print_title(); } else { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } } pub fn pos(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } pub fn pos0(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Start); Beam::shoot(&s); } pub fn rand() { Beam::shoot("3<len rand"); let secret_number = rand::thread_rng().gen_range(1..101); Beam::shoot(&format!("乱数={}", secret_number)); } pub fn same(universe: &Universe) { let count = universe.game.count_same_position(); Beam::shoot(&format!("同一局面調べ count={}", count)); } pub fn startpos(universe: &mut Universe) { let tokens: Vec<&str> = POS_1.split(' ').collect(); set_position(&mut universe.game, &tokens); } pub fn teigi_conv() { Beam::shoot("teigi::convのテスト"); for ms in 1..9 { for hash in 0..10 { let sq = Square::from(FILE_1, ms); let next = push_sq_to_hash(hash, sq); let (hash_orig, sq_orig) = pop_sq_from_hash(next); Beam::shoot( &format!("push_ms_to_hash(0b{:4b},0b{:5b})=0b{:11b} pop_sq_from_hash(...)=(0b{:4b},0b{:5b})" ,hash ,ms ,next ,hash_orig ,sq_orig.number() )); } } } pub fn undo(universe: &mut Universe) { if !universe.game.undo_move() { Beam::shoot(&format!( "ply={} を、これより戻せません", universe.game.history.moves_num() )); } } }
use crate::entities::cosmic::playing::{Game, PosNums}; use crate::entities::cosmic::universe::Universe; use crate::entities::law::cryptographic::*; use crate::entities::law::usi::*; use crate::entities::spaceship::equipment::Beam; use crate::entities::spaceship::facility::{CommandRoom, GameRoom}; use crate::movegen::PseudoLegalMoves; use crate::position::Square; use crate::position::FILE_1; use crate::usi::Chiyuri; use crate::view::print_move_list; use rand::Rng; impl Chiyuri { pub fn do_(universe: &mut Universe, move_code: &str) { if read_move_code(&mut universe.game, move_code) { universe.game.history.decrease_moves_num(); let ply = universe.game.history.moves_num(); let move_ = universe.game.history.moves[ply as usize]; universe.game.do_move(move_); } } pub fn genmove(game: &Game) { let move_list = PseudoLegalMoves::generate(game.history.get_phase(), &game.position, true); print_move_list("genmove", &game.position, &move_list); } pub fn hash(universe: &Universe) { Beam::shoot("局面ハッシュ表示"); let s = universe.game.get_positions_hash_text(); Beam::shoot(&s); } pub fn how_much(tokens: &Vec<&str>) { let bestmove = tokens[1]; Beam::shoot(&format!("Debug | bestmove=|{}|", bestmove)); } pub fn record(universe: &Universe) { Beam::shoot("棋譜表示"); let s = universe.game.get_moves_history_debug_text(); Beam::shoot(&s); } /* TODO pub fn kiki(universe: &Universe) { // 利き数表示 let s = RestRoom::to_string(&universe.game, Phase::First); Beam::shoot(&s); let s = RestRoom::to_string(&universe.game, Phase::Second); Beam::shoot(&s); } */ pub fn list40(universe: &Universe) { Beam::shoot("----駒リスト40表示 ここから----"); universe .game .position .for_all_pieces_on_board(&mut |i, sq, pc_ex| { Beam::shoot(&format!( "[{}]{}{}", i, if let Some(sq) = sq { format!(" {:?}", sq) } else { " --".to_string() }, if let Some(piece_val) = pc_ex { format!(" {} {:?}", piece_val.piece, piece_val.num) } else { " --".to_string() } )); }); Beam::shoot("----駒リスト40表示 ここまで----"); } pub fn len0(universe: &mut Universe) { 
Beam::shoot("len==0"); if !&universe.dialogue_mode { universe.dialogue_mode = true; CommandRoom::print_title(); } else { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } } pub fn pos(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } pub fn pos0(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Start); Beam::shoot(&s); } pub fn rand() { Beam::shoot("3<len rand"); let secret_number = rand::thread_rng().gen_range(1..101); Beam::shoot(&format!("乱数={}", secret_number)); } pub fn same(universe: &Universe) { let count = universe.game.count_same_position(); Beam::shoot(&format!("同一局面調べ count={}", count)); } pub fn startpos(universe: &mut Universe) { let tokens: Vec<&str> = POS_1.split(' ').collect(); set_position(&mut universe.game, &tokens); }
pub fn undo(universe: &mut Universe) { if !universe.game.undo_move() { Beam::shoot(&format!( "ply={} を、これより戻せません", universe.game.history.moves_num() )); } } }
pub fn teigi_conv() { Beam::shoot("teigi::convのテスト"); for ms in 1..9 { for hash in 0..10 { let sq = Square::from(FILE_1, ms); let next = push_sq_to_hash(hash, sq); let (hash_orig, sq_orig) = pop_sq_from_hash(next); Beam::shoot( &format!("push_ms_to_hash(0b{:4b},0b{:5b})=0b{:11b} pop_sq_from_hash(...)=(0b{:4b},0b{:5b})" ,hash ,ms ,next ,hash_orig ,sq_orig.number() )); } } }
function_block-full_function
[ { "content": "/// position コマンド読取\n\npub fn set_position(game: &mut Game, tokens: &Vec<&str>) {\n\n assert_eq!(tokens[0], \"position\");\n\n assert!(\n\n Regex::new(r\"[startpos|sfen]\").unwrap().is_match(tokens[1]),\n\n \"tokens1=[{}]\",\n\n tokens[1].to_string()\n\n );\n\n\n\n // 局面をクリアー。手目も 0 に戻します。\n\n game.clear();\n\n\n\n // # Examples\n\n //\n\n // ```\n\n // position startpos moves 7g7f 3c3d 2g2f\n\n // 0 1 2 3..\n\n // position sfen lnsgkgsnl/9/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL w - 1 moves 5a6b 7g7f 3a3b\n\n // 0 1 2 3 4 5 6 7..\n\n // ```\n\n\n", "file_path": "src/entities/law/usi.rs", "rank": 0, "score": 260936.1895931799 }, { "content": "/// position コマンド 盤上部分のみ 読取\n\n/// 初期化は既に終わらせてあります。\n\npub fn read_board(game: &mut Game, board_str: &str) {\n\n // 初期盤面\n\n let position = game.mut_starting();\n\n let mut file = FILE_9; //9筋から右方向へ読取\n\n let mut rank = RANK_1;\n\n\n\n // `/` か、`+`か、1桁の数か、1文字のアルファベットのいずれかだぜ☆(^~^)それ以外なら盤パート終了☆(^~^)\n\n enum BoardPart {\n\n /// 改行のようなものだぜ☆(^~^)\n\n NewLine,\n\n /// スペース数☆(^~^)\n\n Number(u8),\n\n /// 駒☆(^~^)+で始まるものもこっちだぜ☆(^~^)\n\n Alphabet(Piece),\n\n }\n\n\n\n let len = board_str.len();\n\n let mut starts = 0;\n\n\n\n 'ban: while 0 < (len - starts) {\n", "file_path": "src/entities/law/usi.rs", "rank": 1, "score": 224362.08401906936 }, { "content": "/// 指し手読取\n\n/// 例: 7g7f\n\n///\n\n/// 読み取った指し手は、棋譜に入れる。\n\n/// 現在の手目のところに入れ、手目のカウントアップも行う。\n\npub fn read_move_code(game: &mut Game, move_code: &str) -> bool {\n\n let len = move_code.len();\n\n // 4文字か5文字あるはず。\n\n if len < 4 {\n\n // 指し手読取終了時にここを通るぜ☆(^~^)\n\n // 残り4文字もない。\n\n return false;\n\n }\n\n\n\n let mut starts = 0;\n\n\n\n let from = match &move_code[starts..=starts] {\n\n // 1文字目が駒だったら打。2文字目は必ず「*」なはずなので読み飛ばす。\n\n \"R\" => {\n\n starts += 2;\n\n match game.history.get_phase() {\n\n Phase::First => 101,\n\n Phase::Second => 109,\n\n }\n\n }\n", "file_path": "src/entities/law/usi.rs", "rank": 2, "score": 203470.470388702 }, { "content": "/// 打はテストできない\n\npub 
fn _assert_in_board_as_absolute(sq: Square, hint: &str) {\n\n debug_assert!(\n\n (10 < sq.number() && sq.number() < 20)\n\n || (20 < sq.number() && sq.number() < 30)\n\n || (30 < sq.number() && sq.number() < 40)\n\n || (40 < sq.number() && sq.number() < 50)\n\n || (50 < sq.number() && sq.number() < 60)\n\n || (60 < sq.number() && sq.number() < 70)\n\n || (70 < sq.number() && sq.number() < 80)\n\n || (80 < sq.number() && sq.number() < 90)\n\n || (90 < sq.number() && sq.number() < 100),\n\n \"abs-sq=|{}| hint={}\",\n\n sq.number(),\n\n hint\n\n );\n\n}\n\n\n", "file_path": "src/position/rotation.rs", "rank": 3, "score": 192257.47288455814 }, { "content": "/// 独自コマンド☆(^~^)\n\nfn help_chiyuri(universe: &mut Universe, tokens: &Vec<&str>) {\n\n match tokens[0] {\n\n \"do\" => {\n\n // do 7g7f\n\n Chiyuri::do_(universe, tokens[1]);\n\n }\n\n \"genmove\" => {\n\n Chiyuri::genmove(&universe.game);\n\n }\n\n \"how-much\" => {\n\n Chiyuri::how_much(tokens);\n\n }\n\n \"hash\" => {\n\n Chiyuri::hash(universe);\n\n }\n\n \"record\" => {\n\n // 棋譜(指し手)の表示\n\n Chiyuri::record(universe);\n\n // L\n\n }\n", "file_path": "src/usi/mod.rs", "rank": 4, "score": 174747.86471834 }, { "content": "/// 現在の局面での、指し手の一覧を表示するぜ☆(^~^)\n\npub fn print_move_list(title: &str, position: &Position, move_list: &Vec<Move>) {\n\n Beam::shoot(&format!(\"+\\n| {}\", title));\n\n Beam::shoot(&format!(\"| Moves count={}\", move_list.len()));\n\n // 辞書順ソート\n\n let mut move_names = Vec::new();\n\n for move_ in move_list {\n\n let (_, to, _) = destructure_move(*move_);\n\n let ss_str = format!(\n\n \"{}{}\",\n\n format!(\"{}\", to_move_code(*move_)),\n\n if let Some(captured) = position.piece_at_board(to) {\n\n format!(\" ({})\", captured.piece)\n\n } else {\n\n \"\".to_string()\n\n }\n\n );\n\n move_names.push(ss_str);\n\n }\n\n // move_names.sort();\n\n move_names.sort_by(|y_str, x_str| {\n", "file_path": "src/view/mod.rs", "rank": 5, "score": 154848.58773859806 }, { "content": "pub fn square_to_hand_type(sq: 
Square) -> HandType {\n\n match sq.number() {\n\n 100 | 108 => HandType::King,\n\n 101 | 109 => HandType::Rook,\n\n 102 | 110 => HandType::Bishop,\n\n 103 | 111 => HandType::Gold,\n\n 104 | 112 => HandType::Silver,\n\n 105 | 113 => HandType::Knight,\n\n 106 | 114 => HandType::Lance,\n\n 107 | 115 => HandType::Pawn,\n\n _ => panic!(\"square_to_hand_type sq={}\", sq),\n\n }\n\n}\n\n\n", "file_path": "src/position/mod.rs", "rank": 6, "score": 150366.6945602512 }, { "content": "pub fn square_to_hand_piece(sq: Square) -> HandPiece {\n\n match sq.number() {\n\n 100 => HandPiece::King1,\n\n 101 => HandPiece::Rook1,\n\n 102 => HandPiece::Bishop1,\n\n 103 => HandPiece::Gold1,\n\n 104 => HandPiece::Silver1,\n\n 105 => HandPiece::Knight1,\n\n 106 => HandPiece::Lance1,\n\n 107 => HandPiece::Pawn1,\n\n 108 => HandPiece::King2,\n\n 109 => HandPiece::Rook2,\n\n 110 => HandPiece::Bishop2,\n\n 111 => HandPiece::Gold2,\n\n 112 => HandPiece::Silver2,\n\n 113 => HandPiece::Knight2,\n\n 114 => HandPiece::Lance2,\n\n 115 => HandPiece::Pawn2,\n\n _ => panic!(\"(Err.44) Hand address fail\"),\n\n }\n", "file_path": "src/position/mod.rs", "rank": 7, "score": 150366.6945602512 }, { "content": "// マスの一覧を表示するぜ☆(^~^)\n\npub fn print_sq_list(title: &str, sq_list: &Vec<Square>) {\n\n Beam::shoot(&format!(\"+\\n| {}\", title));\n\n Beam::shoot(&format!(\"| Square count={}\", sq_list.len()));\n\n // ソート\n\n let mut sq_list2 = sq_list.clone();\n\n sq_list2.sort();\n\n\n\n for (i, sq) in sq_list2.into_iter().enumerate() {\n\n Beam::shoot(&format!(\"| [{}] {}\", i, sq));\n\n }\n\n Beam::shoot(\"+\");\n\n}\n\n\n", "file_path": "src/view/mod.rs", "rank": 8, "score": 149700.04440108908 }, { "content": "/// 指し手のために、段をアルファベットにすることを想定\n\npub fn num_to_lower_case(num: usize) -> &'static str {\n\n const ALPHABETS: [&str; 9] = [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\", \"i\"];\n\n // 配列の範囲外は強制終了だぜ☆(^~^)\n\n ALPHABETS[num - 1]\n\n}\n", "file_path": "src/entities/law/cryptographic.rs", "rank": 9, 
"score": 144743.94596047382 }, { "content": "/// 先手の気持ちで、勝てだぜ☆(*^~^*)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `game` - 対局。\n\n/// * `sibling_best` - アルファベータ探索のベータ値。兄弟で一番良い評価値。\n\n///\n\n/// # Returns\n\n///\n\n/// Best movement, Value, Sum nodes\n\nfn search(game: &mut Game, ss: &mut SearchStack, mut alpha: i16, beta: i16) -> (CentiPawn, Move) {\n\n let mut bestmove = RESIGN_MOVE;\n\n\n\n // TODO 葉ノードなら、評価値を返して終了(^~^)\n\n if ss.id_depth <= 0 {\n\n // 葉だぜ☆(^~^)\n\n\n\n // if let Some(_captured) = move_.captured {\n\n // // TODO SEEやろうぜ☆(^~^)\n\n // SEE::go(game, &movement.destination);\n\n // }\n\n\n\n // 現局面(は相手の手番)の駒割り評価値をひっくり返したもの☆(^~^)\n\n let leaf_value: CentiPawn = game.position.material_advantage(game.history.get_phase());\n\n\n\n // 局面を評価するだけ(^~^) 指し手は返さないぜ(^~^)\n\n return (leaf_value, RESIGN_MOVE);\n\n }\n\n // TODO let mut controls = Vec::<Square>::new();\n\n\n", "file_path": "src/search/mod.rs", "rank": 10, "score": 135621.7545789816 }, { "content": "/// sfen\n\npub fn to_move_code(move_: Move) -> String {\n\n if move_ == RESIGN_MOVE {\n\n return \"resign\".to_string();\n\n }\n\n let (from, to, promote) = destructure_move(move_);\n\n\n\n if from.is_hand() {\n\n // 打\n\n let drop = from.to_drop_code();\n\n format!(\n\n \"{}{}{}{}\",\n\n drop,\n\n to.file(),\n\n num_to_lower_case(to.rank().into()),\n\n if promote { \"+\" } else { \"\" }\n\n )\n\n } else {\n\n // 盤上\n\n format!(\n\n \"{}{}{}{}{}\",\n\n from.file(),\n\n num_to_lower_case(from.rank().into()),\n\n to.file(),\n\n num_to_lower_case(to.rank().into()),\n\n if promote { \"+\" } else { \"\" }\n\n )\n\n }\n\n}\n", "file_path": "src/position/mod.rs", "rank": 11, "score": 128869.39792237733 }, { "content": "pub fn main_loop(universe: &mut Universe) {\n\n loop {\n\n let input: String = {\n\n let mut input: String = String::new();\n\n\n\n // まず最初に、コマンドライン入力を待機しろだぜ☆(^~^)\n\n match std_io::stdin().read_line(&mut input) {\n\n Ok(_n) => {}\n\n Err(e) => std::panic::panic_any(Beam::trouble(&format!(\n\n 
\"(Err.28) Failed to read line. / {}\",\n\n e\n\n ))),\n\n };\n\n // 末尾の改行を除こうぜ☆(^~^)\n\n // trim すると空白も消えるぜ☆(^~^)\n\n match input.trim().parse() {\n\n Ok(n) => n,\n\n Err(e) => std::panic::panic_any(Beam::trouble(&format!(\n\n \"(Err.38) Failed to parse. / {}\",\n\n e\n", "file_path": "src/usi/mod.rs", "rank": 12, "score": 127470.69935595029 }, { "content": "/// ハッシュ値を作る\n\npub fn push_sq_to_hash(hash: u64, sq: Square) -> u64 {\n\n // 0筋とか 0段とか 使ってないが、そのまま足す。\n\n // 0~100の101升と、ちょいなんで、128(=2^7) あれば十分\n\n if sq.is_square() {\n\n (hash << 7) + (sq.number() as u64)\n\n } else {\n\n panic!(\"push_sq_to_hash fail\")\n\n }\n\n}\n", "file_path": "src/entities/law/cryptographic.rs", "rank": 13, "score": 124879.1753718576 }, { "content": "pub fn test_rotation() {\n\n // 辞書象限のテスト\n\n {\n\n let mut ort = DictOrthant::from_file_and_rank(0, -1);\n\n test_dort(\"e1\", \"IOrIII\", &ort);\n\n ort = DictOrthant::from_file_and_rank(1, -1);\n\n test_dort(\"e2\", \"IV\", &ort);\n\n ort = DictOrthant::from_file_and_rank(1, 0);\n\n test_dort(\"e3\", \"IOrIII\", &ort);\n\n ort = DictOrthant::from_file_and_rank(1, 1);\n\n test_dort(\"e4\", \"IOrIII\", &ort);\n\n ort = DictOrthant::from_file_and_rank(0, 1);\n\n test_dort(\"e5\", \"IOrIII\", &ort);\n\n ort = DictOrthant::from_file_and_rank(-1, 1);\n\n test_dort(\"e6\", \"II\", &ort);\n\n ort = DictOrthant::from_file_and_rank(-1, 0);\n\n test_dort(\"e7\", \"IOrIII\", &ort);\n\n ort = DictOrthant::from_file_and_rank(-1, -1);\n\n test_dort(\"e8\", \"IOrIII\", &ort);\n\n }\n", "file_path": "src/position/rotation.rs", "rank": 14, "score": 124399.23037580105 }, { "content": "/// 全升の面積だぜ☆(^~^)駒を打つときに使うぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `callback` - 絶対番地を受け取れだぜ☆(^~^)\n\npub fn foreach_square_in_board<F1>(fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square),\n\n{\n\n for rank in RANK_1..RANK_10 {\n\n for file in (FILE_1..FILE_10).rev() {\n\n fn_make_move_list(Square::from(file, rank));\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"src/movegen/mod.rs", "rank": 15, "score": 122826.34810949804 }, { "content": "/// 先手から見た桂馬の打てる面積だぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 手番☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地を受け取れだぜ☆(^~^)\n\npub fn drop_knight<F1>(us: Phase, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square),\n\n{\n\n for rank in RANK_3..RANK_10 {\n\n for file in (FILE_1..FILE_10).rev() {\n\n let mut sq = Square::from(file, rank);\n\n if us == Phase::Second {\n\n sq = sq.rotate_180();\n\n }\n\n\n\n fn_make_move_list(sq);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 16, "score": 116784.22137860491 }, { "content": "fn test_dort(test_name: &str, expected: &str, actual: &DictOrthant) {\n\n debug_assert!(\n\n format!(\"{:?}\", actual) == expected,\n\n \"{}: expected={} | actual={:?}\",\n\n test_name,\n\n expected,\n\n actual\n\n );\n\n}\n", "file_path": "src/position/rotation.rs", "rank": 17, "score": 116138.12573925465 }, { "content": "fn test_d45ort(test_name: &str, expected: &str, actual: &Degree45Orthant) {\n\n debug_assert!(\n\n format!(\"{:?}\", actual) == expected,\n\n \"{}: expected={} | actual={:?}\",\n\n test_name,\n\n expected,\n\n actual\n\n );\n\n}\n", "file_path": "src/position/rotation.rs", "rank": 18, "score": 116138.12573925464 }, { "content": "fn test_rsq(test_name: &str, expected: &str, actual: &RelAdr) {\n\n debug_assert!(\n\n format!(\"{:?}\", actual) == expected,\n\n \"{}: expected={} | actual={:?}\",\n\n test_name,\n\n expected,\n\n actual\n\n );\n\n}\n\n\n", "file_path": "src/position/rotation.rs", "rank": 19, "score": 116138.12573925465 }, { "content": "/// 先手から見た歩、香車の打てる面積だぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 後手視点にしたけりゃ us.turn() しろだぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地を受け取れだぜ☆(^~^)\n\npub fn drop_pawn_lance<F1>(us: Phase, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square),\n\n{\n\n // 180°回転とかするより、for文の方を変えた方が高速だろ……☆(^~^)\n\n let (min_rank, max_rank) = if us == Phase::First {\n\n (RANK_2, 
RANK_10)\n\n } else {\n\n (RANK_1, RANK_9)\n\n };\n\n\n\n for rank in min_rank..max_rank {\n\n for file in (FILE_1..FILE_10).rev() {\n\n fn_make_move_list(Square::from(file, rank));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 20, "score": 114822.32023502189 }, { "content": "/// 0 なら偽、それ以外は真☆(^~^)\n\npub fn num_to_bool(n: usize) -> bool {\n\n match n {\n\n 0 => false,\n\n _ => true,\n\n }\n\n}\n", "file_path": "src/entities/law/cryptographic.rs", "rank": 21, "score": 110493.2997600913 }, { "content": "pub fn hand_type_to_square(ha: HandPiece) -> Square {\n\n match ha {\n\n HandPiece::King1 => Square(100),\n\n HandPiece::Rook1 => Square(101),\n\n HandPiece::Bishop1 => Square(102),\n\n HandPiece::Gold1 => Square(103),\n\n HandPiece::Silver1 => Square(104),\n\n HandPiece::Knight1 => Square(105),\n\n HandPiece::Lance1 => Square(106),\n\n HandPiece::Pawn1 => Square(107),\n\n HandPiece::King2 => Square(108),\n\n HandPiece::Rook2 => Square(109),\n\n HandPiece::Bishop2 => Square(110),\n\n HandPiece::Gold2 => Square(111),\n\n HandPiece::Silver2 => Square(112),\n\n HandPiece::Knight2 => Square(113),\n\n HandPiece::Lance2 => Square(114),\n\n HandPiece::Pawn2 => Square(115),\n\n // _ => panic!(\"(Err.44) Hand address fail\"),\n\n }\n\n}\n", "file_path": "src/position/mod.rs", "rank": 22, "score": 104144.7043100309 }, { "content": "pub fn destructure_move(m: Move) -> (Square, Square, bool) {\n\n // 移動元マス\n\n // .pdd dddd dsss ssss - m\n\n // 0000 0000 0111 1111 - Mask 0x007f\n\n let from = Square((m & 0x007f) as u8);\n\n\n\n // 移動先マス\n\n // .pdd dddd dsss ssss - m\n\n // 0011 1111 1000 0000 - Mask 0x3f80\n\n // 演算子の優先順位は `&` より `>>` の方が高いことに注意(^~^)\n\n let to = Square(((m & 0x3f80) >> 7) as u8);\n\n\n\n // 成\n\n // .pdd dddd dsss ssss - m\n\n // 0100 0000 0000 0000 - Mask 0x4000\n\n let promote = ((m & 0x4000) >> 14) == 1;\n\n\n\n return (from, to, promote);\n\n}\n\n\n", "file_path": "src/position/mod.rs", "rank": 23, "score": 104137.85638813798 }, { 
"content": "/// ハッシュ値から作る\n\npub fn pop_sq_from_hash(hash: u64) -> (u64, Square) {\n\n // 0筋とか 0段とか 使ってないが、そのまま足す。\n\n // 0~100の101升と、ちょいなんで、128(=2^7) あれば十分\n\n let sq = Square::new((hash & 0b111_1111) as u8);\n\n if sq.is_square() {\n\n (hash >> 7, sq)\n\n } else {\n\n panic!(\"pop_sq_from_hash fail\")\n\n }\n\n}\n\n\n", "file_path": "src/entities/law/cryptographic.rs", "rank": 24, "score": 101178.9979265513 }, { "content": "/// 情報表示\n\npub fn print_info(\n\n display: &mut DestinationDisplay,\n\n depth: Option<usize>,\n\n state_nodes_nps: Option<(u64, u64)>,\n\n value: Option<CentiPawn>,\n\n move_: Option<Move>,\n\n pv_string: &Option<PvString>,\n\n) {\n\n // TODO 評価値が自分のか相手のか調べてないぜ☆(^~^)\n\n Beam::shoot(&format!(\n\n \"info{}{}{}{} currmove {}{}\",\n\n // 思考を開始してからのミリ秒☆(^~^)\n\n if let Some(pv_string_val) = pv_string {\n\n match pv_string_val {\n\n PvString::PV(msec, _pv) => format!(\" time {}\", msec),\n\n PvString::String(_x) => \"\".to_string(),\n\n }\n\n } else {\n\n \"\".to_string()\n\n },\n", "file_path": "src/view/mod.rs", "rank": 25, "score": 93179.9058651311 }, { "content": "/// 反復深化探索だぜ☆(^~^)\n\npub fn iterative_deepening_search(\n\n universe: &mut Universe,\n\n ss: &mut SearchStack,\n\n think_sec: u64,\n\n) -> (CentiPawn, Move) {\n\n universe.game.info.clear();\n\n ss.think_sec = think_sec;\n\n // ss.think_sec = rand::thread_rng()\n\n // .gen_range(universe.option_min_think_sec as u64..universe.option_max_think_sec as u64);\n\n\n\n ss.us = universe.game.history.get_phase();\n\n\n\n // アルファベータ探索\n\n let mut alpha = -VALUE_INFINITE;\n\n let beta = VALUE_INFINITE;\n\n let mut bestmove = RESIGN_MOVE;\n\n\n\n // 一番深く潜ったときの最善手を選ぼうぜ☆(^~^)\n\n for depth in 0..(universe.option_max_depth + 1) {\n\n ss.id_max_depth = depth;\n", "file_path": "src/search/mod.rs", "rank": 26, "score": 90890.51652011849 }, { "content": "/// 盤上の竜の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 
絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_dragon<F1>(from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n {\n\n let fn_pass_destination =\n\n &mut |to, move_range| fn_make_move_list(to, Promotability::Deny, move_range, None);\n\n\n\n for mobility in PieceType::PR.mobility().iter() {\n\n // 先後同型(^~^)\n\n push_piece_moves(None, from, *mobility, fn_pass_destination);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 27, "score": 83829.7263920612 }, { "content": "/// 盤上の玉の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_king<F1>(from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination = &mut |to, _move_range| {\n\n fn_make_move_list(to, Promotability::Deny, MoveRange::Adjacent, None)\n\n };\n\n\n\n for mobility in PieceType::K.mobility().iter() {\n\n // 先後同型\n\n push_piece_moves(None, from, *mobility, fn_pass_destination);\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 28, "score": 83829.7263920612 }, { "content": "/// 盤上の馬の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_horse<F1>(from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination =\n\n &mut |to, move_range| fn_make_move_list(to, Promotability::Deny, move_range, None);\n\n\n\n for mobility in PieceType::PB.mobility().iter() {\n\n // 先後同型(^~^)\n\n push_piece_moves(None, from, *mobility, fn_pass_destination);\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 29, "score": 83829.7263920612 }, { "content": "pub fn turn_person(person: &Person) -> Person {\n\n use 
self::Person::*;\n\n match *person {\n\n Friend => Opponent,\n\n Opponent => Friend,\n\n }\n\n}\n\n*/\n\n\n\n/// 局面ハッシュを作るときに、フェーズ用に配列があって、それのサイズに使ってるぜ☆(^~^)\n\npub const PHASE_FIRST: usize = 0;\n\npub const PHASE_SECOND: usize = 1;\n\npub const PHASE_LEN: usize = 2;\n\n\n\n/// 先後。単純にプレイヤー1を先手、プレイヤー2を後手とする。\n\n/// 駒落ち戦での通称 上手/下手 の場合、上手は先手、下手は後手とする。\n\n#[derive(Clone, Copy, PartialEq)]\n\npub enum Phase {\n\n First,\n\n Second,\n", "file_path": "src/entities/cosmic/recording.rs", "rank": 30, "score": 79577.64195483459 }, { "content": "/// 盤上の飛の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_rook<F1>(us: Phase, from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination =\n\n &mut |to, _move_range| Promoting::bishop_rook(us, from, to, fn_make_move_list);\n\n for mobility in PieceType::R.mobility().iter() {\n\n push_piece_moves(\n\n None, //&Some(us),// 先後同型なのでは(^~^)?\n\n from,\n\n *mobility,\n\n fn_pass_destination,\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 31, "score": 78756.31623163458 }, { "content": "/// 盤上の角の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_bishop<F1>(us: Phase, from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination =\n\n &mut |to, _move_range| Promoting::bishop_rook(us, from, to, fn_make_move_list);\n\n for mobility in PieceType::B.mobility().iter() {\n\n push_piece_moves(\n\n None, //&Some(us),// 先後同型なのでは(^~^)?\n\n from,\n\n *mobility,\n\n fn_pass_destination,\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 32, "score": 78756.31623163458 }, { "content": "/// 
先手から見た盤上の桂の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 後手視点にしたけりゃ us.turn() しろだぜ☆(^~^)\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_knight<F1>(us: Phase, from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination = &mut |to, _move_range| {\n\n Promoting::knight(\n\n us,\n\n to,\n\n fn_make_move_list,\n\n Some(MovePermission::from_knight(us)),\n\n )\n\n };\n\n\n\n for mobility in PieceType::N.mobility().iter() {\n\n push_piece_moves(Some(us), from, *mobility, fn_pass_destination);\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 33, "score": 78756.09118580552 }, { "content": "/// 先手から見た盤上の金、と、杏、圭、全の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 後手視点にしたけりゃ us.turn() しろだぜ☆(^~^)\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_gold<F1>(us: Phase, from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool, // FnMut\n\n{\n\n let fn_pass_destination = &mut |to, _move_range| {\n\n fn_make_move_list(to, Promotability::Deny, MoveRange::Adjacent, None)\n\n };\n\n\n\n for mobility in PieceType::G.mobility().iter() {\n\n push_piece_moves(Some(us), from, *mobility, fn_pass_destination);\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 34, "score": 78756.09118580552 }, { "content": "/// 先手から見た盤上の銀の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 後手視点にしたけりゃ us.turn() しろだぜ☆(^~^)\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_silver<F1>(us: Phase, from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination =\n\n &mut |to, _move_range| Promoting::silver(us, 
from, to, fn_make_move_list);\n\n\n\n for mobility in PieceType::S.mobility().iter() {\n\n push_piece_moves(Some(us), from, *mobility, fn_pass_destination);\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 35, "score": 78756.09118580552 }, { "content": "/// 先手から見た盤上の歩の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 後手視点にしたけりゃ us.turn() しろだぜ☆(^~^)\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_pawn<F1>(us: Phase, from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination = &mut |to, _move_range| {\n\n Promoting::pawn_lance(\n\n us,\n\n to,\n\n fn_make_move_list,\n\n Some(MovePermission::from_pawn_or_lance(us)),\n\n )\n\n };\n\n\n\n for mobility in PieceType::P.mobility().iter() {\n\n push_piece_moves(Some(us), from, *mobility, fn_pass_destination);\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 36, "score": 78756.09118580552 }, { "content": "/// 先手から見た盤上の香の動けるマスだぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 後手視点にしたけりゃ us.turn() しろだぜ☆(^~^)\n\n/// * `from` - 移動元升だぜ☆(^~^)\n\n/// * `fn_make_move_list` - 絶対番地、成れるか、動き方、移動できるかを受け取れだぜ☆(^~^)\n\nfn gen_lance<F1>(us: Phase, from: Square, fn_make_move_list: &mut F1)\n\nwhere\n\n F1: FnMut(Square, Promotability, MoveRange, Option<MovePermission>) -> bool,\n\n{\n\n let fn_pass_destination = &mut |to, _move_range| {\n\n Promoting::pawn_lance(\n\n us,\n\n to,\n\n fn_make_move_list,\n\n Some(MovePermission::from_pawn_or_lance(us)),\n\n )\n\n };\n\n\n\n for mobility in PieceType::L.mobility().iter() {\n\n push_piece_moves(Some(us), from, *mobility, fn_pass_destination);\n\n }\n\n}\n\n\n", "file_path": "src/movegen/mod.rs", "rank": 37, "score": 78756.09118580552 }, { "content": "/// ハッシュ値から作る\n\npub fn pop_bool_from_hash(hash: u64) -> (u64, bool) {\n\n let b_num = num_to_bool((hash & 0b1) as usize);\n\n (hash >> 7, 
b_num)\n\n}\n\n*/\n\n\n", "file_path": "src/entities/law/cryptographic.rs", "rank": 38, "score": 74826.180159196 }, { "content": "/// ハッシュ値を作る\n\npub fn push_bool_to_hash(hash: u64, b: bool) -> u64 {\n\n // bool は i32 だが、hash は u64 なので u64 に合わせるぜ☆(*^~^*)\n\n (hash << 7) + b as u64\n\n}\n", "file_path": "src/entities/law/cryptographic.rs", "rank": 39, "score": 73598.6936726365 }, { "content": "/// 初期値として 移動元マス、移動先マス、成りの有無 を指定してください\n\npub fn new_move(from: Square, to: Square, promote: bool) -> Move {\n\n let mut num: u16;\n\n\n\n // 移動元マス\n\n // .... .... .sss ssss\n\n // 11~99: 盤\n\n // 100~115: 持駒\n\n num = from.number() as u16;\n\n\n\n // 移動先マス\n\n // ..dd dddd d... ....\n\n num += (to.number() as u16) << 7;\n\n\n\n if promote {\n\n // 成\n\n // .p.. .... .... ....\n\n num += 0x4000;\n\n }\n\n\n\n return num;\n\n}\n\n\n", "file_path": "src/entities/move_.rs", "rank": 40, "score": 73135.90306853234 }, { "content": "/// ハッシュ値から作る\n\npub fn pop_drop_from_hash(hash: u64) -> (u64, Option<HandType>) {\n\n // 使ってるのは8種類なんで、8(=2^3) で OK\n\n (hash >> 3, HandType::from_u64(hash & 0b111))\n\n}\n\n*/\n\n\n\n/// コーディングを短くするためのものだぜ☆(^~^)\n\nimpl HandType {\n\n // pub fn promotion_value(self) -> CentiPawn {\n\n // NINE_299792458.promotion_value[self as usize]\n\n // }\n\n pub fn captured_value(self) -> CentiPawn {\n\n NINE_299792458.hand_type_to_captured_value[self as usize]\n\n }\n\n}\n\n\n\n/// コーディングを短くするためのものだぜ☆(^~^)\n\nimpl Angle {\n\n /*\n\n /// 時計回り(Clockwise)☆(^~^)\n", "file_path": "src/entities/law/speed_of_light.rs", "rank": 41, "score": 69241.90239786697 }, { "content": "/// to_move_object - 移動元マス、移動先マス、成りの有無\n\n///\n\n/// # Returns\n\n///\n\n/// `Option<Square>` - from. 移動元升。Dropのときは None だぜ☆(^~^)\n\n/// `Square` - to. 移動先升\n\n/// `bool` - promote. 移動後に成るなら真\n\n/// `Option<HandType>` - drop. 
打の場合、打った駒種類\n\npub fn to_move_object(num: Move) -> (Option<Square>, Square, bool, Option<HandType>) {\n\n let (from, to, promote) = destructure_move(num);\n\n\n\n if from.is_board() {\n\n // 盤上\n\n return (Some(from), to, promote, None);\n\n } else {\n\n // 打\n\n let hand = square_to_hand_type(from);\n\n\n\n return (None, to, promote, Some(hand));\n\n }\n\n}\n", "file_path": "src/entities/move_.rs", "rank": 42, "score": 66243.99314792267 }, { "content": "/// ハッシュ値を作る\n\npub fn push_drop_to_hash(hash: u64, piece_type_o: Option<HandType>) -> u64 {\n\n let num = if let Some(piece_type) = piece_type_o {\n\n // 持ち駒の型は 7つ + 持ち駒無しの 1つ なんで、8(=2^3) で OK\n\n piece_type as u64\n\n } else {\n\n // None の変わりに 玉を使うぜ☆(^~^)\n\n HandType::King as u64\n\n };\n\n (hash << 3) + num\n\n}\n\n*/\n\n\n\n/*\n", "file_path": "src/entities/law/speed_of_light.rs", "rank": 43, "score": 65619.35788395091 }, { "content": "fn main() {\n\n // 宇宙☆(^~^)変化するぜ☆(^~^)\n\n let mut universe: Universe = Universe::default();\n\n\n\n // ビッグバン\n\n universe.big_bang();\n\n\n\n // 「何が見えんの?」\n\n Yumemi::look_into_the_telescope();\n\n\n\n main_loop(&mut universe);\n\n // [Ctrl]+[C] で強制終了\n\n}\n", "file_path": "src/main.rs", "rank": 44, "score": 47285.0524215886 }, { "content": " /// 指し手生成で使うぜ☆(^~^)\n\n pub fn last_hand(&self, adr: HandPiece) -> Option<&PieceEx> {\n\n self.hands[adr as usize].last()\n\n }\n\n /// 持駒の枚数\n\n pub fn count_hand(&self, adr: HandPiece) -> usize {\n\n self.hands[adr as usize].len()\n\n }\n\n\n\n /// 局面ハッシュを作り直す\n\n pub fn create_hash(&self, game: &Game) -> u64 {\n\n let mut hash: u64 = 0;\n\n\n\n // 盤上の駒\n\n for rank in RANK_1..RANK_10 {\n\n for file in (FILE_1..FILE_10).rev() {\n\n let sq = Square::from(file, rank);\n\n if let Some(pc_ex) = self.piece_at_board(sq) {\n\n hash ^= game.hash_seed.piece_hash[sq.number() as usize][pc_ex.piece as usize];\n\n }\n", "file_path": "src/position/position.rs", "rank": 45, "score": 43758.51589465209 }, { "content": " pub fn copy_from(&mut self, 
position: &Position) {\n\n self.board = position.board.clone();\n\n self.pc_num_to_location = position.pc_num_to_location;\n\n self.hand_index = position.hand_index.clone();\n\n self.hands = position.hands.clone();\n\n // TODO self.controls = position.controls.clone();\n\n }\n\n\n\n /* TODO\n\n pub fn add_control(&mut self, phase: Phase, adr: Square, offset: isize) {\n\n self.controls[phase as usize].add(adr.address(), offset);\n\n }\n\n\n\n pub fn get_control(&self, phase: Phase, adr: Square) -> isize {\n\n self.controls[phase as usize].get(adr.address())\n\n }\n\n */\n\n\n\n /* TODO\n\n /// TODO 初期局面の利きを数えようぜ☆(^~^)?\n", "file_path": "src/position/position.rs", "rank": 46, "score": 43757.32452059906 }, { "content": "\n\n /// 盤上を検索するのではなく、40個の駒を検索するぜ☆(^~^)\n\n pub fn for_all_pieces_on_board<F>(&self, piece_get: &mut F)\n\n where\n\n F: FnMut(usize, Option<Square>, Option<PieceEx>),\n\n {\n\n for (i, sq) in self.pc_num_to_location.iter().enumerate() {\n\n if sq.is_board() {\n\n // 盤上の駒☆(^~^)\n\n if let Some(pc_ex) = self.piece_at_board(*sq) {\n\n piece_get(i, Some(*sq), Some(pc_ex));\n\n } else {\n\n panic!(\"sq={:?}\", sq)\n\n }\n\n } else if sq.is_hand() {\n\n // TODO 持ち駒☆(^~^)\n\n piece_get(i, None, None);\n\n } else {\n\n std::panic::panic_any(Beam::trouble(\n\n \"(Err.624) なんで駒が作業中なんだぜ☆(^~^)!\",\n", "file_path": "src/position/position.rs", "rank": 47, "score": 43756.57688428352 }, { "content": " }\n\n\n\n fn len(&self) -> usize {\n\n self.count\n\n }\n\n}\n\nimpl fmt::Display for HandTypeStack {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let mut buffer = String::new();\n\n for i in 0..=self.count {\n\n buffer.push_str(&format!(\n\n \"({}, {:?}) \",\n\n self.items[i].piece, self.items[i].num\n\n ));\n\n }\n\n write!(f, \"{}\", buffer.trim_end())\n\n }\n\n}\n", "file_path": "src/position/position.rs", "rank": 48, "score": 43755.60965038159 }, { "content": " pub fn push_to_board(&mut self, sq: Square, pc_ex: Option<PieceEx>) {\n\n if let 
Some(piece_val) = pc_ex {\n\n self.board[sq.number() as usize] = pc_ex;\n\n self.pc_num_to_location[piece_val.num as usize] = sq;\n\n } else {\n\n self.board[sq.number() as usize] = None;\n\n }\n\n }\n\n /// 盤上から駒を無くし、その駒を返り値で返すぜ☆(^~^)\n\n pub fn pop_from_board(&mut self, sq: Square) -> Option<PieceEx> {\n\n // 取り出すピースは複製するぜ☆(^~^)\n\n let pc_ex = self.board[sq.number() as usize].clone();\n\n if let Some(piece_val) = pc_ex {\n\n self.board[sq.number() as usize] = None;\n\n self.pc_num_to_location[piece_val.num as usize] = SQUARE_NONE;\n\n }\n\n pc_ex\n\n }\n\n /// 盤に駒か空升を置いていきます。\n\n pub fn push_piece_on_init(&mut self, file: u8, rank: u8, pc_ex: Option<Piece>) {\n", "file_path": "src/position/position.rs", "rank": 49, "score": 43754.658656426516 }, { "content": " ))\n\n }\n\n }\n\n }\n\n\n\n /// 盤上を検索するのではなく、40個の駒を検索するぜ☆(^~^)\n\n pub fn for_some_pieces_on_list40<F>(&self, us: Phase, piece_get: &mut F)\n\n where\n\n F: FnMut(Square, PieceEx),\n\n {\n\n // 駒の背番号\n\n for pc_num in Nine299792458::piece_numbers().iter() {\n\n let sq = self.pc_num_to_location[*pc_num as usize];\n\n if sq.is_board() {\n\n // 盤上の駒☆(^~^)\n\n if let Some(pc_ex) = self.piece_at_board(sq) {\n\n if pc_ex.piece.phase() == us {\n\n piece_get(sq, pc_ex);\n\n }\n\n } else {\n", "file_path": "src/position/position.rs", "rank": 50, "score": 43754.398873851016 }, { "content": " }\n\n }\n\n\n\n // 持ち駒ハッシュ\n\n HandPieces::for_all(&mut |hand_pc| {\n\n let count = self.count_hand(hand_pc);\n\n debug_assert!(\n\n count <= HAND_MAX,\n\n \"持ち駒 {:?} の枚数 {} <= {}\",\n\n hand_pc,\n\n count,\n\n HAND_MAX\n\n );\n\n hash ^= game.hash_seed.hand_hash[hand_pc as usize][count as usize];\n\n });\n\n\n\n // 手番ハッシュ はここでは算出しないぜ☆(^~^)\n\n\n\n hash\n\n }\n", "file_path": "src/position/position.rs", "rank": 51, "score": 43754.0164075516 }, { "content": " for rank in RANK_1..RANK_10 {\n\n let sq = Square::from(file, rank);\n\n if let Some(pc_ex) = self.piece_at_board(sq) {\n\n if pc_ex.piece.phase() == phase && 
pc_ex.piece.type_() == PieceType::P {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n }\n\n /// 升で指定して駒を取得\n\n pub fn piece_at_board(&self, sq: Square) -> Option<PieceEx> {\n\n self.board[sq.number() as usize]\n\n }\n\n /// 駒の背番号で指定して場所を取得\n\n pub fn location_at(&self, adr: PieceNum) -> Square {\n\n self.pc_num_to_location[adr as usize]\n\n }\n\n\n\n /// 升で指定して駒を置く\n", "file_path": "src/position/position.rs", "rank": 52, "score": 43753.00208871335 }, { "content": " }\n\n }\n\n }\n\n value\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct HandTypeStack {\n\n items: [PieceEx; HAND_MAX],\n\n count: usize,\n\n}\n\nimpl Default for HandTypeStack {\n\n fn default() -> Self {\n\n HandTypeStack {\n\n // ゴミ値で埋めるぜ☆(^~^)\n\n items: [PieceEx::new(Piece::K1, PieceNum::King1); HAND_MAX],\n\n count: 0,\n\n }\n\n }\n", "file_path": "src/position/position.rs", "rank": 53, "score": 43751.44254758335 }, { "content": "}\n\nimpl HandTypeStack {\n\n fn push(&mut self, pc_ex: &PieceEx) {\n\n self.items[self.count] = *pc_ex;\n\n self.count += 1;\n\n }\n\n\n\n fn pop(&mut self) -> PieceEx {\n\n self.count -= 1;\n\n let pc_ex = self.items[self.count];\n\n // ゴミ値は消さないぜ☆(^~^)\n\n pc_ex\n\n }\n\n\n\n fn last(&self) -> Option<&PieceEx> {\n\n if 0 < self.count {\n\n Some(&self.items[self.count - 1])\n\n } else {\n\n None\n\n }\n", "file_path": "src/position/position.rs", "rank": 54, "score": 43751.39484614744 }, { "content": "use std::fmt;\n\n\n\n/// 背番号付きの駒の数。\n\npub const PIECE_NUM_LEN: usize = 40;\n\n\n\n/// 駒に背番号を付けたものだぜ☆(^~^)\n\n#[derive(Clone, Copy, FromPrimitive, Debug, PartialEq)]\n\npub enum PieceNum {\n\n // 1 先手玉\n\n King1,\n\n // 2 後手玉\n\n King2,\n\n // 3 金\n\n Gold3,\n\n // 4 金\n\n Gold4,\n\n // 5 金\n\n Gold5,\n\n // 6 金\n\n Gold6,\n", "file_path": "src/position/position.rs", "rank": 55, "score": 43750.069635139 }, { "content": "//!\n\n//! 
駒 と 盤\n\n//!\n\nuse crate::entities::cosmic::playing::Game;\n\nuse crate::entities::cosmic::recording::Phase;\n\nuse crate::entities::cosmic::smart::features::HAND_ADDRESS_LEN;\n\nuse crate::entities::cosmic::smart::features::HAND_ADDRESS_TYPE_LEN;\n\nuse crate::entities::cosmic::smart::features::{HandPiece, PieceType, HAND_MAX};\n\nuse crate::entities::law::speed_of_light::{HandPieces, Nine299792458};\n\nuse crate::entities::spaceship::equipment::Beam;\n\nuse crate::movegen::PieceEx;\n\nuse crate::position::hand_type_to_square;\n\nuse crate::position::square_to_hand_piece;\n\nuse crate::position::Square;\n\nuse crate::position::SQUARE_NONE;\n\nuse crate::position::{BOARD_MEMORY_AREA, FILE_0, FILE_1, FILE_10, RANK_0, RANK_1, RANK_10};\n\nuse crate::search::CentiPawn;\n\nuse crate::take1base::Piece;\n\nuse num_derive::FromPrimitive;\n\nuse num_traits::FromPrimitive;\n", "file_path": "src/position/position.rs", "rank": 56, "score": 43749.87569384455 }, { "content": " // 37 歩\n\n Pawn37,\n\n // 38 歩\n\n Pawn38,\n\n // 39 歩\n\n Pawn39,\n\n // 40 歩\n\n Pawn40,\n\n}\n\n\n\n/// 現局面、または初期局面☆(^~^)\n\n/// でかいのでコピーもクローンも不可☆(^~^)!\n\n/// 10の位を筋、1の位を段とする。\n\n/// 0筋、0段は未使用\n\npub struct Position {\n\n // いわゆる盤☆(^~^)\n\n board: [Option<PieceEx>; BOARD_MEMORY_AREA as usize],\n\n /// 背番号 to 駒の居場所☆(^~^)\n\n pc_num_to_location: [Square; PIECE_NUM_LEN],\n\n hand_index: [usize; HAND_ADDRESS_TYPE_LEN],\n", "file_path": "src/position/position.rs", "rank": 57, "score": 43749.552094870494 }, { "content": " if let Some(pc_num) = PieceNum::from_usize(pc_num) {\n\n self.hands[ha as usize].push(&PieceEx::new(piece, pc_num));\n\n } else {\n\n panic!(\"pc_num={}\", pc_num)\n\n }\n\n self.hand_index[hand_piece as usize] += 1;\n\n\n\n // Beam::shoot(&format!(\"# hand[{}]{} pc_num={}\", i, piece, pc_num));\n\n }\n\n }\n\n pub fn push_hand(&mut self, hand: &PieceEx) {\n\n let adr = hand.piece.hand_piece();\n\n self.hands[adr as usize].push(hand);\n\n self.pc_num_to_location[hand.num as usize] = 
hand_type_to_square(adr);\n\n }\n\n pub fn pop_hand(&mut self, ha: HandPiece) -> PieceEx {\n\n let pc_ex = self.hands[ha as usize].pop();\n\n self.pc_num_to_location[pc_ex.num as usize] = SQUARE_NONE;\n\n pc_ex\n\n }\n", "file_path": "src/position/position.rs", "rank": 58, "score": 43749.271071036266 }, { "content": " HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n ],\n\n // TODO controls: [ControlBoard::default(); PHASE_LEN],\n\n }\n\n }\n\n}\n\nimpl Position {\n\n pub fn clear(&mut self) {\n\n self.board = [\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n", "file_path": "src/position/position.rs", "rank": 59, "score": 43748.437796232385 }, { "content": "\n\n /// 手番側の駒割評価値\n\n pub fn material_advantage(&self, us: Phase) -> CentiPawn {\n\n let mut value = 0;\n\n for pc_num in 0..PIECE_NUM_LEN {\n\n let sq = self.pc_num_to_location[pc_num];\n\n if sq.is_board() {\n\n if let Some(pc_ex) = self.piece_at_board(sq) {\n\n if us == pc_ex.piece.phase() {\n\n value += pc_ex.piece.hand_type().captured_value();\n\n } else {\n\n value -= pc_ex.piece.hand_type().captured_value();\n\n }\n\n }\n\n } else if sq.is_hand() {\n\n let hand_piece = square_to_hand_piece(sq);\n\n if us == hand_piece.phase() {\n\n value += hand_piece.type_().captured_value();\n\n } else {\n\n value -= hand_piece.type_().captured_value();\n", "file_path": "src/position/position.rs", "rank": 60, "score": 43747.917452113914 }, { "content": " let hand_piece = piece.hand_piece().type_();\n\n self.pc_num_to_location[self.hand_index[hand_piece as usize]] = from;\n\n if let Some(pn) = PieceNum::from_usize(self.hand_index[hand_piece as 
usize]) {\n\n self.hand_index[hand_piece as usize] += 1;\n\n pn\n\n } else {\n\n panic!(\"hand_index={}\", self.hand_index[hand_piece as usize])\n\n }\n\n }\n\n };\n\n self.push_to_board(Square::from(file, rank), Some(PieceEx::new(piece, pc_num)));\n\n }\n\n }\n\n /// 駒台に置く\n\n pub fn push_hand_on_init(&mut self, piece: Piece, number: u8) {\n\n for _i in 0..number {\n\n let ha = piece.hand_piece();\n\n let hand_piece = ha.type_();\n\n let pc_num = self.hand_index[hand_piece as usize];\n\n self.pc_num_to_location[pc_num] = hand_type_to_square(ha);\n", "file_path": "src/position/position.rs", "rank": 61, "score": 43747.70197580832 }, { "content": " /// 持ち駒☆(^~^)TODO 固定長サイズのスタックを用意したいぜ☆(^~^)\n\n pub hands: [HandTypeStack; HAND_ADDRESS_LEN],\n\n /* TODO\n\n /// 利きの数☆(^~^)\n\n controls: [ControlBoard; PHASE_LEN],\n\n */\n\n}\n\nimpl Default for Position {\n\n fn default() -> Self {\n\n Position {\n\n // 盤上\n\n board: [\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None,\n", "file_path": "src/position/position.rs", "rank": 62, "score": 43746.89002633566 }, { "content": " pub fn init_controls(&mut self) {\n\n Area::for_all(&mut |source| {\n\n // そこに置いてある駒を調べようぜ☆(^~^)?\n\n if let Some(pc_ex) = self.piece_at_board(&source) {\n\n // 駒の利きを調べようぜ☆(^~^)?\n\n for mobility in pc_ex.piece.type_().mobility() {\n\n match mobility.move_range {\n\n MoveRange::Adjacent => {\n\n let mut cur = source.clone();\n\n let 
mut rel = RelAdr::new(1, 0);\n\n rel.rotate(mobility.angle);\n\n if pc_ex.piece.phase() == Phase::Second {\n\n rel.rotate_180();\n\n }\n\n if !cur.offset(&rel).wall() {\n\n self.add_control(pc_ex.piece.phase(), &cur, 1);\n\n }\n\n }\n\n MoveRange::Sliding => {\n\n let mut cur = source.clone();\n", "file_path": "src/position/position.rs", "rank": 63, "score": 43746.29520346709 }, { "content": " MoveRange::Knight => {\n\n let mut cur = source.clone();\n\n let mut rel = RelAdr::new(1, 0);\n\n rel.rotate(mobility.angle).double_rank();\n\n if pc_ex.piece.phase() == Phase::Second {\n\n rel.rotate_180();\n\n }\n\n if !cur.offset(&rel).wall() {\n\n self.add_control(pc_ex.piece.phase(), &cur, 1);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n });\n\n }\n\n */\n\n\n\n /// 歩が置いてあるか確認\n\n pub fn exists_pawn_on_file(&self, phase: Phase, file: u8) -> bool {\n", "file_path": "src/position/position.rs", "rank": 64, "score": 43745.780256987964 }, { "content": " ],\n\n pc_num_to_location: [SQUARE_NONE; PIECE_NUM_LEN],\n\n hand_index: [\n\n PieceNum::King1 as usize,\n\n PieceNum::Rook21 as usize,\n\n PieceNum::Bishop19 as usize,\n\n PieceNum::Gold3 as usize,\n\n PieceNum::Silver7 as usize,\n\n PieceNum::Knight11 as usize,\n\n PieceNum::Lance15 as usize,\n\n PieceNum::Pawn23 as usize,\n\n ],\n\n // 持ち駒\n\n hands: [\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n", "file_path": "src/position/position.rs", "rank": 65, "score": 43745.0502719185 }, { "content": " None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, None, None, None, None,\n\n None, None, None, None, None, None, None, None, None, None, 
None, None, None, None,\n\n None, None,\n\n ];\n\n self.pc_num_to_location = [SQUARE_NONE; PIECE_NUM_LEN];\n\n self.hand_index = [\n\n PieceNum::King1 as usize,\n\n PieceNum::Rook21 as usize,\n\n PieceNum::Bishop19 as usize,\n\n PieceNum::Gold3 as usize,\n\n PieceNum::Silver7 as usize,\n\n PieceNum::Knight11 as usize,\n\n PieceNum::Lance15 as usize,\n\n PieceNum::Pawn23 as usize,\n\n ];\n\n // 持ち駒☆(^~^)\n\n self.hands = [\n", "file_path": "src/position/position.rs", "rank": 66, "score": 43744.52146062984 }, { "content": " panic!(\"sq={:?}\", sq)\n\n }\n\n } else if sq.is_hand() {\n\n // 持ち駒はここで調べるのは無駄な気がするよな☆(^~^)持ち駒に歩が18個とか☆(^~^)\n\n } else {\n\n std::panic::panic_any(Beam::trouble(&format!(\n\n \"(Err.650) 駒{:?} が盤にも駒台にも無いぜ☆(^~^)!\",\n\n pc_num\n\n )))\n\n }\n\n }\n\n\n\n const FIRST_SECOND: [[HandPiece; HAND_ADDRESS_TYPE_LEN - 1]; 2] = [\n\n [\n\n // King なし\n\n HandPiece::Rook1,\n\n HandPiece::Bishop1,\n\n HandPiece::Gold1,\n\n HandPiece::Silver1,\n\n HandPiece::Knight1,\n", "file_path": "src/position/position.rs", "rank": 67, "score": 43744.507502588225 }, { "content": " if !(FILE_0 < file && file < FILE_10 && RANK_0 < rank && rank < RANK_10) {\n\n std::panic::panic_any(Beam::trouble(&format!(\n\n \"(Err.323) 盤上の初期化で盤の外を指定するのは止めろだぜ☆(^~^)! 
({}, {})\",\n\n file, rank\n\n )))\n\n }\n\n\n\n if let Some(piece) = pc_ex {\n\n let from = Square::from(file, rank);\n\n let pc_num = match piece {\n\n // 玉だけ、先後を確定させようぜ☆(^~^)\n\n Piece::K1 => {\n\n self.pc_num_to_location[PieceNum::King1 as usize] = from;\n\n PieceNum::King1\n\n }\n\n Piece::K2 => {\n\n self.pc_num_to_location[PieceNum::King2 as usize] = from;\n\n PieceNum::King2\n\n }\n\n _ => {\n", "file_path": "src/position/position.rs", "rank": 68, "score": 43743.6124848373 }, { "content": " let mut rel = RelAdr::new(1, 0);\n\n rel.rotate(mobility.angle);\n\n if pc_ex.piece.phase() == Phase::Second {\n\n rel.rotate_180();\n\n }\n\n for _i in 0..8 {\n\n if !cur.offset(&rel).wall() {\n\n // とりあえず盤の上なら隣に利きは通るぜ☆(^~^)\n\n self.add_control(pc_ex.piece.phase(), &cur, 1);\n\n\n\n // 利きを調べたいだけなんで、味方/敵問わず駒が有れば終了だぜ☆(^~^)\n\n if let Some(_collision_piece) = self.piece_at_board(&cur) {\n\n break;\n\n }\n\n } else {\n\n // 壁に利きは通らないぜ☆(^~^)\n\n break;\n\n }\n\n }\n\n }\n", "file_path": "src/position/position.rs", "rank": 69, "score": 43742.990348991625 }, { "content": " HandPiece::Lance1,\n\n HandPiece::Pawn1,\n\n ],\n\n [\n\n // King なし\n\n HandPiece::Rook2,\n\n HandPiece::Bishop2,\n\n HandPiece::Gold2,\n\n HandPiece::Silver2,\n\n HandPiece::Knight2,\n\n HandPiece::Lance2,\n\n HandPiece::Pawn2,\n\n ],\n\n ];\n\n for ha in &FIRST_SECOND[us as usize] {\n\n if let Some(pc_ex) = self.last_hand(*ha) {\n\n piece_get(hand_type_to_square(*ha), *pc_ex);\n\n }\n\n }\n\n }\n", "file_path": "src/position/position.rs", "rank": 70, "score": 43742.74944470396 }, { "content": " // 17 香\n\n Lance17,\n\n // 18 香\n\n Lance18,\n\n // 19 角\n\n Bishop19,\n\n // 20 角\n\n Bishop20,\n\n // 21 飛\n\n Rook21,\n\n // 22 飛\n\n Rook22,\n\n // 23 歩\n\n Pawn23,\n\n // 24 歩\n\n Pawn24,\n\n // 25 歩\n\n Pawn25,\n\n // 26 歩\n\n Pawn26,\n", "file_path": "src/position/position.rs", "rank": 71, "score": 43740.5312942746 }, { "content": " // 7 銀\n\n Silver7,\n\n // 8 銀\n\n Silver8,\n\n // 9 銀\n\n Silver9,\n\n // 
10 銀\n\n Silver10,\n\n // 11 桂\n\n Knight11,\n\n // 12 桂\n\n Knight12,\n\n // 13 桂\n\n Knight13,\n\n // 14 桂\n\n Knight14,\n\n // 15 香\n\n Lance15,\n\n // 16 香\n\n Lance16,\n", "file_path": "src/position/position.rs", "rank": 72, "score": 43740.5312942746 }, { "content": " HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n HandTypeStack::default(),\n\n ];\n\n }\n\n\n\n /// 開始盤面を、現盤面にコピーしたいときに使うぜ☆(^~^)\n", "file_path": "src/position/position.rs", "rank": 73, "score": 43740.5312942746 }, { "content": " // 27 歩\n\n Pawn27,\n\n // 28 歩\n\n Pawn28,\n\n // 29 歩\n\n Pawn29,\n\n // 30 歩\n\n Pawn30,\n\n // 31 歩\n\n Pawn31,\n\n // 32 歩\n\n Pawn32,\n\n // 33 歩\n\n Pawn33,\n\n // 34 歩\n\n Pawn34,\n\n // 35 歩\n\n Pawn35,\n\n // 36 歩\n\n Pawn36,\n", "file_path": "src/position/position.rs", "rank": 74, "score": 43740.5312942746 }, { "content": "/// 盤上の駒を指すぜ☆(^~^)\n\n///\n\n/// # Arguments\n\n///\n\n/// * `us` - 先手か後手か、関係ないか☆(^~^)先後同型なら None ☆(^~^)\n\n/// * `start` - 移動元升☆(^~^)\n\n/// * `square` - 升☆(^~^)\n\n/// * `mobility` - 動き方☆(^~^)\n\n/// * `fn_pass_destination` - 絶対番地を受け取れだぜ☆(^~^)\n\nfn push_piece_moves<F1>(\n\n us: Option<Phase>,\n\n start: Square,\n\n mobility: Mobility,\n\n fn_pass_destination: &mut F1,\n\n) where\n\n F1: FnMut(Square, MoveRange) -> bool,\n\n{\n\n // 後手なら 180°ひっくり返す。 us が指定されていないとき、先後同型と見做して回転させません\n\n let angle = if let Some(us) = us {\n\n if us == Phase::First {\n\n mobility.angle\n\n } else {\n\n // 先後同型でない駒は、後手なら180°回転だぜ☆(^~^)\n\n mobility.angle.rotate180()\n\n }\n\n } else {\n\n // 先後同型だからそのままだぜ☆(^~^)\n\n mobility.angle\n\n };\n", "file_path": 
"src/movegen/mod.rs", "rank": 75, "score": 41300.605280564494 }, { "content": "use crate::position::RelAdr;\n\nuse crate::position::Square;\n\nuse crate::position::{FILE_0, FILE_10, RANK_0, RANK_10};\n\nuse std::fmt;\n\n\n\nimpl Square {\n\n pub fn new(number: u8) -> Self {\n\n Square(number)\n\n }\n\n pub fn from(file: u8, rank: u8) -> Self {\n\n Square(file * 10 + rank)\n\n }\n\n /// 指定の方角へ進むぜ(^~^)\n\n pub fn go_forward(&self, r: &RelAdr) -> Self {\n\n // TODO rankの符号はどうだったか……☆(^~^) 絶対番地の使い方をしてれば問題ないだろ☆(^~^)\n\n // TODO sum は負数になることもあり、そのときは明らかにイリーガルだぜ☆(^~^)\n\n let sum = (self.0 as i8 + r.number()) as u8;\n\n // Initialize.\n\n let mut rank = sum % 10;\n\n let mut file = 0;\n", "file_path": "src/position/square.rs", "rank": 83, "score": 35176.29435240716 }, { "content": " pub fn to_drop_code(&self) -> &str {\n\n match self.0 {\n\n 101 | 109 => \"R*\",\n\n 102 | 110 => \"B*\",\n\n 103 | 111 => \"G*\",\n\n 104 | 112 => \"S*\",\n\n 105 | 113 => \"N*\",\n\n 106 | 114 => \"L*\",\n\n 107 | 115 => \"P*\",\n\n _ => panic!(\"(Err.46) drop fail\"),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Square {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.number())\n\n }\n\n}\n\nimpl fmt::Debug for Square {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"({}x {}y {}sq)\", self.file(), self.rank(), self.number())\n\n }\n\n}\n", "file_path": "src/position/square.rs", "rank": 84, "score": 35170.34323614807 }, { "content": "pub mod position;\n\npub mod rel_square;\n\npub mod rotation;\n\npub mod square;\n\n\n\nuse crate::entities::cosmic::smart::features::HandPiece;\n\nuse crate::entities::cosmic::smart::features::HandType;\n\nuse crate::entities::law::cryptographic::num_to_lower_case;\n\nuse crate::record::RESIGN_MOVE;\n\nuse crate::take1base::Move;\n\n\n\n//\n\n// 盤、升、筋、段\n\n//\n\n\n\n// #[allow(non_camel_case_types)]\n\n// pub type isquare = isize;\n\n\n\n// 配列サイズなので 1 大きめだぜ☆(^~^)\n\npub const BOARD_MEMORY_AREA: u8 
= 100;\n", "file_path": "src/position/mod.rs", "rank": 85, "score": 35167.1885938001 }, { "content": "//! A 40 41 42 43 44 45 46 47 48 49 E\n\n//! S 50 51 51 53 54 55 56 57 58 59 S\n\n//! T 60 61 62 63 64 65 66 67 68 69 T\n\n//! 70 71 72 73 74 75 76 77 78 79\n\n//! 80 81 82 83 84 85 86 87 88 89\n\n//! 90 91 92 93 94 95 96 97 98 99\n\n//! Source\n\n//!\n\n//! None is 0.\n\nuse crate::entities::law::speed_of_light::Nine299792458;\n\nuse crate::position::RelAdr;\n\nuse crate::position::Square;\n\nuse std::cmp::max;\n\n\n\n/// 打はテストできない\n", "file_path": "src/position/rotation.rs", "rank": 86, "score": 35165.206124015596 }, { "content": " let mut r = RelAdr::new(0, -1);\n\n test_rsq(\"g1\", \"(0x -1y rel-1sq)\", &r);\n\n r.rotate_ccw(Angle::Ccw45);\n\n test_rsq(\"g2\", \"(1x -1y rel9sq)\", &r);\n\n r.double_rank();\n\n test_rsq(\"g3\", \"(1x -2y rel8sq)\", &r);\n\n\n\n let mut r = RelAdr::new(0, -1);\n\n test_rsq(\"g4\", \"(0x -1y rel-1sq)\", &r);\n\n r.rotate_ccw(Angle::Ccw315);\n\n test_rsq(\"g5\", \"(-1x -1y rel-11sq)\", &r);\n\n r.double_rank();\n\n test_rsq(\"g6\", \"(-1x -2y rel-12sq)\", &r);\n\n\n\n let mut r = RelAdr::new(0, 1);\n\n test_rsq(\"g7\", \"(0x 1y rel1sq)\", &r);\n\n r.rotate_ccw(Angle::Ccw45);\n\n test_rsq(\"g8\", \"(-1x 1y rel-9sq)\", &r);\n\n r.double_rank();\n\n test_rsq(\"g9\", \"(-1x 2y rel-8sq)\", &r);\n", "file_path": "src/position/rotation.rs", "rank": 87, "score": 35164.35314017353 }, { "content": "\n\n/// 筋、段は 1 から始まる、という明示。\n\n/// usize が速い☆(^~^)\n\npub const FILE_0: u8 = 0;\n\npub const FILE_1: u8 = 1;\n\npub const FILE_9: u8 = 9;\n\npub const FILE_10: u8 = 10;\n\n// pub const FILE_11: u8 = 11;\n\npub const RANK_0: u8 = 0;\n\npub const RANK_1: u8 = 1;\n\npub const RANK_2: u8 = 2;\n\npub const RANK_3: u8 = 3;\n\npub const RANK_4: u8 = 4;\n\n// pub const RANK_5: u8 = 5;\n\npub const RANK_6: u8 = 6;\n\npub const RANK_7: u8 = 7;\n\npub const RANK_8: u8 = 8; //うさぎの打てる段の上限\n\npub const RANK_9: u8 = 9;\n\npub const RANK_10: u8 = 10;\n\n// pub 
const RANK_11: u8 = 11;\n", "file_path": "src/position/mod.rs", "rank": 88, "score": 35164.08617746417 }, { "content": " } else {\n\n Degree45Orthant::CoIIIOrCoIV\n\n }\n\n }\n\n}\n\n\n\npub const ANGLE_LEN: usize = 8;\n\n/// Counterclockwise(反時計回り)での回転方向。 45°ずつ☆(^~^)\n\n#[derive(Clone, Copy, Debug)]\n\npub enum Angle {\n\n /// 西。\n\n Ccw0,\n\n /// 南西。\n\n Ccw45,\n\n /// 南。\n\n Ccw90,\n\n /// 南東。\n\n Ccw135,\n\n /// 東。\n\n Ccw180,\n\n /// 北東。\n\n Ccw225,\n\n /// 北。\n\n Ccw270,\n\n /// 北西。\n\n Ccw315,\n\n}\n", "file_path": "src/position/rotation.rs", "rank": 89, "score": 35163.55391908291 }, { "content": " r.rotate_90_ccw();\n\n test_rsq(\"c4\", \"(-1x 0y rel-10sq)\", &r);\n\n r.rotate_90_ccw();\n\n test_rsq(\"c5\", \"(0x -1y rel-1sq)\", &r);\n\n }\n\n // 90°回転のテスト<その2>\n\n {\n\n let mut r = RelAdr::new(1, -1);\n\n test_rsq(\"d1\", \"(1x -1y rel9sq)\", &r);\n\n r.rotate_90_ccw();\n\n test_rsq(\"d2\", \"(1x 1y rel11sq)\", &r);\n\n r.rotate_90_ccw();\n\n test_rsq(\"d3\", \"(-1x 1y rel-9sq)\", &r);\n\n r.rotate_90_ccw();\n\n test_rsq(\"d4\", \"(-1x -1y rel-11sq)\", &r);\n\n r.rotate_90_ccw();\n\n test_rsq(\"d5\", \"(1x -1y rel9sq)\", &r);\n\n }\n\n // 桂馬のテスト\n\n {\n", "file_path": "src/position/rotation.rs", "rank": 90, "score": 35163.23439666612 }, { "content": " /// 第4象限。x=0, y=0 ともに含みません。\n\n IV,\n\n /// 第1象限と第三象限。区別しません。x=0, y=0 ともに含みます。\n\n IOrIII,\n\n}\n\nimpl DictOrthant {\n\n pub fn from_file_and_rank(file: isize, rank: isize) -> Self {\n\n if 0 <= file * rank {\n\n DictOrthant::IOrIII\n\n } else if file < 0 {\n\n DictOrthant::II\n\n } else {\n\n DictOrthant::IV\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Degree45Orthant {\n\n /// 正第4象限と、正第1象限☆(^~^)\n", "file_path": "src/position/rotation.rs", "rank": 91, "score": 35163.21470745326 }, { "content": "\n\n let mut r = RelAdr::new(0, 1);\n\n test_rsq(\"g10\", \"(0x 1y rel1sq)\", &r);\n\n r.rotate_ccw(Angle::Ccw315);\n\n test_rsq(\"g11\", \"(1x 1y rel11sq)\", &r);\n\n r.double_rank();\n\n test_rsq(\"g12\", 
\"(1x 2y rel12sq)\", &r);\n\n }\n\n // 角度指定回転のテスト(北から)\n\n {\n\n // 0\n\n let mut r = RelAdr::new(0, -1);\n\n test_rsq(\"h1\", \"(0x -1y rel-1sq)\", &r);\n\n r.rotate_ccw(Angle::Ccw0);\n\n test_rsq(\"h2\", \"(0x -1y rel-1sq)\", &r);\n\n\n\n // 45\n\n r = RelAdr::new(0, -1);\n\n r.rotate_ccw(Angle::Ccw45);\n\n test_rsq(\"h3\", \"(1x -1y rel9sq)\", &r);\n", "file_path": "src/position/rotation.rs", "rank": 92, "score": 35163.12488775849 }, { "content": " test_rsq(\"a4\", \"(1x 1y rel11sq)\", &r);\n\n r.rotate_45_ccw();\n\n test_rsq(\"a5\", \"(0x 1y rel1sq)\", &r);\n\n r.rotate_45_ccw();\n\n test_rsq(\"a6\", \"(-1x 1y rel-9sq)\", &r);\n\n r.rotate_45_ccw();\n\n test_rsq(\"a7\", \"(-1x 0y rel-10sq)\", &r);\n\n r.rotate_45_ccw();\n\n test_rsq(\"a8\", \"(-1x -1y rel-11sq)\", &r);\n\n r.rotate_45_ccw();\n\n test_rsq(\"a9\", \"(0x -1y rel-1sq)\", &r);\n\n }\n\n // 90°回転のテスト<その1>\n\n {\n\n let mut r = RelAdr::new(0, -1);\n\n test_rsq(\"c1\", \"(0x -1y rel-1sq)\", &r);\n\n r.rotate_90_ccw();\n\n test_rsq(\"c2\", \"(1x 0y rel10sq)\", &r);\n\n r.rotate_90_ccw();\n\n test_rsq(\"c3\", \"(0x 1y rel1sq)\", &r);\n", "file_path": "src/position/rotation.rs", "rank": 93, "score": 35163.07355863598 }, { "content": "\n\n // 270\n\n r = RelAdr::new(0, -1);\n\n r.rotate_ccw(Angle::Ccw270);\n\n test_rsq(\"h8\", \"(-1x 0y rel-10sq)\", &r);\n\n\n\n // 315\n\n r = RelAdr::new(0, -1);\n\n r.rotate_ccw(Angle::Ccw315);\n\n test_rsq(\"h9\", \"(-1x -1y rel-11sq)\", &r);\n\n }\n\n // 角度指定回転のテスト(南から)\n\n {\n\n // 0\n\n let mut r = RelAdr::new(0, 1);\n\n test_rsq(\"h1\", \"(0x 1y rel1sq)\", &r);\n\n r.rotate_ccw(Angle::Ccw0);\n\n test_rsq(\"h2\", \"(0x 1y rel1sq)\", &r);\n\n\n\n // 45\n", "file_path": "src/position/rotation.rs", "rank": 94, "score": 35162.712994753536 }, { "content": " // 相対番地のテスト\n\n {\n\n test_rsq(\"b1\", \"(0x -1y rel-1sq)\", &RelAdr::new(0, -1));\n\n test_rsq(\"b2\", \"(1x -1y rel9sq)\", &RelAdr::new(1, -1));\n\n test_rsq(\"b3\", \"(1x 0y rel10sq)\", &Nine299792458::west());\n\n 
test_rsq(\"b4\", \"(1x 1y rel11sq)\", &RelAdr::new(1, 1));\n\n test_rsq(\"b5\", \"(0x 1y rel1sq)\", &RelAdr::new(0, 1));\n\n test_rsq(\"b6\", \"(-1x 1y rel-9sq)\", &RelAdr::new(-1, 1));\n\n test_rsq(\"b7\", \"(-1x 0y rel-10sq)\", &RelAdr::new(-1, 0));\n\n test_rsq(\"b8\", \"(-1x -1y rel-11sq)\", &RelAdr::new(-1, -1));\n\n }\n\n // 45°回転のテスト\n\n {\n\n let mut r = RelAdr::new(0, -1);\n\n test_rsq(\"a1\", \"(0x -1y rel-1sq)\", &r);\n\n r.rotate_45_ccw();\n\n test_rsq(\"a2\", \"(1x -1y rel9sq)\", &r);\n\n r.rotate_45_ccw();\n\n test_rsq(\"a3\", \"(1x 0y rel10sq)\", &r);\n\n r.rotate_45_ccw();\n", "file_path": "src/position/rotation.rs", "rank": 95, "score": 35162.37935749151 }, { "content": "\n\n/// 引き算もするところでは unsigned ではダメなところもある☆(^~^)\n\n// pub const I_FILE_0: i8 = 0;\n\n// pub const I_FILE_1: i8 = 1;\n\n// pub const I_FILE_9: i8 = 9;\n\n// pub const I_FILE_10: i8 = 10;\n\n// pub const I_RANK_0: i8 = 0;\n\n// pub const I_RANK_1: i8 = 1;\n\n// pub const I_RANK_2: i8 = 2;\n\n// pub const I_RANK_3: i8 = 3;\n\n// pub const I_RANK_4: i8 = 4;\n\n// pub const I_RANK_6: i8 = 6;\n\n// pub const I_RANK_7: i8 = 7;\n\n// pub const I_RANK_8: i8 = 8; //うさぎの打てる段の上限\n\n// pub const I_RANK_9: i8 = 9;\n\n// pub const I_RANK_10: i8 = 10;\n\n\n\n/// マス番号。\n\n/// 100以上は持駒。 K1=100, R1=101 .. P2=115\n\n/// Square is shogi coordinate. 
file*10+rank.\n", "file_path": "src/position/mod.rs", "rank": 96, "score": 35161.91383561994 }, { "content": " pub fn number(&self) -> u8 {\n\n self.0\n\n }\n\n /// 盤上のマスなら真。(調べ方は、ざっくり)\n\n pub fn is_board(&self) -> bool {\n\n 11 <= self.0 && self.0 < 100\n\n }\n\n /// 持駒なら真\n\n pub fn is_hand(&self) -> bool {\n\n 100 <= self.0\n\n }\n\n // /// マスでないなら真\n\n // pub fn is_none_square(&self) -> bool {\n\n // self.0 == SQUARE_NONE\n\n // }\n\n /// マス、または持駒なら真\n\n pub fn is_square(&self) -> bool {\n\n (11 <= self.0 && self.0 < 20)\n\n || (21 <= self.0 && self.0 < 30)\n\n || (31 <= self.0 && self.0 < 40)\n", "file_path": "src/position/square.rs", "rank": 97, "score": 35161.75620587344 }, { "content": " || (41 <= self.0 && self.0 < 50)\n\n || (51 <= self.0 && self.0 < 60)\n\n || (61 <= self.0 && self.0 < 70)\n\n || (71 <= self.0 && self.0 < 80)\n\n || (81 <= self.0 && self.0 < 90)\n\n || (91 <= self.0 && self.0 < 100)\n\n || (100 <= self.0 && self.0 < 116)\n\n }\n\n\n\n pub fn rank(&self) -> u8 {\n\n self.0 % 10\n\n }\n\n pub fn file(&self) -> u8 {\n\n self.0 / 10\n\n }\n\n /// 壁の中にいる☆(^~^)\n\n pub fn wall(&self) -> bool {\n\n self.file() % 10 == 0 || self.rank() % 10 == 0\n\n }\n\n\n", "file_path": "src/position/square.rs", "rank": 98, "score": 35161.59119324081 }, { "content": " IVOrI,\n\n /// コ第1象限と、コ第2象限☆(^~^)\n\n CoIOrCoII,\n\n /// 正第2象限と、正第3象限☆(^~^)\n\n IIOrIII,\n\n /// コ第3象限と、コ第4象限☆(^~^)\n\n CoIIIOrCoIV,\n\n}\n\nimpl Degree45Orthant {\n\n /// Arguments\n\n /// ---------\n\n /// * `r` - (Relative file, relative rank).\n\n pub fn new(r: &RelAdr) -> Self {\n\n let range = max(r.file().abs(), r.rank().abs());\n\n if r.file() == range {\n\n Degree45Orthant::IVOrI\n\n } else if r.file() == -range {\n\n Degree45Orthant::IIOrIII\n\n } else if r.rank() == range {\n\n Degree45Orthant::CoIOrCoII\n", "file_path": "src/position/rotation.rs", "rank": 99, "score": 35161.32037050533 } ]
Rust
rust/src/solutions/day14.rs
efrees/adventofcode2019
9267fdff07f0144d57b659744b83cc0c6b7ecb7b
use regex::Regex; use std::collections::HashMap; #[derive(Eq, PartialEq, Hash)] struct Chemical { count: u32, name: String, } pub fn solve() { println!("Day 14"); let raw_reactions = adventlib::read_input_lines("day14input.txt"); let reactions_list: Vec<_> = raw_reactions.iter().map(|m| parse_reaction(m)).collect(); let reactions: HashMap<_, _> = reactions_list.iter().map(|(g, v)| (&*g.name, v)).collect(); let mut reaction_output_count: HashMap<_, _> = reactions_list .iter() .map(|(g, _)| (&*g.name, g.count)) .collect(); let mut remnants_by_chemical: HashMap<_, u64> = reactions_list.iter().map(|(g, _)| (&*g.name, 0)).collect(); reaction_output_count.insert("FUEL", 1); let mut ore_required = compute_total_ore_requirements( "FUEL", 1, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); println!("ORE required for one FUEL (part 1): {}", ore_required); let ore_supply = 1_000_000_000_000_u64; let mut potential_fuel_min = ore_supply / ore_required as u64; let mut potential_fuel_max = potential_fuel_min * 2; let mut search_interval = potential_fuel_min / 2; while search_interval > 0 { let next_to_check = potential_fuel_min + search_interval; ore_required = compute_total_ore_requirements( "FUEL", next_to_check, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); if ore_required > ore_supply { potential_fuel_max = next_to_check - 1; } else { potential_fuel_min = next_to_check; } search_interval = (potential_fuel_max - potential_fuel_min) / 2; } println!("Total FUEL possible (part 2): {}", potential_fuel_min); } fn compute_total_ore_requirements( name: &str, amount: u64, reactions: &HashMap<&str, &Vec<Chemical>>, reaction_outputs: &HashMap<&str, u32>, remnant_totals: &mut HashMap<&str, u64>, ) -> u64 { if name == "ORE" { return amount; } let reaction = reactions .get(name) .expect(&format!("Missing reaction for {}", name)); let mut goal = amount; let mut remnant = remnant_totals.get(name).cloned().unwrap_or(0) as u64; goal -= 
std::cmp::min(remnant, amount); remnant -= std::cmp::min(remnant, amount); let mut requirement = 0; if goal > 0 { let recipe_output = reaction_outputs.get(&name).cloned().unwrap() as u64; let recipe_count = (goal + recipe_output - 1) / recipe_output; for input in reaction.iter() { requirement += compute_total_ore_requirements( &input.name, input.count as u64 * recipe_count, reactions, reaction_outputs, remnant_totals, ); remnant = recipe_output * recipe_count - goal; } } *remnant_totals.get_mut(name).unwrap() = remnant; return requirement; } fn parse_reaction(raw_reaction: &String) -> (Chemical, Vec<Chemical>) { let sides_of_equation: Vec<_> = raw_reaction.split(" => ").collect(); let result = parse_chemical(sides_of_equation[1]); let inputs: Vec<_> = sides_of_equation[0] .split(", ") .map(|raw| parse_chemical(raw)) .collect(); return (result, inputs); } fn parse_chemical(raw_chemical: &str) -> Chemical { lazy_static! { static ref PATTERN: Regex = Regex::new(r"\s*(\d+) (\w+)").expect("pattern for parsing"); } let captures = PATTERN .captures(raw_chemical) .expect("Line should match format"); return Chemical { count: captures[1] .parse() .expect("First part of chemical must be a number."), name: captures[2].to_string(), }; }
use regex::Regex; use std::collections::HashMap; #[derive(Eq, PartialEq, Hash)] struct Chemical { count: u32, name: String, } pub fn solve() { println!("Day 14"); let raw_reactions = adventlib::read_input_lines("day14input.txt"); let reactions_list: Vec<_> = raw_reactions.iter().map(|m| parse_reaction(m)).collect(); let reactions: HashMap<_, _> = reactions_list.iter().map(|(g, v)| (&*g.name, v)).collect(); let mut reaction_output_count: HashMap<_, _> = reactions_list .iter() .map(|(g, _)| (&*g.name, g.count)) .collect(); let mut remnants_by_chemical: HashMap<_, u64> = reactions_list.iter().map(|(g, _)| (&*g.name, 0)).collect(); reaction_output_count.insert("FUEL", 1); let mut ore_required = compute_total_ore_requirements( "FUEL", 1, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); println!("ORE required for one FUEL (part 1): {}", ore_required); let ore_supply = 1_000_000_000_000_u64; let mut potential_fuel_min = ore_supply / ore_required as u64; let mut potential_fuel_max = potential_fuel_min * 2; let mut search_interval = potential_fuel_min / 2; while search_interval > 0 { let next_to_check = potential_fuel_min + search_interval; ore_required = compute_total_ore_requirements( "FUEL", next_to_check, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); if ore_required > ore_supply { potential_fuel_max = next_to_check - 1; } else { potential_fuel_min = next_to_check; } search_interval = (potential_fuel_max - potential_fuel_min) / 2; } println!("Total FUEL possible (part 2): {}", potential_fuel_min); } fn compute_total_ore_requirements( name: &str, amount: u64, reactions: &HashMap<&str, &Vec<Chemical>>, reaction_outputs: &HashMap<&str, u32>, remnant_totals: &mut HashMap<&str, u64>, ) -> u64 { if name == "ORE" { return amount; } let reaction = reactions .get(name) .expect(&format!("Missing reaction for {}", name)); let mut goal = amount; let mut remnant = remnant_totals.get(name).cloned().unwrap_or(0) as u64; goal -= 
std::cmp::min(remnant, amount); remnant -= std::cmp::min(remnant, amount); let mut requirement = 0; if goal > 0 { let recipe_output = reaction_outputs.get(&name).cloned().unwrap() as u64; let recipe_count = (goal + recipe_output - 1) / recipe_output; for input in reaction.iter() { requirement +=
; remnant = recipe_output * recipe_count - goal; } } *remnant_totals.get_mut(name).unwrap() = remnant; return requirement; } fn parse_reaction(raw_reaction: &String) -> (Chemical, Vec<Chemical>) { let sides_of_equation: Vec<_> = raw_reaction.split(" => ").collect(); let result = parse_chemical(sides_of_equation[1]); let inputs: Vec<_> = sides_of_equation[0] .split(", ") .map(|raw| parse_chemical(raw)) .collect(); return (result, inputs); } fn parse_chemical(raw_chemical: &str) -> Chemical { lazy_static! { static ref PATTERN: Regex = Regex::new(r"\s*(\d+) (\w+)").expect("pattern for parsing"); } let captures = PATTERN .captures(raw_chemical) .expect("Line should match format"); return Chemical { count: captures[1] .parse() .expect("First part of chemical must be a number."), name: captures[2].to_string(), }; }
compute_total_ore_requirements( &input.name, input.count as u64 * recipe_count, reactions, reaction_outputs, remnant_totals, )
call_expression
[ { "content": "pub fn read_input_raw(filename: &str) -> String {\n\n let filename = \"inputs/\".to_owned() + filename;\n\n let mut file = File::open(filename).expect(\"Could not find input file\");\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)\n\n .expect(\"Could not read file\");\n\n return string;\n\n}\n\n\n", "file_path": "rust/src/adventlib/lib.rs", "rank": 0, "score": 213752.9167270688 }, { "content": "pub fn read_input_lines(filename: &str) -> Vec<String> {\n\n let string = read_input_raw(filename);\n\n return string.lines().map(|x| x.to_string()).collect();\n\n}\n\n\n", "file_path": "rust/src/adventlib/lib.rs", "rank": 1, "score": 207221.57341778633 }, { "content": "pub fn read_input_tokenized(filename: &str) -> Vec<String> {\n\n let string = read_input_raw(filename);\n\n return string.split_whitespace().map(|x| x.to_string()).collect();\n\n}\n", "file_path": "rust/src/adventlib/lib.rs", "rank": 2, "score": 207221.57341778633 }, { "content": "fn count_orbits_to_center(start_object: &str, edges: &HashMap<&str, &str>) -> i32 {\n\n if edges.contains_key(start_object) {\n\n return 1 + count_orbits_to_center(edges.get(start_object).unwrap(), edges);\n\n }\n\n return 0;\n\n}\n\n\n", "file_path": "rust/src/solutions/day06.rs", "rank": 4, "score": 149299.04548003216 }, { "content": "fn run_amplifier_returning_output(raw_program: &String, phase: u8, input: i64) -> i64 {\n\n let mut computer = create_computer(raw_program);\n\n return run_computer_for_input(&mut computer, vec![phase as i64, input]);\n\n}\n\n\n", "file_path": "rust/src/solutions/day07.rs", "rank": 5, "score": 145812.56528193539 }, { "content": "pub fn parse_program(raw_program: &String) -> Vec<i64> {\n\n let int_parser = |x: &str| x.parse::<i64>().unwrap();\n\n return raw_program.trim().split(',').map(int_parser).collect();\n\n}\n\n\n\nimpl Computer<i64> {\n\n pub fn new() -> Computer<i64> {\n\n Computer {\n\n program_state: vec![99],\n\n instr_ptr: 0,\n\n rel_base: 0,\n\n 
input_stream: vec![],\n\n run_state: RunState::Initial,\n\n }\n\n }\n\n\n\n pub fn for_program(program_state: Vec<i64>) -> Computer<i64> {\n\n Computer {\n\n program_state: program_state,\n\n instr_ptr: 0,\n", "file_path": "rust/src/intcode.rs", "rank": 6, "score": 130958.89153682045 }, { "content": "fn run_computer_for_input(computer: &mut Computer<i64>, input: Vec<i64>) -> i64 {\n\n computer.set_input_stream(input);\n\n return computer.run_program().unwrap();\n\n}\n\n\n", "file_path": "rust/src/solutions/day07.rs", "rank": 7, "score": 129465.86554097873 }, { "content": "fn distance_between(object1: &str, object2: &str, direct_edges: &HashMap<&str, &str>) -> i32 {\n\n let mut object1_distances = HashMap::new();\n\n object1_distances.insert(object1, 0);\n\n\n\n let mut searching_dist = 0;\n\n let mut searching_object = object1;\n\n while direct_edges.contains_key(searching_object) {\n\n let next_object = direct_edges.get(searching_object).unwrap();\n\n object1_distances.insert(next_object, searching_dist + 1);\n\n searching_object = next_object;\n\n searching_dist += 1;\n\n }\n\n\n\n searching_dist = 0;\n\n searching_object = object2;\n\n\n\n while !object1_distances.contains_key(searching_object) {\n\n searching_object = direct_edges.get(searching_object).unwrap();\n\n searching_dist += 1;\n\n }\n\n\n\n return object1_distances.get(searching_object).unwrap() + searching_dist;\n\n}\n", "file_path": "rust/src/solutions/day06.rs", "rank": 8, "score": 127175.29193381571 }, { "content": "pub fn solve() {\n\n println!(\"Day 2\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day02input.txt\");\n\n\n\n let int_parser = |x: &str| x.parse::<i64>().unwrap();\n\n let program_state: Vec<_> = raw_program.trim().split(',').map(int_parser).collect();\n\n\n\n let mut computer = Computer::for_program(program_state);\n\n computer.set_noun_and_verb(12, 2);\n\n\n\n computer.run_program();\n\n let output = computer.get_value_at_zero();\n\n\n\n println!(\"Output (part 1): {}\", 
output);\n\n\n\n let mut noun = 0;\n\n let mut verb = 0;\n\n 'outer: while noun < 100 {\n\n verb = 0;\n", "file_path": "rust/src/solutions/day02.rs", "rank": 11, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 17\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day17input.txt\");\n\n let mut computer = Computer::for_raw_program(&raw_program);\n\n let mut output = Vec::new();\n\n computer.run_program_with_output(&mut output);\n\n\n\n let image = render_scaffolding(output);\n\n print!(\"{}\", image);\n\n\n\n let lines: Vec<_> = image.split_whitespace().collect();\n\n let mut alignment_parameter_sum = 0;\n\n for row in 0..lines.len() {\n\n for col in 0..lines[row].len() {\n\n if is_intersection(&lines, row, col) {\n\n alignment_parameter_sum += alignment_parameter(row, col);\n\n }\n\n }\n\n }\n", "file_path": "rust/src/solutions/day17.rs", "rank": 12, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 3\");\n\n\n\n let lines = adventlib::read_input_lines(\"day03input.txt\");\n\n\n\n assert_eq!(lines.len(), 2);\n\n\n\n let mut first_line_points = HashMap::new();\n\n let mut intersections = HashMap::new();\n\n\n\n let mut cur_x = 0;\n\n let mut cur_y = 0;\n\n let mut total_steps = 0;\n\n\n\n let first_line = &lines[0];\n\n for segment in first_line.split(',') {\n\n let direction = &segment[0..1];\n\n let mut dist = get_distance(&segment);\n\n while dist > 0 {\n\n match direction {\n", "file_path": "rust/src/solutions/day03.rs", "rank": 13, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 4\");\n\n\n\n //input: 136760-595730\n\n let range_min = 136760;\n\n let range_max = 595730;\n\n\n\n assert!(digits_only_increase(111111));\n\n assert!(includes_doubled_digit(111111));\n\n assert!(digits_only_increase(123789));\n\n assert!(!includes_doubled_digit(123789));\n\n assert!(!digits_only_increase(223450));\n\n assert!(includes_doubled_digit(223450));\n\n\n\n let mut 
qualified_count_1 = 0;\n\n let mut qualified_count_2 = 0;\n\n for candidate in range_min..range_max + 1 {\n\n if digits_only_increase(candidate) && includes_doubled_digit(candidate) {\n\n qualified_count_1 += 1;\n\n }\n\n if digits_only_increase(candidate) && includes_exactly_doubled_digit(candidate) {\n\n qualified_count_2 += 1;\n\n }\n\n }\n\n\n\n println!(\"Qualified passwords (part 1): {}\", qualified_count_1);\n\n println!(\"Qualified passwords (part 2): {}\", qualified_count_2);\n\n}\n\n\n", "file_path": "rust/src/solutions/day04.rs", "rank": 14, "score": 124778.12692674267 }, { "content": "pub fn solve() {\n\n println!(\"Day 8\");\n\n\n\n let image_data = adventlib::read_input_raw(\"day08input.txt\");\n\n\n\n let image_w = 25;\n\n let image_h = 6;\n\n let layer_size = image_w * image_h;\n\n let mut result_image: Vec<char> = std::iter::repeat('2').take(layer_size).collect();\n\n\n\n let layers = image_data.len() / layer_size;\n\n let mut min_zeros = 9999;\n\n let mut checksum = 9999;\n\n for l in 0..layers {\n\n let layer_data = &image_data[l * layer_size + 0..l * layer_size + layer_size];\n\n let mut zeros = 0;\n\n let mut ones = 0;\n\n let mut twos = 0;\n\n for (i, c) in layer_data.char_indices() {\n\n match c {\n", "file_path": "rust/src/solutions/day08.rs", "rank": 15, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 5\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day05input.txt\");\n\n let diagnostic_input = vec![1];\n\n\n\n let program_state = parse_program(&raw_program);\n\n\n\n let mut computer = Computer::for_program(program_state);\n\n computer.set_input_stream(diagnostic_input);\n\n let last_output = computer.run_program();\n\n\n\n println!(\"Diagnostic code (part 1): {}\", last_output.unwrap());\n\n\n\n let program_state = parse_program(&raw_program);\n\n\n\n computer.load_program(program_state);\n\n computer.set_input_stream(vec![5]);\n\n let last_output = computer.run_program();\n\n\n\n 
println!(\"Diagnostic code (part 2): {}\", last_output.unwrap());\n\n}\n\n\n", "file_path": "rust/src/solutions/day05.rs", "rank": 16, "score": 124778.12692674267 }, { "content": "pub fn solve() {\n\n println!(\"Day 18\");\n\n\n\n let raw_map = adventlib::read_input_lines(\"day18input.txt\");\n\n\n\n let mut target_key_count = 0;\n\n let mut initial_position = None;\n\n let mut map = SparseGrid::new();\n\n for row in 0..raw_map.len() {\n\n let mut col = 0;\n\n for ch in raw_map[row].chars() {\n\n let cur_point = Point::new(col, row as i64);\n\n map.insert(cur_point, ch);\n\n\n\n if ch == '@' {\n\n initial_position = Some(cur_point);\n\n }\n\n\n\n if ch.is_ascii_lowercase() {\n\n target_key_count += 1;\n", "file_path": "rust/src/solutions/day18.rs", "rank": 17, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 16\");\n\n\n\n let initial_state_raw = adventlib::read_input_raw(\"day16input.txt\");\n\n let initial_state: Vec<_> = initial_state_raw\n\n .trim()\n\n .as_bytes()\n\n .iter()\n\n .map(|&b| b - 48)\n\n .collect();\n\n let mut current_state: Vec<_> = initial_state.iter().cloned().collect();\n\n\n\n for _phase in 1..=100 {\n\n current_state = compute_next_state_full(current_state);\n\n }\n\n\n\n let first_eight = get_string(&current_state, 0, 8);\n\n println!(\"After 100 phases (part 1): {}\", first_eight);\n\n\n\n let original_len = current_state.len();\n", "file_path": "rust/src/solutions/day16.rs", "rank": 18, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 13\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day13input.txt\");\n\n\n\n let mut screen_state = SparseGrid::<u8>::new();\n\n let program_state = parse_program(&raw_program);\n\n run_arcade_game(&mut screen_state, program_state).expect(\"Game errored\");\n\n\n\n let block_count = screen_state.iter().filter(|(_k, &v)| v == 2).count();\n\n screen_state.print(&render_tile);\n\n println!(\"Number of block tiles (part 1): {}\", 
block_count);\n\n\n\n loop {\n\n let mut screen_state = SparseGrid::<u8>::new();\n\n let mut program_state = parse_program(&raw_program);\n\n program_state[0] = 2;\n\n let score = run_arcade_game(&mut screen_state, program_state).expect(\"Game errored\");\n\n\n\n println!(\"Final score (part 2): {}\", score);\n\n\n\n if !ENABLE_MANUAL_CONTROL {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 19, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 15\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day15input.txt\");\n\n\n\n let mut grid_state = SparseGrid::<u8>::new();\n\n explore_map(&mut grid_state, &raw_program);\n\n\n\n grid_state.print(&render_grid_cell);\n\n\n\n let shortest_path = find_shortest_path_to_target(&grid_state);\n\n\n\n println!(\"Fewest moves (part 1): {}\", shortest_path);\n\n\n\n let max_distance_from_oxygen = find_max_distance_from_oxygen(&grid_state);\n\n println!(\n\n \"Minutes to fill with oxygen (part 2): {}\",\n\n max_distance_from_oxygen\n\n );\n\n}\n", "file_path": "rust/src/solutions/day15.rs", "rank": 20, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 7\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day07input.txt\");\n\n\n\n let phases = vec![0, 1, 2, 3, 4];\n\n let phase_permutations = get_permutations_of(phases);\n\n\n\n let mut max_signal = 0;\n\n for perm in phase_permutations {\n\n let signal = run_all_amplifiers(&raw_program, perm);\n\n\n\n if signal > max_signal {\n\n max_signal = signal;\n\n }\n\n }\n\n\n\n println!(\"Max signal (part 2): {}\", max_signal);\n\n\n\n let phases = vec![5, 6, 7, 8, 9];\n", "file_path": "rust/src/solutions/day07.rs", "rank": 21, "score": 124778.12692674267 }, { "content": "pub fn solve() {\n\n println!(\"Day 12\");\n\n\n\n let raw_moons = adventlib::read_input_lines(\"day12input.txt\");\n\n let mut moons: Vec<_> = raw_moons.iter().map(|m| parse_moon(m)).collect();\n\n let mut 
velocities: Vec<_> = vec![0; moons.len()]\n\n .iter()\n\n .map(|_| Point3d::new(0, 0, 0))\n\n .collect();\n\n\n\n for _time_step in 0..1000 {\n\n for (i, moon) in moons.iter().enumerate() {\n\n let x_pull = moons\n\n .iter()\n\n .map(|&other| normalized_compare(other.x, moon.x))\n\n .sum();\n\n let y_pull = moons\n\n .iter()\n\n .map(|&other| normalized_compare(other.y, moon.y))\n\n .sum();\n", "file_path": "rust/src/solutions/day12.rs", "rank": 22, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 10\");\n\n\n\n let raw_map = adventlib::read_input_lines(\"day10input.txt\");\n\n\n\n let asteroid_locations = get_asteroid_locations(&raw_map);\n\n let mut max_visible = 0;\n\n let mut max_location = Point::new(0, 0);\n\n\n\n for i in 0..asteroid_locations.len() {\n\n let observing_asteroid = asteroid_locations.get(i).unwrap();\n\n let mut cur_asteroid_angles = HashSet::new();\n\n for j in 0..asteroid_locations.len() {\n\n if i == j {\n\n continue; // don't count self\n\n }\n\n\n\n // Reference the angle to each asteroid as the smallest integral vector in its direction\n\n // to ensure asteroids in the same direction will have the same key.\n\n let candidate = asteroid_locations.get(j).unwrap();\n", "file_path": "rust/src/solutions/day10.rs", "rank": 23, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 6\");\n\n\n\n let lines = adventlib::read_input_lines(\"day06input.txt\");\n\n let mut direct_edges = HashMap::new();\n\n let mut objects = HashSet::new();\n\n\n\n for line in lines.iter() {\n\n let nodes: Vec<_> = line.split(')').collect();\n\n direct_edges.insert(nodes[1], nodes[0]);\n\n objects.insert(nodes[1]);\n\n }\n\n\n\n let mut total_count = 0;\n\n\n\n for obj in objects {\n\n total_count += count_orbits_to_center(obj, &direct_edges);\n\n }\n\n\n\n println!(\"Total orbit count: {}\", total_count);\n\n\n\n let our_start = direct_edges.get(\"YOU\").unwrap();\n\n let his_start = 
direct_edges.get(\"SAN\").unwrap();\n\n let distance = distance_between(our_start, his_start, &direct_edges);\n\n\n\n println!(\"Orbit transfers required: {}\", distance);\n\n}\n\n\n", "file_path": "rust/src/solutions/day06.rs", "rank": 24, "score": 124778.12692674267 }, { "content": "pub fn solve() {\n\n println!(\"Day 9\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day09input.txt\");\n\n\n\n let mut computer = Computer::new();\n\n\n\n let program_state = parse_program(&raw_program);\n\n\n\n computer.load_program(program_state);\n\n computer.set_input_stream(vec![1]);\n\n let last_output = computer.run_program();\n\n\n\n println!(\"BOOST keycode (part 1): {}\", last_output.unwrap());\n\n\n\n let program_state = parse_program(&raw_program);\n\n\n\n computer.load_program(program_state);\n\n computer.set_input_stream(vec![2]);\n\n let last_output = computer.run_program();\n\n\n\n println!(\"Coordinates (part 2): {}\", last_output.unwrap());\n\n}\n", "file_path": "rust/src/solutions/day09.rs", "rank": 25, "score": 124778.12692674267 }, { "content": "pub fn solve() {\n\n println!(\"Day 11\");\n\n\n\n let raw_program = adventlib::read_input_raw(\"day11input.txt\");\n\n\n\n let mut grid_state = SparseGrid::<i64>::new();\n\n paint_grid(&mut grid_state, &raw_program);\n\n\n\n println!(\"Number of cells touched (part 1): {}\", grid_state.len());\n\n\n\n let mut grid_state = SparseGrid::<i64>::new();\n\n grid_state.insert(Point::new(0, 0), 1);\n\n paint_grid(&mut grid_state, &raw_program);\n\n\n\n println!(\"Letters traced by robot (part 2):\");\n\n let cell_printer = |c: Option<&i64>| {\n\n if c.cloned().unwrap_or(0) == 1 {\n\n '#'\n\n } else {\n\n ' '\n\n }\n\n };\n\n grid_state.print(&cell_printer)\n\n}\n\n\n", "file_path": "rust/src/solutions/day11.rs", "rank": 26, "score": 124778.12692674264 }, { "content": "pub fn solve() {\n\n println!(\"Day 1\");\n\n\n\n let lines = adventlib::read_input_lines(\"day01input.txt\");\n\n\n\n let int_parser = |x: &String| 
x.parse::<i64>().unwrap();\n\n let total: i64 = lines.iter().map(int_parser).map(fuel_for_mass).sum();\n\n\n\n println!(\"Total fuel requirements (initial): {}\", total);\n\n\n\n let total: i64 = lines\n\n .iter()\n\n .map(int_parser)\n\n .map(converged_fuel_for_mass)\n\n .sum();\n\n\n\n println!(\"Total fuel requirements (iterated): {}\", total);\n\n}\n\n\n", "file_path": "rust/src/solutions/day01.rs", "rank": 27, "score": 124778.12692674264 }, { "content": "fn paint_grid(grid_state: &mut SparseGrid<i64>, raw_program: &String) {\n\n let program_state = parse_program(raw_program);\n\n let mut computer = Computer::for_program(program_state);\n\n\n\n let mut cur_direction = Direction::Up;\n\n let mut cur_position = Point::new(0, 0);\n\n\n\n while computer.run_state != RunState::Halted {\n\n let mut outputs = Vec::with_capacity(2);\n\n\n\n let cur_color = grid_state.get(&cur_position).cloned().unwrap_or(0);\n\n computer.set_input_stream(vec![cur_color]);\n\n\n\n computer.run_program_with_output(&mut outputs);\n\n grid_state.insert(cur_position, outputs[0]);\n\n\n\n cur_direction = match outputs[1] {\n\n 0 => cur_direction.turn_left(),\n\n 1 => cur_direction.turn_right(),\n\n _ => panic!(\"Unexpected output: {:#?}\", outputs),\n\n };\n\n\n\n cur_position = cur_position.vec_add(&cur_direction.as_vector());\n\n }\n\n}\n", "file_path": "rust/src/solutions/day11.rs", "rank": 28, "score": 123637.10147083222 }, { "content": "fn explore_map(grid_state: &mut SparseGrid<u8>, raw_program: &String) {\n\n let program_state = parse_program(raw_program);\n\n let mut computer = Computer::for_program(program_state);\n\n\n\n let cur_position = Point::origin();\n\n\n\n grid_state.insert(cur_position, MAP_START);\n\n explore_map_rec(grid_state, &mut computer, &cur_position);\n\n}\n\n\n", "file_path": "rust/src/solutions/day15.rs", "rank": 29, "score": 123637.10147083222 }, { "content": "fn collect_permutations(phases: &mut Vec<u8>, size: usize, permutations: &mut Vec<Vec<u8>>) {\n\n if 
size == 1 {\n\n permutations.push(phases.clone());\n\n return;\n\n }\n\n\n\n for i in 0..size {\n\n collect_permutations(phases, size - 1, permutations);\n\n\n\n // if size is odd, swap first and last element\n\n if size % 2 == 1 {\n\n let temp = phases[0];\n\n phases[0] = phases[size - 1];\n\n phases[size - 1] = temp;\n\n }\n\n // If size is even, swap ith and last element\n\n else {\n\n let temp = phases[i];\n\n phases[i] = phases[size - 1];\n\n phases[size - 1] = temp;\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/src/solutions/day07.rs", "rank": 30, "score": 110547.91492212645 }, { "content": "fn get_movement_instruction_string() -> String {\n\n // Determined by manually tracing the path\n\n let mut input_instructions = String::from(\"A,A,B,C,B,C,B,A,C,A\\n\");\n\n input_instructions.extend(\"R,8,L,12,R,8\\n\".chars());\n\n input_instructions.extend(\"L,10,L,10,R,8\\n\".chars());\n\n input_instructions.extend(\"L,12,L,12,L,10,R,10\\n\".chars());\n\n input_instructions.push(if ENABLE_CONTINUOUS_DISPLAY { 'y' } else { 'n' });\n\n input_instructions.push('\\n');\n\n input_instructions\n\n}\n", "file_path": "rust/src/solutions/day17.rs", "rank": 31, "score": 97932.30862938294 }, { "content": "fn time_with_label(f: &dyn Fn(), label: &str) {\n\n let now = Instant::now();\n\n f();\n\n let duration = now.elapsed();\n\n println!(\n\n \"{} {}.{:09}s\\n\",\n\n label,\n\n duration.as_secs(),\n\n duration.subsec_nanos()\n\n );\n\n}\n", "file_path": "rust/src/main.rs", "rank": 34, "score": 93579.8428974174 }, { "content": "struct CircleListNode<T> {\n\n value: T,\n\n next_key: usize,\n\n prev_key: usize,\n\n}\n\n\n\n#[derive(Eq, PartialEq, Copy, Clone, Debug)]\n\npub struct CircleListPointer(usize);\n\n\n\nimpl<T> CircleList<T> {\n\n pub fn new() -> CircleList<T> {\n\n Self::with_capacity(0)\n\n }\n\n\n\n pub fn with_capacity(capacity: usize) -> CircleList<T> {\n\n CircleList::<T> {\n\n nodes: Slab::with_capacity(capacity),\n\n last: Option::None,\n\n }\n\n }\n", "file_path": 
"rust/src/adventlib/collections.rs", "rank": 35, "score": 89031.15038469393 }, { "content": "fn get_distance(segment: &str) -> i32 {\n\n segment[1..].parse().unwrap()\n\n}\n", "file_path": "rust/src/solutions/day03.rs", "rank": 36, "score": 87671.03247594068 }, { "content": "fn simulate_dimension_until_loop(mut locations: Vec<i64>, mut velocities: Vec<i64>) -> (i64, i64) {\n\n // Same simulation, but independent for x, y, and z\n\n // Hoping to find lcm of separate loops\n\n\n\n let mut seen_states = HashMap::new();\n\n let mut time_steps = 0_i64;\n\n\n\n let mut last_state = represent_state(&locations, &velocities);\n\n while !seen_states.contains_key(&last_state) {\n\n seen_states.insert(last_state, time_steps);\n\n time_steps += 1;\n\n\n\n for (i, moon) in locations.iter().enumerate() {\n\n let pull: i64 = locations\n\n .iter()\n\n .map(|&other| normalized_compare(other, *moon))\n\n .sum();\n\n\n\n let cur_velocity = velocities[i];\n\n let new_velocity = cur_velocity + pull;\n", "file_path": "rust/src/solutions/day12.rs", "rank": 37, "score": 86778.53575858689 }, { "content": "fn parse_moon(raw_moon: &String) -> Point3d {\n\n lazy_static! 
{\n\n static ref PATTERN: Regex =\n\n Regex::new(r\"<.=(-?\\d+), .=(-?\\d+), .=(-?\\d+)>\").expect(\"pattern for parsing\");\n\n }\n\n\n\n let captures = PATTERN\n\n .captures(raw_moon)\n\n .expect(\"Line should match format\");\n\n Point3d::new(\n\n captures[1].parse().unwrap(),\n\n captures[2].parse().unwrap(),\n\n captures[3].parse().unwrap(),\n\n )\n\n}\n\n\n", "file_path": "rust/src/solutions/day12.rs", "rank": 38, "score": 85274.23374404255 }, { "content": "fn get_string(state: &Vec<u8>, skip: usize, take: usize) -> String {\n\n state\n\n .iter()\n\n .skip(skip)\n\n .take(take)\n\n .map(|&b| (b + 48) as char)\n\n .collect()\n\n}\n", "file_path": "rust/src/solutions/day16.rs", "rank": 39, "score": 82557.57680989776 }, { "content": "fn parse_program(raw_program: &String) -> Vec<i64> {\n\n let int_parser = |x: &str| x.parse::<i64>().unwrap();\n\n return raw_program.trim().split(',').map(int_parser).collect();\n\n}\n", "file_path": "rust/src/solutions/day07.rs", "rank": 40, "score": 82540.99972275196 }, { "content": "fn create_computer(raw_program: &String) -> Computer<i64> {\n\n let program_state = parse_program(&raw_program);\n\n return Computer::for_program(program_state);\n\n}\n\n\n", "file_path": "rust/src/solutions/day07.rs", "rank": 41, "score": 82540.99972275196 }, { "content": "fn parse_program(raw_program: &String) -> Vec<i64> {\n\n let int_parser = |x: &str| x.parse::<i64>().unwrap();\n\n return raw_program.trim().split(',').map(int_parser).collect();\n\n}\n", "file_path": "rust/src/solutions/day05.rs", "rank": 42, "score": 82540.99972275196 }, { "content": "fn render_scaffolding(image_data: Vec<i64>) -> String {\n\n image_data.iter().map(|&b| (b as u8 as char)).collect()\n\n}\n\n\n", "file_path": "rust/src/solutions/day17.rs", "rank": 43, "score": 82540.99972275196 }, { "content": "fn get_asteroid_locations(raw_map: &Vec<String>) -> Vec<Point> {\n\n let mut asteroid_locations = Vec::with_capacity(300);\n\n let mut cur_y = 0;\n\n for line in raw_map 
{\n\n let mut cur_x = 0;\n\n for ch in line.chars() {\n\n if ch == '#' {\n\n asteroid_locations.push(Point::new(cur_x, cur_y))\n\n }\n\n cur_x += 1;\n\n }\n\n\n\n cur_y += 1;\n\n }\n\n return asteroid_locations;\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 44, "score": 78946.34752487342 }, { "content": "fn run_all_amplifiers(raw_program: &String, phases: Vec<u8>) -> i64 {\n\n let first_input_value = 0;\n\n\n\n let mut output = run_amplifier_returning_output(raw_program, phases[0], first_input_value);\n\n output = run_amplifier_returning_output(raw_program, phases[1], output);\n\n output = run_amplifier_returning_output(raw_program, phases[2], output);\n\n output = run_amplifier_returning_output(raw_program, phases[3], output);\n\n return run_amplifier_returning_output(raw_program, phases[4], output);\n\n}\n\n\n", "file_path": "rust/src/solutions/day07.rs", "rank": 45, "score": 77636.31961717863 }, { "content": "fn get_permutations_of(mut phases: Vec<u8>) -> Vec<Vec<u8>> {\n\n let mut phase_permutations = Vec::new();\n\n let size = phases.len();\n\n collect_permutations(&mut phases, size, &mut phase_permutations);\n\n return phase_permutations;\n\n}\n\n\n", "file_path": "rust/src/solutions/day07.rs", "rank": 46, "score": 77550.53705718191 }, { "content": "fn run_amplifiers_in_loop(raw_program: &String, phases: Vec<u8>) -> i64 {\n\n let first_input_value = 0;\n\n let mut is_first_run = true;\n\n let mut computers = vec![\n\n create_computer(raw_program),\n\n create_computer(raw_program),\n\n create_computer(raw_program),\n\n create_computer(raw_program),\n\n create_computer(raw_program),\n\n ];\n\n\n\n let mut next_input = first_input_value;\n\n while computers[4].run_state != RunState::Halted {\n\n for i in 0..5 {\n\n let input = if is_first_run {\n\n vec![phases[i] as i64, next_input]\n\n } else {\n\n vec![next_input]\n\n };\n\n next_input = run_computer_for_input(&mut computers[i], input);\n\n }\n\n is_first_run = false;\n\n }\n\n\n\n return 
next_input;\n\n}\n\n\n", "file_path": "rust/src/solutions/day07.rs", "rank": 47, "score": 76623.79866095039 }, { "content": "fn represent_state(locations: &Vec<i64>, velocities: &Vec<i64>) -> String {\n\n // Assuming 4 points\n\n return format!(\n\n \"{},{},{},{},{},{},{},{}\",\n\n locations[0],\n\n locations[1],\n\n locations[2],\n\n locations[3],\n\n velocities[0],\n\n velocities[1],\n\n velocities[2],\n\n velocities[3]\n\n );\n\n}\n", "file_path": "rust/src/solutions/day12.rs", "rank": 48, "score": 76436.4051445093 }, { "content": "fn is_intersection(lines: &Vec<&str>, row: usize, col: usize) -> bool {\n\n if lines[row].chars().nth(col) != Some('#') {\n\n return false;\n\n }\n\n\n\n let mut neighbor_count = 0;\n\n if row > 0 && lines[row - 1].chars().nth(col) == Some('#') {\n\n neighbor_count += 1;\n\n }\n\n if row < lines.len() - 1 && lines[row + 1].chars().nth(col) == Some('#') {\n\n neighbor_count += 1;\n\n }\n\n if col > 0 && lines[row].chars().nth(col - 1) == Some('#') {\n\n neighbor_count += 1;\n\n }\n\n if lines[row].chars().nth(col + 1) == Some('#') {\n\n neighbor_count += 1;\n\n }\n\n\n\n return neighbor_count >= 3;\n\n}\n\n\n", "file_path": "rust/src/solutions/day17.rs", "rank": 49, "score": 76333.13541049007 }, { "content": "fn is_navigable(grid_state: &SparseGrid<char>, location: &Point, keys: &String) -> bool {\n\n grid_state\n\n .get(location)\n\n .map(|&cell| {\n\n cell == '.'\n\n || cell == '@'\n\n || cell.is_ascii_lowercase()\n\n || keys.contains(ascii_to_lowercase(cell).unwrap_or(cell))\n\n })\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "rust/src/solutions/day18.rs", "rank": 50, "score": 73349.1666955189 }, { "content": "fn compute_next_state_second_half(current_state: &Vec<u8>, next_state: &mut Vec<u8>) {\n\n // For a position N_target past the midpoint of the data, the only non-zero contributers\n\n // from the source are the contiguous sequence of elements from N_target to the end.\n\n //\n\n // These are all included by multiplication 
with +1 from the base pattern, so we can\n\n // also assume there are no subtractions and no reason to track more than one digit\n\n // while we incrementally build up an answer.\n\n let mut last_digit_of_sum = 0;\n\n for target_index in (0..current_state.len()).rev() {\n\n last_digit_of_sum = (last_digit_of_sum + current_state[target_index]) % 10;\n\n next_state[target_index] = last_digit_of_sum;\n\n }\n\n}\n\n\n", "file_path": "rust/src/solutions/day16.rs", "rank": 51, "score": 72834.62545685537 }, { "content": "fn run_arcade_game(screen: &mut SparseGrid<u8>, program_state: Vec<i64>) -> Result<i64> {\n\n let mut computer = Computer::for_program(program_state);\n\n let mut score = 0;\n\n\n\n let mut ball_position_before = find_ball(screen);\n\n let mut next_input = 0;\n\n\n\n if RENDER_GAME_FRAMES {\n\n start_game_window()?\n\n };\n\n\n\n while computer.run_state != RunState::Halted {\n\n let mut outputs = Vec::new();\n\n computer.set_input_stream(vec![next_input]);\n\n computer.run_program_with_output(&mut outputs);\n\n\n\n for i in (0..outputs.len()).step_by(3) {\n\n let output_location = Point::new(outputs[i], outputs[i + 1]);\n\n let output_argument = outputs[i + 2];\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 52, "score": 70659.41457726131 }, { "content": "fn solve_all() {\n\n time(&day01::solve);\n\n time(&day02::solve);\n\n time(&day03::solve);\n\n time(&day04::solve);\n\n time(&day05::solve);\n\n time(&day06::solve);\n\n time(&day07::solve);\n\n time(&day08::solve);\n\n time(&day09::solve);\n\n time(&day10::solve);\n\n time(&day11::solve);\n\n time(&day12::solve);\n\n time(&day13::solve);\n\n time(&day14::solve);\n\n time(&day15::solve);\n\n time(&day16::solve);\n\n time(&day17::solve);\n\n time(&day18::solve);\n\n}\n\n\n", "file_path": "rust/src/main.rs", "rank": 53, "score": 70013.10459725185 }, { "content": "fn collect_asteroids_by_direction(\n\n asteroid_locations: &Vec<Point>,\n\n reference_point: &Point,\n\n) -> HashMap<Point, 
HashSet<Point>> {\n\n let mut asteroids_by_direction = HashMap::new();\n\n\n\n for asteroid in asteroid_locations.iter() {\n\n let raw_vector = asteroid.vec_subtract(reference_point);\n\n\n\n if raw_vector.x == 0 && raw_vector.y == 0 {\n\n continue;\n\n }\n\n\n\n // Reference the angle to each asteroid as the smallest integral vector in its direction\n\n // to ensure asteroids in the same direction will end up in the same set.\n\n let reduced_vector = reduce_vector(raw_vector);\n\n let set_for_angle = asteroids_by_direction\n\n .entry(reduced_vector)\n\n .or_insert(HashSet::<Point>::new());\n\n set_for_angle.insert(*asteroid);\n\n }\n\n\n\n return asteroids_by_direction;\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 54, "score": 66097.37253095594 }, { "content": "fn fuel_for_mass(mass: i64) -> i64 {\n\n mass / 3 - 2\n\n}\n\n\n", "file_path": "rust/src/solutions/day01.rs", "rank": 55, "score": 62244.59652460622 }, { "content": "fn converged_fuel_for_mass(mass: i64) -> i64 {\n\n let mut mass_including_fuel = mass;\n\n let mut next_fuel_mass = fuel_for_mass(mass);\n\n\n\n while next_fuel_mass > 0 {\n\n mass_including_fuel += next_fuel_mass;\n\n next_fuel_mass = fuel_for_mass(next_fuel_mass);\n\n }\n\n\n\n return mass_including_fuel - mass;\n\n}\n", "file_path": "rust/src/solutions/day01.rs", "rank": 56, "score": 61024.52715604682 }, { "content": "fn time(f: &dyn Fn()) {\n\n time_with_label(f, \"Solved in\");\n\n}\n\n\n", "file_path": "rust/src/main.rs", "rank": 57, "score": 43188.03819712088 }, { "content": "fn main() {\n\n time_with_label(&solve_all, \"Total time: \");\n\n}\n\n\n", "file_path": "rust/src/main.rs", "rank": 58, "score": 37522.200595220515 }, { "content": "#[test]\n\nfn lowercase_an_a() {\n\n assert_eq!(ascii_to_lowercase('A'), Some('a'));\n\n}\n\n\n", "file_path": "rust/src/solutions/day18.rs", "rank": 59, "score": 36809.932469105144 }, { "content": "fn bfs_until_condition(\n\n grid_state: &SparseGrid<u8>,\n\n search_start: 
Point,\n\n stop_condition: &dyn (Fn(&Point) -> bool),\n\n) -> i32 {\n\n let mut search_nodes = vec![(search_start, 0)];\n\n let mut already_reached = HashSet::new();\n\n\n\n let mut last_search_depth = -1;\n\n\n\n while search_nodes.len() > 0 {\n\n let (search_loc, search_depth) = search_nodes.remove(0);\n\n\n\n already_reached.insert(search_loc);\n\n last_search_depth = search_depth;\n\n\n\n if stop_condition(&search_loc) {\n\n break;\n\n }\n\n\n", "file_path": "rust/src/solutions/day15.rs", "rank": 60, "score": 36132.85686730976 }, { "content": "#[test]\n\nfn gcd_with_zero() {\n\n assert_eq!(gcd(12, 0), 12);\n\n assert_eq!(gcd(0, 7), 7);\n\n}\n", "file_path": "rust/src/solutions/day10.rs", "rank": 61, "score": 36132.85686730976 }, { "content": "#[test]\n\nfn gcd_less_than_both() {\n\n assert_eq!(gcd(12, 8), 4);\n\n assert_eq!(gcd(23, 7), 1);\n\n assert_eq!(gcd(90, 150), 30);\n\n assert_eq!(gcd(55, 25), 5);\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 62, "score": 36132.85686730976 }, { "content": "fn bfs_until_condition(\n\n grid_state: &SparseGrid<char>,\n\n search_start: Point,\n\n stop_condition: &dyn (Fn(&Point, &String) -> bool),\n\n) -> i32 {\n\n let mut search_nodes = vec![(search_start, 0, String::new())];\n\n let mut visited_with_keys = HashMap::new();\n\n\n\n let mut last_search_depth = -1;\n\n\n\n while search_nodes.len() > 0 {\n\n let (search_loc, search_depth, mut keys) = search_nodes.remove(0);\n\n\n\n let visited = visited_with_keys\n\n .entry(keys.clone())\n\n .or_insert(HashSet::new());\n\n\n\n if visited.contains(&search_loc) {\n\n continue;\n\n }\n", "file_path": "rust/src/solutions/day18.rs", "rank": 63, "score": 36132.85686730976 }, { "content": "#[test]\n\nfn gcd_reversed_arguments() {\n\n assert_eq!(gcd(1, 4), 1);\n\n assert_eq!(gcd(2, 4), 2);\n\n assert_eq!(gcd(3, 12), 3);\n\n assert_eq!(gcd(4, 12), 4);\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 64, "score": 35488.42842567921 }, { "content": "fn 
explore_map_rec(\n\n grid_state: &mut SparseGrid<u8>,\n\n computer: &mut Computer<i64>,\n\n cur_position: &Point,\n\n) {\n\n let directions = vec![\n\n Direction::Left, // West\n\n Direction::Up, // North\n\n Direction::Right, // East\n\n Direction::Down, // South\n\n ];\n\n\n\n for direction in directions {\n\n let next_step = direction.as_vector().vec_add(&cur_position);\n\n if grid_state.get(&next_step) != None {\n\n continue; // already been there\n\n }\n\n\n\n let move_command = get_move_command(direction);\n\n let mut outputs = Vec::with_capacity(1);\n", "file_path": "rust/src/solutions/day15.rs", "rank": 65, "score": 35488.42842567921 }, { "content": "#[test]\n\nfn gcd_simple_cases() {\n\n assert_eq!(gcd(4, 1), 1);\n\n assert_eq!(gcd(4, 2), 2);\n\n assert_eq!(gcd(12, 3), 3);\n\n assert_eq!(gcd(12, 4), 4);\n\n assert_eq!(gcd(12, 12), 12);\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 66, "score": 35488.42842567921 }, { "content": "#[test]\n\nfn door_navigable_with_key() {\n\n let mut grid = SparseGrid::new();\n\n let location = Point::new(1, 2);\n\n grid.insert(location, 'A');\n\n let keys = \"abc\".to_owned();\n\n\n\n assert!(is_navigable(&grid, &location, &keys));\n\n}\n\n\n", "file_path": "rust/src/solutions/day18.rs", "rank": 67, "score": 35488.42842567921 }, { "content": "#[test]\n\nfn door_unnavigable_without_key() {\n\n let mut grid = SparseGrid::new();\n\n let location = Point::new(1, 2);\n\n grid.insert(location, 'A');\n\n let keys = \"bc\".to_owned();\n\n\n\n assert!(!is_navigable(&grid, &location, &keys));\n\n}\n", "file_path": "rust/src/solutions/day18.rs", "rank": 68, "score": 34874.34146569236 }, { "content": "fn start_game_window() -> Result<()> {\n\n execute!(\n\n stdout(),\n\n terminal::EnterAlternateScreen,\n\n terminal::Clear(terminal::ClearType::All),\n\n cursor::SavePosition,\n\n cursor::Hide,\n\n )\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 69, "score": 34345.908795135096 }, { "content": "fn 
clear_event_stream() -> Result<()> {\n\n while poll(Duration::from_millis(1))? {\n\n read()?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "rust/src/solutions/day13.rs", "rank": 70, "score": 34345.908795135096 }, { "content": "fn close_game_window() -> Result<()> {\n\n execute!(stdout(), terminal::LeaveAlternateScreen)\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 71, "score": 34345.908795135096 }, { "content": "fn read_direction_from_keyboard() -> Result<i64> {\n\n clear_event_stream()?;\n\n match read()? {\n\n Event::Key(event) => match event.code {\n\n KeyCode::Left => Ok(-1),\n\n KeyCode::Right => Ok(1),\n\n _ => Ok(0),\n\n },\n\n _ => Ok(0),\n\n }\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 72, "score": 33287.87082326218 }, { "content": "extern crate slab;\n\n\n\nuse self::slab::Slab;\n\n\n\npub struct CircleList<T> {\n\n nodes: Slab<CircleListNode<T>>,\n\n pub last: Option<CircleListPointer>,\n\n}\n\n\n", "file_path": "rust/src/adventlib/collections.rs", "rank": 73, "score": 33000.33331793436 }, { "content": "\n\n pub fn insert(&mut self, value: T) {\n\n match self.last {\n\n None => {\n\n let next_slot = self.nodes.vacant_entry();\n\n let next_key = next_slot.key();\n\n next_slot.insert(CircleListNode {\n\n value,\n\n next_key: next_key,\n\n prev_key: next_key,\n\n });\n\n self.last = Some(CircleListPointer(next_key));\n\n }\n\n Some(cur_ptr) => self.insert_after(cur_ptr, value),\n\n };\n\n }\n\n\n\n pub fn insert_after(&mut self, cur_ptr: CircleListPointer, value: T) {\n\n let next_key = self.nodes[cur_ptr.0].next_key;\n\n let prev_key = cur_ptr.0;\n", "file_path": "rust/src/adventlib/collections.rs", "rank": 74, "score": 32999.225216960476 }, { "content": "\n\n pub fn remove(&mut self, cur_ptr: CircleListPointer) -> T {\n\n let rem_node = self.nodes.remove(cur_ptr.0);\n\n if rem_node.prev_key != rem_node.next_key {\n\n self.nodes[rem_node.next_key].prev_key = rem_node.prev_key;\n\n 
self.nodes[rem_node.prev_key].next_key = rem_node.next_key;\n\n self.last = Some(CircleListPointer(rem_node.next_key));\n\n } else {\n\n self.last = None;\n\n }\n\n\n\n return rem_node.value;\n\n }\n\n}\n\n\n\nimpl<T> CircleList<T>\n\nwhere\n\n T: Copy,\n\n{\n\n pub fn get_value(&self, cur_ptr: CircleListPointer) -> Option<T> {\n\n return match self.nodes.get(cur_ptr.0) {\n\n Some(node) => Some(node.value),\n\n None => None,\n\n };\n\n }\n\n}\n", "file_path": "rust/src/adventlib/collections.rs", "rank": 75, "score": 32998.170218358566 }, { "content": " let new_key = self.nodes.insert(CircleListNode {\n\n value,\n\n next_key: next_key,\n\n prev_key: prev_key,\n\n });\n\n\n\n self.nodes[next_key].prev_key = new_key;\n\n self.nodes[prev_key].next_key = new_key;\n\n self.last = Some(CircleListPointer(new_key));\n\n }\n\n\n\n pub fn next_node(&self, cur_ptr: CircleListPointer) -> CircleListPointer {\n\n let cur_node = &self.nodes[cur_ptr.0];\n\n CircleListPointer(cur_node.next_key)\n\n }\n\n\n\n pub fn prev_node(&self, cur_ptr: CircleListPointer) -> CircleListPointer {\n\n let cur_node = &self.nodes[cur_ptr.0];\n\n CircleListPointer(cur_node.prev_key)\n\n }\n", "file_path": "rust/src/adventlib/collections.rs", "rank": 76, "score": 32997.15382245871 }, { "content": "fn digits_only_increase(candidate: i32) -> bool {\n\n // in other words, they only *decrease* going backwards\n\n let mut last_digit = candidate % 10;\n\n let mut remaining_digits = candidate / 10;\n\n while remaining_digits > 0 {\n\n if (remaining_digits % 10) > last_digit {\n\n return false;\n\n }\n\n last_digit = remaining_digits % 10;\n\n remaining_digits /= 10;\n\n }\n\n\n\n return true;\n\n}\n\n\n", "file_path": "rust/src/solutions/day04.rs", "rank": 77, "score": 32863.38284019023 }, { "content": "fn angle_from_negative_y_axis(a: &Point) -> f64 {\n\n let mut angle = (a.y as f64).atan2(a.x as f64);\n\n\n\n // shift third quadrant to the end of the rotation\n\n if angle < -f64::consts::FRAC_PI_2 {\n\n 
angle += f64::consts::PI * 2.0;\n\n }\n\n\n\n // shift negative y-axis to zero (mostly for sanity)\n\n return angle + f64::consts::FRAC_PI_2;\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 78, "score": 32863.38284019023 }, { "content": "fn reduce_vector(vector: Point) -> Point {\n\n let gcd = gcd(vector.x.abs(), vector.y.abs());\n\n Point::new(vector.x / gcd, vector.y / gcd)\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 79, "score": 32863.38284019023 }, { "content": "fn gcd(a: i64, b: i64) -> i64 {\n\n if a == b || b == 0 {\n\n return a;\n\n }\n\n\n\n if a < b {\n\n return gcd(b, a);\n\n }\n\n\n\n if a % b == 0 {\n\n return b;\n\n }\n\n\n\n return gcd(b, a % b);\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 80, "score": 32717.108813052473 }, { "content": "fn ascii_to_lowercase(c: char) -> Option<char> {\n\n c.to_lowercase().nth(0)\n\n}\n\n\n", "file_path": "rust/src/solutions/day18.rs", "rank": 81, "score": 32479.72248809038 }, { "content": "fn get_move_command(direction: Direction) -> i64 {\n\n let cmd = match direction {\n\n Direction::Up => MOVE_NORTH,\n\n Direction::Down => MOVE_SOUTH,\n\n Direction::Left => MOVE_WEST,\n\n Direction::Right => MOVE_EAST,\n\n };\n\n cmd as i64\n\n}\n\n\n", "file_path": "rust/src/solutions/day15.rs", "rank": 82, "score": 32303.88644986117 }, { "content": "fn includes_doubled_digit(candidate: i32) -> bool {\n\n // in other words, they only *decrease* going backwards\n\n let mut last_digit = candidate % 10;\n\n let mut remaining_digits = candidate / 10;\n\n while remaining_digits > 0 {\n\n if remaining_digits % 10 == last_digit {\n\n return true;\n\n }\n\n last_digit = remaining_digits % 10;\n\n remaining_digits /= 10;\n\n }\n\n\n\n return false;\n\n}\n\n\n", "file_path": "rust/src/solutions/day04.rs", "rank": 83, "score": 32303.88644986117 }, { "content": "fn normalized_compare(a: i64, b: i64) -> i64 {\n\n if a < b {\n\n -1\n\n } else if a > b {\n\n 1\n\n } else {\n\n 0\n\n 
}\n\n}\n\n\n", "file_path": "rust/src/solutions/day12.rs", "rank": 84, "score": 32131.269778047565 }, { "content": "fn clockwise_order_from_up(a: &Point, b: &Point) -> Ordering {\n\n // Important: \"up\" in the problem is toward negative y, and\n\n // clockwise is in the direction from negative y toward positive x.\n\n // This corresponds to counter-clockwise rotation in a right-handed coordinate system.\n\n let angle_a = angle_from_negative_y_axis(a);\n\n let angle_b = angle_from_negative_y_axis(b);\n\n return angle_a.partial_cmp(&angle_b).unwrap();\n\n}\n\n\n", "file_path": "rust/src/solutions/day10.rs", "rank": 85, "score": 32131.269778047565 }, { "content": "fn includes_exactly_doubled_digit(candidate: i32) -> bool {\n\n // in other words, they only *decrease* going backwards\n\n let mut group_digit = candidate % 10;\n\n let mut group_count = 1;\n\n let mut remaining_digits = candidate / 10;\n\n while remaining_digits > 0 {\n\n if remaining_digits % 10 == group_digit {\n\n group_count += 1;\n\n } else {\n\n group_count = 1;\n\n group_digit = remaining_digits % 10;\n\n }\n\n\n\n remaining_digits /= 10;\n\n\n\n if group_count == 2 && remaining_digits % 10 != group_digit {\n\n return true;\n\n }\n\n }\n\n\n\n return false;\n\n}\n", "file_path": "rust/src/solutions/day04.rs", "rank": 86, "score": 31768.995000710813 }, { "content": "fn render_tile(tile_code: Option<&u8>) -> char {\n\n match tile_code.cloned().unwrap_or(0) {\n\n 0 => ' ',\n\n 1 => '|',\n\n 2 => '#',\n\n 3 => '_',\n\n 4 => '*',\n\n x => panic!(format!(\"Unexpected tile code: {}\", x)),\n\n }\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 87, "score": 31385.334648610966 }, { "content": "fn render_grid_cell(cell: Option<&u8>) -> char {\n\n match cell {\n\n Some(&x) if x == MAP_OPEN => '.',\n\n Some(&x) if x == MAP_WALL => '#',\n\n Some(&x) if x == MAP_START => '^',\n\n Some(&x) if x == MAP_OXYGEN => '$',\n\n Some(_) | None => ' ',\n\n }\n\n}\n", "file_path": 
"rust/src/solutions/day15.rs", "rank": 88, "score": 31385.334648610966 }, { "content": "fn alignment_parameter(row: usize, col: usize) -> usize {\n\n row * col\n\n}\n\n\n", "file_path": "rust/src/solutions/day17.rs", "rank": 89, "score": 31036.88193856815 }, { "content": "fn find_ball(screen: &SparseGrid<u8>) -> Option<Point> {\n\n return screen\n\n .iter()\n\n .find(|(&_k, &v)| v == 4)\n\n .map(|(&k, _v)| k)\n\n .clone();\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 90, "score": 30525.007276513516 }, { "content": "fn find_max_distance_from_oxygen(grid_state: &SparseGrid<u8>) -> i32 {\n\n let search_start = grid_state\n\n .find_location_of(&MAP_OXYGEN)\n\n .expect(\"Must have explored oxygen system.\");\n\n\n\n return bfs_until_condition(grid_state, search_start, &|_| false);\n\n}\n\n\n", "file_path": "rust/src/solutions/day15.rs", "rank": 91, "score": 29913.06360841976 }, { "content": "fn find_shortest_path_to_target(grid_state: &SparseGrid<u8>) -> i32 {\n\n let search_start = Point::origin();\n\n let search_target = grid_state\n\n .find_location_of(&MAP_OXYGEN)\n\n .expect(\"Must have explored oxygen system.\");\n\n\n\n return bfs_until_condition(grid_state, search_start, &|&p| p == search_target);\n\n}\n\n\n", "file_path": "rust/src/solutions/day15.rs", "rank": 92, "score": 29913.06360841976 }, { "content": "fn render_frame(frame: &SparseGrid<u8>, score: i64) -> Result<()> {\n\n thread::sleep(Duration::from_millis(300));\n\n\n\n let rendered_screen = frame.render_to_string(&render_tile);\n\n execute!(\n\n stdout(),\n\n cursor::RestorePosition,\n\n style::Print(&rendered_screen),\n\n cursor::RestorePosition,\n\n style::Print(format!(\"{}\", score)),\n\n )\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 93, "score": 29716.816206693085 }, { "content": "fn is_navigable(grid_state: &SparseGrid<u8>, location: &Point) -> bool {\n\n grid_state\n\n .get(location)\n\n .map(|&cell| cell != MAP_WALL)\n\n .unwrap_or(false)\n\n}\n\n\n", 
"file_path": "rust/src/solutions/day15.rs", "rank": 94, "score": 29716.816206693085 }, { "content": "fn compute_next_state_full(current_state: Vec<u8>) -> Vec<u8> {\n\n let mut next_state = Vec::with_capacity(current_state.len());\n\n for target_pos in 1..=current_state.len() {\n\n let target_value: i32 = current_state\n\n .iter()\n\n .enumerate()\n\n .map(|(i, &val)| {\n\n let base_seq_pos = ((i + 1) / target_pos) % 4;\n\n val as i32 * BASE_SEQ[base_seq_pos] as i32\n\n })\n\n .sum();\n\n\n\n next_state.push((target_value.abs() % 10) as u8);\n\n }\n\n\n\n return next_state;\n\n}\n\n\n", "file_path": "rust/src/solutions/day16.rs", "rank": 95, "score": 29564.610898376945 }, { "content": "fn get_total_energy(moons: &Vec<Point3d>, velocities: &Vec<Point3d>) -> i64 {\n\n return moons\n\n .iter()\n\n .zip(velocities.iter())\n\n .map(|(m, v)| {\n\n m.manhattan_dist_to(&Point3d::origin()) * v.manhattan_dist_to(&Point3d::origin())\n\n })\n\n .sum();\n\n}\n\n\n", "file_path": "rust/src/solutions/day12.rs", "rank": 96, "score": 28955.572289268486 }, { "content": "fn decide_move_direction(screen: &SparseGrid<u8>, previous_ball_pos: Option<Point>) -> i64 {\n\n if ENABLE_MANUAL_CONTROL {\n\n return read_direction_from_keyboard().unwrap();\n\n }\n\n let ball_position = find_ball(screen).expect(\"There should be a ball\");\n\n let paddle_position = screen\n\n .iter()\n\n .find(|(&_k, &v)| v == 3)\n\n .expect(\"There should be a paddle\")\n\n .0;\n\n\n\n let predicted_ball_pos = match previous_ball_pos {\n\n Some(prev) => ball_position.vec_add(&ball_position).vec_subtract(&prev),\n\n None => ball_position,\n\n };\n\n\n\n return if predicted_ball_pos.x < paddle_position.x {\n\n -1\n\n } else if predicted_ball_pos.x > paddle_position.x {\n\n 1\n\n } else {\n\n 0\n\n };\n\n}\n\n\n", "file_path": "rust/src/solutions/day13.rs", "rank": 97, "score": 27654.90002176565 } ]
Rust
radogost/rlox/src/resolver.rs
stormasm/lox
ec1d179f6c478c40b7eb10304e40a19995ef09e6
use crate::error::{LoxError, Result}; use crate::statement::{Expr, ExprId, Stmt}; use std::collections::HashMap; #[derive(Copy, Clone, PartialEq, Eq)] enum FunctionType { None, Method, Function, Initializer, } #[derive(Copy, Clone, PartialEq, Eq)] enum ClassType { None, Class, SubClass, } pub type Depth = u64; struct Resolver<'a> { scopes: Vec<HashMap<&'a str, bool>>, expr_id_to_depth: HashMap<ExprId, Depth>, current_function: FunctionType, current_class: ClassType, } impl<'a> Resolver<'a> { fn new() -> Self { Self { scopes: Vec::new(), expr_id_to_depth: HashMap::new(), current_function: FunctionType::None, current_class: ClassType::None, } } fn resolve(&mut self, statements: &'a [Stmt]) -> Result<HashMap<ExprId, Depth>> { self.resolve_statements(statements)?; Ok(std::mem::take(&mut self.expr_id_to_depth)) } fn resolve_statements(&mut self, stmts: &'a [Stmt]) -> Result<()> { for stmt in stmts { self.resolve_statement(stmt)?; } Ok(()) } fn resolve_statement(&mut self, stmt: &'a Stmt) -> Result<()> { match stmt { Stmt::Block { statements } => { self.begin_scope(); self.resolve_statements(statements.as_ref())?; self.end_scope(); } Stmt::Var { name, initializer } => { self.declare(name); self.define(name); if let Some(initializer) = initializer { self.resolve_expression(initializer)?; } } Stmt::Function { name, parameters, body, } => { self.resolve_function(name, parameters, body, FunctionType::Function)?; } Stmt::Expression { expression } => { self.resolve_expression(expression)?; } Stmt::If { condition, then_branch, else_branch, } => { self.resolve_expression(condition)?; self.resolve_statement(then_branch.as_ref())?; if let Some(stmt) = else_branch { self.resolve_statement(stmt)?; } } Stmt::Print { expression } => self.resolve_expression(expression)?, Stmt::Return { value } => { if self.current_function == FunctionType::None { return Err(LoxError::ResolverError( "Cannot return from top-level code.", )); } if let Some(value) = value { if self.current_function == 
FunctionType::Initializer { return Err(LoxError::ResolverError( "Cannot return a value from an initializer.", )); } self.resolve_expression(value)?; } } Stmt::While { condition, body } => { self.resolve_expression(condition)?; self.resolve_statement(body)?; } Stmt::Class { name, superclass, methods, } => { let enclosing_class = self.current_class; self.current_class = ClassType::Class; self.declare(name); self.define(name); if let Some(superclass) = superclass { if let Expr::Variable { id: _, name: superclass_name, } = superclass.as_ref() { if name == superclass_name { return Err(LoxError::ResolverError( "A class cannot inherit from itself.", )); } } self.current_class = ClassType::SubClass; self.resolve_expression(superclass)?; self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("super", true)); } self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("this", true)); for method in methods.as_ref() { if let Stmt::Function { name, parameters, body, } = method { let function_type = if name == "init" { FunctionType::Initializer } else { FunctionType::Method }; self.resolve_function(name, parameters, body, function_type)?; } else { unreachable!() } } self.end_scope(); if superclass.is_some() { self.end_scope(); } self.current_class = enclosing_class; } }; Ok(()) } fn resolve_function( &mut self, name: &'a str, parameters: &'a Vec<String>, body: &'a [Stmt], function_type: FunctionType, ) -> Result<()> { self.declare(name); self.define(name); let enclosing_function = self.current_function; self.current_function = function_type; self.begin_scope(); for param in parameters { self.declare(&param); self.define(&param); } self.resolve_statements(body)?; self.end_scope(); self.current_function = enclosing_function; Ok(()) } fn resolve_expression(&mut self, expr: &'a Expr) -> Result<()> { match expr { Expr::Variable { id, name } => { if let Some(scope) = self.scopes.last() { if scope.get::<str>(name) == Some(&false) { return 
Err(LoxError::ResolverError( "Cannot read local variable in ints own initializer", )); } self.resolve_local(*id, name); } } Expr::This { id, keyword } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'this' outside of a class.", )); } self.resolve_local(*id, keyword); } Expr::Super { id, keyword, method: _, } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'super' outside of a class.", )); } if self.current_class != ClassType::SubClass { return Err(LoxError::ResolverError( "Cannot use 'super' in a class with no superclass.", )); } self.resolve_local(*id, keyword); } Expr::Assign { id, value, name } => { self.resolve_expression(value)?; self.resolve_local(*id, name); } Expr::Binary { left, token_type: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Call { callee, arguments } => { self.resolve_expression(callee)?; for arg in arguments.as_ref() { self.resolve_expression(arg)?; } } Expr::Get { object, name: _ } => { self.resolve_expression(object)?; } Expr::Set { object, name: _, value, } => { self.resolve_expression(object)?; self.resolve_expression(value)?; } Expr::Grouping { expression } => { self.resolve_expression(expression)?; } Expr::Logical { left, operator: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Unary { token_type: _, right, } => { self.resolve_expression(right)?; } Expr::Nil | Expr::Boolean(_) | Expr::Number(_) | Expr::String(_) => {} }; Ok(()) } fn begin_scope(&mut self) { self.scopes.push(HashMap::new()); } fn end_scope(&mut self) { self.scopes.pop(); } fn declare(&mut self, name: &'a str) { self.scopes .last_mut() .map(|scope| scope.insert(name, false)); } fn define(&mut self, name: &'a str) { self.scopes.last_mut().map(|scope| scope.insert(name, true)); } fn resolve_local(&mut self, expr_id: ExprId, name: &'a str) { self.scopes .iter_mut() .rev() .enumerate() .find(|(_, 
scope)| scope.contains_key(name)) .map(|(depth, _)| (expr_id, depth as u64)) .map(|(expr_id, depth)| self.expr_id_to_depth.insert(expr_id, depth)); } } pub fn resolve(statements: &[Stmt]) -> Result<HashMap<ExprId, Depth>> { let mut resolver = Resolver::new(); resolver.resolve(statements) } #[cfg(test)] mod tests { use super::{resolve, Depth}; use crate::error::{LoxError, Result}; use crate::lexer; use crate::parser; use crate::statement::ExprId; use std::collections::HashMap; fn scopes(source: &'static str) -> Result<HashMap<ExprId, Depth>> { let (tokens, lexer_errors) = lexer::lex(source); assert_eq!(lexer_errors.len(), 0); let (statements, parser_errors) = parser::parse(&tokens); assert_eq!(parser_errors.len(), 0); resolve(&statements) } #[test] fn invalid_return_statement() { let source = "return 42;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return from top-level code.") ); } #[test] fn valid_return_statement() { let source = r#" fun test() { return 42; } "#; let scopes = scopes(source); assert_eq!(scopes.is_ok(), true); } #[test] fn invalid_this() { let source = "var a = this;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'this' outside of a class.") ); } #[test] fn cannot_return_from_initializer() { let source = r#" class Foo { init() { return "invalid"; } } "#; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return a value from an initializer.") ); } #[test] fn cannot_use_super_outside_of_class() { let source = "super.foo();"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' outside of a class.") ); } #[test] fn cannot_use_super_in_non_subclass() { let source = r#" class Foo { foo() { super.foo(); } } "#; let 
scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' in a class with no superclass.") ); } }
use crate::error::{LoxError, Result}; use crate::statement::{Expr, ExprId, Stmt}; use std::collections::HashMap; #[derive(Copy, Clone, PartialEq, Eq)] enum FunctionType { None, Method, Function, Initializer, } #[derive(Copy, Clone, PartialEq, Eq)] enum ClassType { None, Class, SubClass, } pub type Depth = u64; struct Resolver<'a> { scopes: Vec<HashMap<&'a str, bool>>, expr_id_to_depth: HashMap<ExprId, Depth>, current_function: FunctionType, current_class: ClassType, } impl<'a> Resolver<'a> { fn new() -> Self { Self { scopes: Vec::new(), expr_id_to_depth: HashMap::new(), current_function: FunctionType::None, current_class: ClassType::None, } } fn resolve(&mut self, statements: &'a [Stmt]) -> Result<HashMap<ExprId, Depth>> { self.resolve_statements(statements)?; Ok(std::mem::take(&mut self.expr_id_to_depth)) } fn resolve_statements(&mut self, stmts: &'a [Stmt]) -> Result<()> { for stmt in stmts { self.resolve_statement(stmt)?; } Ok(()) } fn resolve_statement(&mut self, stmt: &'a Stmt) -> Result<()> { match stmt { Stmt::Block { statements } => { self.begin_scope(); self.resolve_statements(statements.as_ref())?; self.end_scope(); } Stmt::Var { name, initializer } => { self.declare(name); self.define(name); if let Some(initializer) = initializer { self.resolve_expression(initializer)?; } } Stmt::Function { name, parameters, body, } => { self.resolve_function(name, parameters, body, FunctionType::Function)?; } Stmt::Expression { expression } => { self.resolve_expression(expression)?; } Stmt::If { condition, then_branch, else_branch, } => { self.resolve_expression(condition)?; self.resolve_statement(then_branch.as_ref())?; if let Some(stmt) = else_branch { self.resolve_statement(stmt)?; } } Stmt::Print { expression } => self.resolve_expression(expression)?, Stmt::Return { value } => { if self.current_function == FunctionType::None { return Err(LoxError::ResolverError( "Cannot return from top-level code.", )); } if let Some(value) = value { if self.current_function == 
FunctionType::Initializer { return Err(LoxError::ResolverError( "Cannot return a value from an initializer.", )); } self.resolve_expression(value)?; } } Stmt::While { condition, body } => { self.resolve_expression(condition)?; self.resolve_statement(body)?; } Stmt::Class { name, superclass, methods, } => { let enclosing_class = self.current_class; self.current_class = ClassType::Class; self.declare(name); self.define(name); if let Some(superclass) = superclass { if let Expr::Variable { id: _, name: superclass_name, } = superclass.as_ref() { if name == superclass_name { return Err(LoxError::ResolverError( "A class cannot inherit from itself.", )); } } self.current_class = ClassType::SubClass; self.resolve_expression(superclass)?; self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("super", true)); } self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("this", true)); for method in methods.as_ref() { if let Stmt::Function { name, parameters, body, } = method { let function_type = if name == "init" { FunctionType::Initializer } else { FunctionType::Method }; self.resolve_function(name, parameters, body, function_type)?; } else { unreachable!() } } self.end_scope(); if superclass.is_some() { self.end_scope(); } self.current_class = enclosing_class; } }; Ok(()) } fn resolve_function( &mut self, name: &'a str, parameters: &'a Vec<String>, body: &'a [Stmt], function_type: FunctionType, ) -> Result<()> { self.declare(name); self.define(name); let enclosing_function = self.current_function; self.current_function = function_type; self.begin_scope(); for param in parameters { self.declare(&param); self.define(&param); } self.resolve_statements(body)?; self.end_scope(); self.current_function = enclosing_function; Ok(()) } fn resolve_expression(&mut self, expr: &'a Expr) -> Result<()> { match expr { Expr::Variable { id, name } => { if let Some(scope) = self.scopes.last() { if scope.get::<str>(name) == Some(&false) { return 
Err(LoxError::ResolverError( "Cannot read local variable in ints own initializer", )); } self.resolve_local(*id, name); } } Expr::This { id, keyword } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'this' outside of a class.", )); } self.resolve_local(*id, keyword); } Expr::Super { id, keyword, method: _, } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'super' outside of a class.", )); } if self.current_class != ClassType::SubClass { return Err(LoxError::ResolverError( "Cannot use 'super' in a class with no superclass.", )); } self.resolve_local(*id, keyword); } Expr::Assign { id, value, name } => { self.resolve_expression(value)?; self.resolve_local(*id, name); } Expr::Binary { left, token_type: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Call { callee, arguments } => { self.resolve_expression(callee)?; for arg in arguments.as_ref() { self.resolve_expression(arg)?; } } Expr::Get { object, name: _ } => { self.resolve_expression(object)?; } Expr::Set { object, name: _, value, } => { self.resolve_expression(object)?; self.resolve_expression(value)?; } Expr::Grouping { expression } => { self.resolve_expression(expression)?; } Expr::Logical { left, operator: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Unary { token_type: _, right, } => { self.resolve_expression(right)?; } Expr::Nil | Expr::Boolean(_) | Expr::Number(_) | Expr::String(_) => {} }; Ok(()) } fn begin_scope(&mut self) { self.scopes.push(HashMap::new()); } fn end_scope(&mut self) { self.scopes.pop(); } fn declare(&mut self, name: &'a str) { self.scopes .last_mut() .map(|scope| scope.insert(name, false)); } fn define(&mut self, name: &'a str) { self.scopes.last_mut().map(|scope| scope.insert(name, true)); } fn resolve_local(&mut self, expr_id: ExprId, name: &'a str) { self.scopes .iter_mut() .rev() .enumerate() .find(|(_, 
scope)| scope.contains_key(name)) .map(|(depth, _)| (expr_id, depth as u64)) .map(|(expr_id, depth)| self.expr_id_to_depth.insert(expr_id, depth)); } } pub fn resolve(statements: &[Stmt]) -> Result<HashMap<ExprId, Depth>> { let mut resolver = Resolver::new(); resolver.resolve(statements) } #[cfg(test)] mod tests { use super::{resolve, Depth}; use crate::error::{LoxError, Result}; use crate::lexer; use crate::parser; use crate::statement::ExprId; use std::collections::HashMap; fn scopes(source: &'static str) -> Result<HashMap<ExprId, Depth>> { let (tokens, lexer_errors) = lexer::lex(source); assert_eq!(lexer_errors.len(), 0); let (statements, parser_errors) = parser::parse(&tokens); assert_eq!(parser_errors.len(), 0); resolve(&statements) } #[test] fn invalid_return_statement() { let source = "return 42;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return from top-level code.") ); } #[test]
#[test] fn invalid_this() { let source = "var a = this;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'this' outside of a class.") ); } #[test] fn cannot_return_from_initializer() { let source = r#" class Foo { init() { return "invalid"; } } "#; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return a value from an initializer.") ); } #[test] fn cannot_use_super_outside_of_class() { let source = "super.foo();"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' outside of a class.") ); } #[test] fn cannot_use_super_in_non_subclass() { let source = r#" class Foo { foo() { super.foo(); } } "#; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' in a class with no superclass.") ); } }
fn valid_return_statement() { let source = r#" fun test() { return 42; } "#; let scopes = scopes(source); assert_eq!(scopes.is_ok(), true); }
function_block-full_function
[ { "content": "#[derive(PartialEq, Clone, Copy)]\n\nenum ClassType {\n\n None,\n\n Class,\n\n Subclass,\n\n}\n\n\n\npub struct LexicalScopesResolver {\n\n // Note that this doesn't track globals at all\n\n scopes: Vec<FnvHashMap<Identifier, VariableDefinition>>,\n\n current_function: Option<FunctionKind>,\n\n current_class: ClassType,\n\n lexical_scopes: LexicalScopes,\n\n}\n\n\n\nimpl LexicalScopesResolver {\n\n pub fn new() -> LexicalScopesResolver {\n\n LexicalScopesResolver {\n\n scopes: vec![],\n\n current_function: None,\n\n current_class: ClassType::None,\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 1, "score": 179864.16950586054 }, { "content": "#[derive(PartialEq)]\n\nenum VariableDefinition {\n\n Undefined,\n\n Declared,\n\n Defined,\n\n}\n\n\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 4, "score": 153313.6330389572 }, { "content": "pub fn disassemble<T>(chunk: &Chunk, name: &str, out: &mut LineWriter<T>) -> Result<(), Error>\n\nwhere\n\n T: Write,\n\n{\n\n writeln!(out, \"== {} ==\", name)?;\n\n let mut line = 0;\n\n for (i, instruction) in chunk.instructions.iter().enumerate() {\n\n // Note that this is not printing offsets as the book does.\n\n // Using the OpCode enum all the opcodes have the same size.\n\n // It is not space-efficient, but for now it's fine\n\n write!(out, \"{:04}\", i)?;\n\n if line == chunk.lines[i] {\n\n write!(out, \" |\")?;\n\n } else {\n\n line = chunk.lines[i];\n\n write!(out, \"{:4}\", line)?;\n\n }\n\n write!(out, \" \")?;\n\n disassemble_instruction(instruction, chunk, out)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "rulox/src/vm/bytecode.rs", "rank": 5, "score": 148535.24739022023 }, { "content": "pub fn lex(source: &str) -> (Vec<Token>, Vec<LoxError>) {\n\n let lexer = Lexer::new(source);\n\n\n\n let (tokens, errors): (Vec<_>, Vec<_>) = lexer.partition(Result::is_ok);\n\n let tokens = tokens.into_iter().map(Result::unwrap).collect();\n\n let errors = 
errors.into_iter().map(Result::unwrap_err).collect();\n\n\n\n (tokens, errors)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::lex;\n\n use super::Token;\n\n use super::TokenType;\n\n\n\n #[test]\n\n fn foo() {\n\n let source = r#\"\n", "file_path": "radogost/rlox/src/lexer.rs", "rank": 6, "score": 141272.9254292674 }, { "content": "pub fn scan(source: &str) -> (Vec<TokenWithContext>, Vec<ScannerError>) {\n\n let mut tokens = Vec::new();\n\n let mut errors = Vec::new();\n\n for result in scan_into_iterator(source) {\n\n match result {\n\n Ok(token_with_context) => {\n\n match token_with_context.token {\n\n // Ignoring tokens we don't care about\n\n Token::Comment | Token::Whitespace => {}\n\n _ => tokens.push(token_with_context),\n\n };\n\n }\n\n Err(error) => errors.push(error),\n\n }\n\n }\n\n (tokens, errors)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 7, "score": 129611.98473647333 }, { "content": "type Rule<'a, I> = fn(&mut Parser<'a, I>) -> Result<(), ParsingError>;\n\n\n\nimpl<'a, I> Parser<'a, I>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithContext, ScannerError>>,\n\n{\n\n fn new(chunk: &'a mut Chunk, tokens: I) -> Parser<I> {\n\n Parser {\n\n chunk,\n\n tokens: tokens.peekable(),\n\n errors: vec![],\n\n }\n\n }\n\n\n\n /// Ignores irrelevant (comments and whitespaces) and invalid\n\n /// tokens.\n\n /// When invalid tokens are encountered a corresponding error\n\n /// is generated so that they can be reported and compilation\n\n /// fails.\n\n fn skip_to_valid(&mut self) -> () {\n", "file_path": "rulox/src/vm/compiler.rs", "rank": 8, "score": 121302.22710994873 }, { "content": "/// Compiles a text producing either the corresponding chunk of bytecode\n\n/// or an error.\n\n/// Error reporting tries to be smart and to minimize reports adopting a\n\n/// \"recovery logic\".\n\npub fn compile(text: &str) -> Result<Chunk, Vec<CompilationError>> {\n\n let mut chunk = Chunk::default();\n\n let 
tokens = scan_into_iterator(text);\n\n {\n\n {\n\n let parser = Parser::new(&mut chunk, tokens);\n\n let _ = parser.parse()?;\n\n // TODO: assert that we consumed everything\n\n }\n\n // Line is meaningless, but this is temporary to see some results\n\n // while the implementation is in progress.\n\n chunk.add_instruction(OpCode::Return, 0);\n\n }\n\n Ok(chunk)\n\n}\n", "file_path": "rulox/src/vm/compiler.rs", "rank": 9, "score": 119161.08091473192 }, { "content": "pub fn parse<'a>(tokens: &'a Vec<Token<'a>>) -> (Vec<Stmt>, Vec<LoxError>) {\n\n let parser = Parser::new(tokens);\n\n let (expressions, errors): (Vec<_>, Vec<_>) = parser.partition(Result::is_ok);\n\n let expressions = expressions.into_iter().map(Result::unwrap).collect();\n\n let errors = errors.into_iter().map(Result::unwrap_err).collect();\n\n\n\n (expressions, errors)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::parse;\n\n use super::{Expr, Stmt};\n\n use crate::lexer;\n\n use crate::token::TokenType;\n\n\n\n #[test]\n\n fn simple_mathematical_expression() {\n\n let source = \"(3 + 4) * 6;\";\n", "file_path": "radogost/rlox/src/parser.rs", "rank": 10, "score": 118329.4597684013 }, { "content": "fn next_id() -> u64 {\n\n NEXT_EXPRESSION_ID.fetch_add(1, Ordering::Relaxed)\n\n}\n\n\n", "file_path": "radogost/rlox/src/parser.rs", "rank": 11, "score": 116649.99193857922 }, { "content": "type RunResult = Result<(), RunError>;\n\n\n", "file_path": "rulox/src/user_interface/mod.rs", "rank": 12, "score": 112185.47198939194 }, { "content": "#[derive(Debug)]\n\nenum LoxError {\n\n InputError(Vec<InputError>),\n\n LexicalScopesResolutionError(Vec<LexicalScopesResolutionError>),\n\n RuntimeError(RuntimeError),\n\n}\n\n\n", "file_path": "rulox/src/treewalk/mod.rs", "rank": 13, "score": 107811.49988036187 }, { "content": "#[derive(Debug)]\n\nenum InputError {\n\n ScannerError(scanner::ScannerError),\n\n ParserError(ParseError),\n\n}\n\n\n\npub struct TreeWalkRuloxInterpreter {\n\n parser: Parser,\n\n 
lexical_scope_resolver: LexicalScopesResolver,\n\n interpreter: StatementInterpreter,\n\n}\n\n\n\nimpl Default for TreeWalkRuloxInterpreter {\n\n fn default() -> TreeWalkRuloxInterpreter {\n\n let mut identifier_map = IdentifierMap::new();\n\n let environment = Environment::new_with_natives(&mut identifier_map);\n\n let parser = Parser::new(identifier_map);\n\n TreeWalkRuloxInterpreter {\n\n parser,\n\n lexical_scope_resolver: LexicalScopesResolver::new(),\n\n interpreter: StatementInterpreter::new(environment),\n", "file_path": "rulox/src/treewalk/mod.rs", "rank": 14, "score": 107811.49988036187 }, { "content": "pub fn trace<T>(chunk: &Chunk, writer: &mut LineWriter<T>) -> Result<(), RuntimeError>\n\nwhere\n\n T: Write,\n\n{\n\n let mut vm = Vm::new(chunk);\n\n while {\n\n vm.trace(writer).map_err(RuntimeError::TracingError)?;\n\n vm.interpret_next()?\n\n } {}\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use proptest::collection::*;\n\n use proptest::num::*;\n\n use proptest::prelude::*;\n\n use std::io::*;\n\n use vm::bytecode::*;\n\n use vm::interpreter::{interpret, trace};\n", "file_path": "rulox/src/vm/interpreter.rs", "rank": 15, "score": 101874.51230189468 }, { "content": "pub trait Function {\n\n fn arity(&self) -> usize;\n\n fn call(\n\n &self,\n\n interpreter: &mut Interpreter,\n\n arguments: &Vec<Rc<Object>>,\n\n ) -> Result<Rc<Object>>;\n\n}\n\n\n\nimpl std::fmt::Debug for dyn Function {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"<lox fn>\")\n\n }\n\n}\n\n\n\npub struct Clock;\n\n\n\nimpl Function for Clock {\n\n fn arity(&self) -> usize {\n\n 0\n", "file_path": "radogost/rlox/src/functions.rs", "rank": 16, "score": 101654.59165601457 }, { "content": "pub fn interpret(chunk: &Chunk) -> Result<(), RuntimeError> {\n\n let mut vm = Vm::new(chunk);\n\n while vm.interpret_next()? 
{}\n\n Ok(())\n\n}\n\n\n", "file_path": "rulox/src/vm/interpreter.rs", "rank": 17, "score": 99176.14095814247 }, { "content": "pub fn scan_into_iterator<'a>(\n\n source: &'a str,\n\n) -> impl Iterator<Item = Result<TokenWithContext, ScannerError>> + 'a {\n\n TokensIterator {\n\n scanner: Scanner::initialize(source),\n\n }\n\n}\n\n\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 19, "score": 83149.72427881596 }, { "content": "#[derive(PartialEq, PartialOrd, FromPrimitive, ToPrimitive, Clone, Copy)]\n\nenum Precedence {\n\n None,\n\n Assignment,\n\n Or,\n\n And,\n\n Equality,\n\n Comparison,\n\n Term,\n\n Factor,\n\n Unary,\n\n Call,\n\n Primary,\n\n}\n\n\n\nimpl Precedence {\n\n /// Returns the next (as in the immediately higher) Precedence.\n\n /// Note that this is not defined for the item with the highest\n\n /// precedence. In such case you'll get a panic\n\n fn next(self) -> Precedence {\n\n // This reduces some boilerplate.\n\n Precedence::from_u8(self.to_u8().unwrap() + 1).unwrap()\n\n }\n\n}\n\n\n", "file_path": "rulox/src/vm/compiler.rs", "rank": 20, "score": 81585.46869114901 }, { "content": "fn run_file(filename: &str) {\n\n let mut file = File::open(filename).expect(\"Could not read file: \");\n\n let mut code = String::new();\n\n file.read_to_string(&mut code)\n\n .expect(\"Could not read file: \");\n\n\n\n let mut interpreter = Interpreter::new();\n\n let (tokens, lexer_errors) = lexer::lex(&code);\n\n print_errors(&lexer_errors);\n\n\n\n let (statements, parser_errors) = parser::parse(&tokens);\n\n print_errors(&parser_errors);\n\n\n\n if !lexer_errors.is_empty() || !parser_errors.is_empty() {\n\n std::process::exit(64);\n\n }\n\n\n\n let scopes = resolver::resolve(&statements);\n\n if scopes.is_err() {\n\n std::process::exit(64);\n\n }\n\n interpreter.add_scopes(scopes.unwrap());\n\n\n\n interpreter\n\n .interpret(statements)\n\n .expect(\"Interpreter error: \");\n\n}\n\n\n", "file_path": "radogost/rlox/src/main.rs", "rank": 22, "score": 
81058.33668777735 }, { "content": "pub fn disassemble_instruction<T>(\n\n instruction: &OpCode,\n\n chunk: &Chunk,\n\n out: &mut LineWriter<T>,\n\n) -> Result<(), Error>\n\nwhere\n\n T: Write,\n\n{\n\n match *instruction {\n\n OpCode::Return => writeln!(out, \"OP_RETURN\"),\n\n OpCode::Constant(offset) => if offset >= chunk.values_count() {\n\n //TODO: this should probably return an error\n\n writeln!(out, \"OP_CONSTANT {:4} 'ILLEGAL_ACCESS'\", offset)\n\n } else {\n\n writeln!(\n\n out,\n\n \"OP_CONSTANT {:4} '{:?}'\",\n\n offset,\n\n chunk.get_value(offset)\n\n )\n", "file_path": "rulox/src/vm/bytecode.rs", "rank": 23, "score": 80792.2971701551 }, { "content": "fn is_whitespace(c: char) -> bool {\n\n match c {\n\n ' ' | '\\r' | '\\t' | '\\n' => true,\n\n _ => false,\n\n }\n\n}\n\n\n\nimpl<'a> Scanner<'a> {\n\n fn initialize(source: &'a str) -> Scanner {\n\n Scanner {\n\n current_position: Position::initial(),\n\n current_lexeme: \"\".into(),\n\n source: multipeek(source.chars()),\n\n }\n\n }\n\n\n\n fn peek_check(&mut self, check: &dyn Fn(char) -> bool) -> bool {\n\n self.source.reset_peek();\n\n match self.source.peek() {\n\n Some(&c) => check(c),\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 24, "score": 79686.70203461779 }, { "content": "fn is_alphanumeric(c: char) -> bool {\n\n is_digit(c) || is_alpha(c)\n\n}\n\n\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 25, "score": 79686.70203461779 }, { "content": "fn is_digit(c: char) -> bool {\n\n c >= '0' && c <= '9'\n\n}\n\n\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 26, "score": 79686.70203461779 }, { "content": "fn is_alpha(c: char) -> bool {\n\n (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_'\n\n}\n\n\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 27, "score": 79686.70203461779 }, { "content": "struct TokensIterator<'a> {\n\n scanner: Scanner<'a>,\n\n}\n\n\n\nimpl<'a> Iterator for TokensIterator<'a> {\n\n type Item = Result<TokenWithContext, 
ScannerError>;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.scanner.scan_next()\n\n }\n\n}\n\n\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 28, "score": 74913.57060660253 }, { "content": "pub trait LoxImplementation {\n\n fn run(&mut self, source: &str) -> RunResult;\n\n}\n\n\n\npub struct Runner<I: LoxImplementation> {\n\n rulox: I,\n\n}\n\n\n\nimpl<I: LoxImplementation> Runner<I> {\n\n pub fn new(implementation: I) -> Runner<I> {\n\n Runner {\n\n rulox: implementation,\n\n }\n\n }\n\n pub fn run_file(&mut self, file_name: &str) -> RunResult {\n\n let mut file =\n\n File::open(file_name).map_err(|_| RunError::IoError(\"Error opening file\".into()))?; // TODO: add context\n\n let mut source = String::new();\n\n file.read_to_string(&mut source)\n\n .map_err(|_| RunError::IoError(\"Error reading file\".into()))?;\n", "file_path": "rulox/src/user_interface/mod.rs", "rank": 29, "score": 69697.72590455029 }, { "content": "trait LexicallyScoped {\n\n fn resolve(&self, &mut LexicalScopesResolver) -> Result<(), LexicalScopesResolutionError>;\n\n}\n\n\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 30, "score": 63024.78110534048 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub enum LexicalScopesResolutionError {\n\n ReadLocalInItsOwnInitializer,\n\n VariableAlreadyExistsInScope,\n\n ReturnFromTopLevelCode,\n\n ReturnFromInitializer,\n\n UseOfThisOutsideAClass,\n\n UseOfSuperOutsideAClass,\n\n UseOfSuperOutsideASubClass,\n\n}\n\n\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 31, "score": 57719.20765432073 }, { "content": " return Err(LexicalScopesResolutionError::UseOfThisOutsideAClass);\n\n }\n\n resolver.resolve_local(handle.clone(), *identifier);\n\n Ok(())\n\n }\n\n Expr::Super(ref handle, ref super_identifier, _member_identifier) => {\n\n match resolver.current_class {\n\n ClassType::None => Err(LexicalScopesResolutionError::UseOfSuperOutsideAClass),\n\n ClassType::Class => {\n\n 
Err(LexicalScopesResolutionError::UseOfSuperOutsideASubClass)\n\n }\n\n _ => {\n\n resolver.resolve_local(handle.clone(), *super_identifier);\n\n Ok(())\n\n }\n\n }\n\n }\n\n Expr::Identifier(ref handle, ref identifier) => {\n\n let scopes = resolver.scopes.len();\n\n if scopes != 0\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 32, "score": 57718.86176206916 }, { "content": " Err(LexicalScopesResolutionError::ReturnFromInitializer)\n\n }\n\n Some(_) => match *r {\n\n None => Ok(()),\n\n Some(ref e) => e.resolve(resolver),\n\n },\n\n None => Err(LexicalScopesResolutionError::ReturnFromTopLevelCode),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl LexicallyScoped for Expr {\n\n fn resolve(\n\n &self,\n\n resolver: &mut LexicalScopesResolver,\n\n ) -> Result<(), LexicalScopesResolutionError> {\n\n match *self {\n\n Expr::This(ref handle, ref identifier) => {\n\n if let ClassType::None = resolver.current_class {\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 33, "score": 57716.19201732988 }, { "content": " use treewalk::parser::*;\n\n\n\n #[test]\n\n fn global_variable() {\n\n let (tokens, _) = scan(&\"var a = 0;{fun f() {print a;}}\");\n\n let statements = Parser::default().parse(&tokens).unwrap();\n\n let mut lexical_scope_resolver = LexicalScopesResolver::new();\n\n for statement in statements.iter() {\n\n assert!(lexical_scope_resolver.resolve(&statement).is_ok());\n\n }\n\n let lexical_scopes = lexical_scope_resolver.lexical_scopes;\n\n let mut handle_factory = VariableUseHandleFactory::new();\n\n let handle = handle_factory.next(); // Use of a in the function\n\n assert_eq!(Some(&3), lexical_scopes.get_depth(handle));\n\n }\n\n\n\n #[test]\n\n fn captured_variable() {\n\n let (tokens, _) = scan(&\"{var a = 0;fun f() {print a;}}\");\n\n let statements = Parser::default().parse(&tokens).unwrap();\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 34, "score": 57713.48894604275 }, { "content": " let mut 
lexical_scope_resolver = LexicalScopesResolver::new();\n\n for statement in statements.iter() {\n\n assert!(lexical_scope_resolver.resolve(&statement).is_ok());\n\n }\n\n let lexical_scopes = lexical_scope_resolver.lexical_scopes;\n\n let mut handle_factory = VariableUseHandleFactory::new();\n\n let handle = handle_factory.next(); // Use of a in the function\n\n assert_eq!(Some(&2), lexical_scopes.get_depth(handle));\n\n }\n\n\n\n #[test]\n\n fn lexical_capture() {\n\n let (tokens, _) = scan(&\"var a = 0;{fun f() {print a;} var a = 1;}\");\n\n let statements = Parser::default().parse(&tokens).unwrap();\n\n let mut lexical_scope_resolver = LexicalScopesResolver::new();\n\n for statement in statements.iter() {\n\n assert!(lexical_scope_resolver.resolve(&statement).is_ok());\n\n }\n\n let lexical_scopes = lexical_scope_resolver.lexical_scopes;\n\n let mut handle_factory = VariableUseHandleFactory::new();\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 35, "score": 57712.627732995534 }, { "content": " }\n\n }\n\n // If we failed to find it in the locals, it must be a global.\n\n // It might not be there right now, but it might appear later on.\n\n // We will know it only at runtime.\n\n self.lexical_scopes.depths.insert(handle, max_depth);\n\n }\n\n\n\n #[allow(dead_code)] // Used in tests\n\n pub fn resolve(\n\n &mut self,\n\n statement: &Statement,\n\n ) -> Result<&LexicalScopes, LexicalScopesResolutionError> {\n\n statement.resolve(self).map(move |_| &self.lexical_scopes)\n\n }\n\n\n\n pub fn resolve_all(\n\n &mut self,\n\n statements: &[Statement],\n\n ) -> Result<&LexicalScopes, Vec<LexicalScopesResolutionError>> {\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 36, "score": 57712.313927544725 }, { "content": "\n\n #[test]\n\n fn global_redeclaration_is_allowed() {\n\n let (tokens, _) = scan(&\"var a = 1;var a = 2;\");\n\n let statements = Parser::default().parse(&tokens).unwrap();\n\n let mut 
lexical_scope_resolver = LexicalScopesResolver::new();\n\n assert!(lexical_scope_resolver.resolve(&statements[0]).is_ok());\n\n assert!(lexical_scope_resolver.resolve(&statements[1]).is_ok());\n\n }\n\n\n\n #[test]\n\n fn error_on_return_outside_a_function() {\n\n let (tokens, _) = scan(&\"return;\");\n\n let statements = Parser::default().parse(&tokens).unwrap();\n\n let mut lexical_scope_resolver = LexicalScopesResolver::new();\n\n assert!(lexical_scope_resolver.resolve(&statements[0]).is_err());\n\n }\n\n}\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 37, "score": 57712.14284865082 }, { "content": " Statement::Class(ref c) => {\n\n resolver.declare(c.name)?;\n\n let enclosing_class = resolver.current_class;\n\n resolver.current_class = ClassType::Class;\n\n resolver.define(c.name);\n\n if let Some(ref superclass) = c.superclass {\n\n resolver.current_class = ClassType::Subclass;\n\n superclass.resolve(resolver)?;\n\n resolver.begin_scope();\n\n resolver.define(Identifier::super_identifier());\n\n }\n\n resolver.begin_scope();\n\n resolver.define(Identifier::this());\n\n for method in &c.methods {\n\n method.resolve(resolver)?;\n\n }\n\n resolver.end_scope();\n\n if c.superclass.is_some() {\n\n resolver.end_scope();\n\n }\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 38, "score": 57711.923710506046 }, { "content": " resolver.declare(self.name)?;\n\n let enclosing_function = resolver.current_function;\n\n resolver.current_function = Some(self.kind);\n\n resolver.define(self.name);\n\n resolver.begin_scope();\n\n for argument in &self.arguments {\n\n resolver.declare(*argument)?;\n\n resolver.define(*argument);\n\n }\n\n self.body.resolve(resolver)?;\n\n resolver.end_scope();\n\n resolver.current_function = enclosing_function;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use frontend::scanner::*;\n\n use treewalk::lexical_scope_resolver::*;\n", "file_path": 
"rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 39, "score": 57711.61503105745 }, { "content": " let handle = handle_factory.next(); // Use of a in the function\n\n assert_eq!(Some(&3), lexical_scopes.get_depth(handle));\n\n }\n\n\n\n #[test]\n\n fn error_on_shadowing() {\n\n let (tokens, _) = scan(&\"var a = 0;{var a = a;}\");\n\n let statements = Parser::default().parse(&tokens).unwrap();\n\n let mut lexical_scope_resolver = LexicalScopesResolver::new();\n\n assert!(lexical_scope_resolver.resolve(&statements[0]).is_ok());\n\n assert!(lexical_scope_resolver.resolve(&statements[1]).is_err());\n\n }\n\n\n\n #[test]\n\n fn error_on_local_redeclaration() {\n\n let (tokens, _) = scan(&\"fun bad() {var a = 1;var a = 2;}\");\n\n let statements = Parser::default().parse(&tokens).unwrap();\n\n let mut lexical_scope_resolver = LexicalScopesResolver::new();\n\n assert!(lexical_scope_resolver.resolve(&statements[0]).is_err());\n\n }\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 40, "score": 57711.10286628536 }, { "content": " && resolver.scopes[scopes - 1]\n\n .get(&identifier)\n\n .unwrap_or(&VariableDefinition::Undefined)\n\n == &VariableDefinition::Declared\n\n {\n\n Err(LexicalScopesResolutionError::ReadLocalInItsOwnInitializer)\n\n } else {\n\n resolver.resolve_local(*handle, *identifier);\n\n Ok(())\n\n }\n\n }\n\n Expr::Assignment(ref assigment) => assigment.resolve(resolver),\n\n Expr::Literal(_) => Ok(()),\n\n Expr::Unary(ref e) => e.right.resolve(resolver),\n\n Expr::Binary(ref e) => e\n\n .left\n\n .resolve(resolver)\n\n .and_then(|_| e.right.resolve(resolver)),\n\n Expr::Logic(ref e) => e\n\n .left\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 41, "score": 57710.33185682534 }, { "content": " resolver.current_class = enclosing_class;\n\n Ok(())\n\n }\n\n Statement::Expression(ref e) => e.resolve(resolver),\n\n Statement::IfThen(ref s) => s\n\n .condition\n\n .resolve(resolver)\n\n .and_then(|_| 
s.then_branch.resolve(resolver)),\n\n Statement::IfThenElse(ref s) => s\n\n .condition\n\n .resolve(resolver)\n\n .and_then(|_| s.then_branch.resolve(resolver))\n\n .and_then(|_| s.else_branch.resolve(resolver)),\n\n Statement::While(ref s) => s\n\n .condition\n\n .resolve(resolver)\n\n .and_then(|_| s.body.resolve(resolver)),\n\n Statement::Print(ref e) => e.resolve(resolver),\n\n Statement::Return(ref r) => match resolver.current_function {\n\n Some(FunctionKind::Initializer) => {\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 42, "score": 57710.206594580646 }, { "content": " Statement::Block(ref b) => {\n\n resolver.begin_scope();\n\n for statement in &b.statements {\n\n statement.resolve(resolver)?;\n\n }\n\n resolver.end_scope();\n\n Ok(())\n\n }\n\n Statement::VariableDefinition(ref identifier) => {\n\n resolver.declare(*identifier)?;\n\n resolver.define(*identifier);\n\n Ok(())\n\n }\n\n Statement::VariableDefinitionWithInitalizer(ref identifier, ref initializer) => {\n\n resolver.declare(*identifier)?;\n\n initializer.resolve(resolver)?;\n\n resolver.define(*identifier);\n\n Ok(())\n\n }\n\n Statement::FunctionDefinition(ref f) => f.resolve(resolver),\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 43, "score": 57705.11795955033 }, { "content": " v.insert(VariableDefinition::Declared);\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n fn define(&mut self, identifier: Identifier) -> () {\n\n let scopes = self.scopes.len();\n\n if scopes == 0 {\n\n return;\n\n };\n\n self.scopes[scopes - 1].insert(identifier, VariableDefinition::Defined);\n\n }\n\n\n\n fn resolve_local(&mut self, handle: VariableUseHandle, identifier: Identifier) -> () {\n\n let max_depth = self.scopes.len();\n\n for depth in 0..max_depth {\n\n if self.scopes[max_depth - depth - 1].contains_key(&identifier) {\n\n self.lexical_scopes.depths.insert(handle, depth);\n\n return;\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 44, 
"score": 57702.99290070089 }, { "content": " .resolve(resolver)\n\n .and_then(|_| e.right.resolve(resolver)),\n\n Expr::Grouping(ref e) => e.expr.resolve(resolver),\n\n Expr::Call(ref e) => {\n\n e.callee.resolve(resolver)?;\n\n for argument in &e.arguments {\n\n argument.resolve(resolver)?;\n\n }\n\n Ok(())\n\n }\n\n Expr::Get(ref g) => {\n\n g.instance.resolve(resolver)?;\n\n Ok(())\n\n }\n\n Expr::Set(ref s) => {\n\n s.value.resolve(resolver)?;\n\n s.instance.resolve(resolver)?;\n\n Ok(())\n\n }\n\n }\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 45, "score": 57702.67128934717 }, { "content": " }\n\n}\n\n\n\nimpl LexicallyScoped for Assignment {\n\n fn resolve(\n\n &self,\n\n resolver: &mut LexicalScopesResolver,\n\n ) -> Result<(), LexicalScopesResolutionError> {\n\n self.rvalue.resolve(resolver)?;\n\n let Target::Identifier(ref identifier) = self.lvalue;\n\n resolver.resolve_local(self.handle, *identifier);\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LexicallyScoped for FunctionDefinition {\n\n fn resolve(\n\n &self,\n\n resolver: &mut LexicalScopesResolver,\n\n ) -> Result<(), LexicalScopesResolutionError> {\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 46, "score": 57700.52237116407 }, { "content": "use fnv::FnvHashMap;\n\nuse std::collections::hash_map::Entry;\n\nuse treewalk::ast::*;\n\n\n\npub type Depth = usize;\n\n\n\npub struct LexicalScopes {\n\n depths: FnvHashMap<VariableUseHandle, Depth>,\n\n}\n\n\n\nimpl LexicalScopes {\n\n pub fn new() -> LexicalScopes {\n\n LexicalScopes {\n\n depths: FnvHashMap::default(),\n\n }\n\n }\n\n\n\n pub fn get_depth(&self, handle: VariableUseHandle) -> Option<&Depth> {\n\n self.depths.get(&handle)\n\n }\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 47, "score": 57700.101878713285 }, { "content": " let resolution_errors: Vec<LexicalScopesResolutionError> = statements\n\n .iter()\n\n .map(|s| s.resolve(self))\n\n .filter(|r| r.is_err())\n\n .map(|r| 
r.unwrap_err())\n\n .collect();\n\n if resolution_errors.is_empty() {\n\n Ok(&self.lexical_scopes)\n\n } else {\n\n Err(resolution_errors)\n\n }\n\n }\n\n}\n\n\n\nimpl LexicallyScoped for Statement {\n\n fn resolve(\n\n &self,\n\n resolver: &mut LexicalScopesResolver,\n\n ) -> Result<(), LexicalScopesResolutionError> {\n\n match *self {\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 48, "score": 57698.86998859103 }, { "content": " lexical_scopes: LexicalScopes::new(),\n\n }\n\n }\n\n\n\n fn begin_scope(&mut self) -> () {\n\n self.scopes.push(FnvHashMap::default());\n\n }\n\n\n\n fn end_scope(&mut self) -> () {\n\n self.scopes.pop();\n\n }\n\n\n\n fn declare(&mut self, identifier: Identifier) -> Result<(), LexicalScopesResolutionError> {\n\n let scopes = self.scopes.len();\n\n if scopes == 0 {\n\n return Ok(());\n\n };\n\n match self.scopes[scopes - 1].entry(identifier) {\n\n Entry::Occupied(_) => Err(LexicalScopesResolutionError::VariableAlreadyExistsInScope),\n\n Entry::Vacant(v) => {\n", "file_path": "rulox/src/treewalk/lexical_scope_resolver.rs", "rank": 49, "score": 57697.022315357455 }, { "content": " class Person {\n\n init(name) {\n\n this.name = name;\n\n }\n\n\n\n hi() {\n\n return \"Hi, my name is \" + this.name;\n\n }\n\n }\n\n var alice = Person(\"Alice\");\n\n var hiAlice = alice.hi();\n\n \"#;\n\n let interpreter = interpret(source);\n\n let hi = interpreter.environment.borrow().get(0, \"hiAlice\").unwrap();\n\n assert_eq!(*hi, Object::String(\"Hi, my name is Alice\".to_owned()));\n\n }\n\n\n\n #[test]\n\n fn super_method_calls() {\n\n let source = r#\"\n", "file_path": "radogost/rlox/src/interpreter.rs", "rank": 50, "score": 50962.811921387925 }, { "content": " class Foo {}\n\n var foo = Foo();\n\n foo.field = \"some value\";\n\n var field = foo.field;\n\n \"#;\n\n let interpreter = interpret(source);\n\n let field = interpreter.environment.borrow().get(0, \"field\").unwrap();\n\n assert_eq!(*field, Object::String(\"some 
value\".to_owned()));\n\n }\n\n\n\n #[test]\n\n fn method_calls() {\n\n let source = r#\"\n", "file_path": "radogost/rlox/src/interpreter.rs", "rank": 51, "score": 50962.811921387925 }, { "content": " class Duck {\n\n type() {\n\n return \"Duck\";\n\n }\n\n }\n\n\n", "file_path": "radogost/rlox/src/interpreter.rs", "rank": 52, "score": 50962.811921387925 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() == 1 {\n\n run_prompt();\n\n } else if args.len() == 2 {\n\n run_file(&args[1]);\n\n } else {\n\n eprintln!(\"Unexpected number of arguments. Expected none (interactive) or one(file).\");\n\n }\n\n}\n", "file_path": "radogost/rlox/src/main.rs", "rank": 53, "score": 50403.70692531371 }, { "content": "#[derive(Debug)]\n\nstruct EnvironmentImpl {\n\n parent: Option<Environment>,\n\n values: FnvHashMap<Identifier, Value>,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Environment {\n\n actual: Rc<RefCell<EnvironmentImpl>>,\n\n}\n\n\n\nimpl PartialEq for Environment {\n\n fn eq(&self, _other: &Environment) -> bool {\n\n false\n\n }\n\n}\n\n\n\nimpl Environment {\n\n pub fn new_with_natives(identifier_map: &mut IdentifierMap) -> Environment {\n\n let environment = Environment::new();\n\n Callable::register_natives(&environment, identifier_map);\n", "file_path": "rulox/src/treewalk/interpreter.rs", "rank": 54, "score": 49541.74199873249 }, { "content": "fn run_prompt() {\n\n let mut interpreter = Interpreter::new();\n\n loop {\n\n print!(\"> \");\n\n io::stdout().flush().expect(\"Could not write to stdout\");\n\n let mut buffer = String::new();\n\n match io::stdin().read_line(&mut buffer) {\n\n Ok(_) => {\n\n let (tokens, lexer_errors) = lexer::lex(&buffer);\n\n print_errors(&lexer_errors);\n\n\n\n let (statements, parser_errors) = parser::parse(&tokens);\n\n print_errors(&parser_errors);\n\n\n\n if !lexer_errors.is_empty() || !parser_errors.is_empty() {\n\n std::process::exit(64);\n\n }\n\n\n\n let scopes = 
resolver::resolve(&statements);\n\n if scopes.is_err() {\n", "file_path": "radogost/rlox/src/main.rs", "rank": 55, "score": 48992.21982456581 }, { "content": "struct Vm<'a> {\n\n chunk: &'a Chunk,\n\n program_counter: usize,\n\n stack: Vec<Value>,\n\n /// Allocated objects so the GC can keep track of them.\n\n /// The variants of ObjectReference is a ref-counted\n\n /// pointer to their actual data. That is not enough to\n\n /// guarantee that we free all the memory because the object\n\n /// graph might contain cycles.\n\n /// TODO: not sure if this should be weak or not\n\n objects: Vec<ObjectReference>,\n\n}\n\n\n\nimpl<'a> Vm<'a> {\n\n fn new(chunk: &'a Chunk) -> Vm<'a> {\n\n Vm {\n\n chunk,\n\n program_counter: 0,\n\n stack: vec![],\n\n objects: vec![],\n", "file_path": "rulox/src/vm/interpreter.rs", "rank": 56, "score": 47177.85550548682 }, { "content": "struct Scanner<'a> {\n\n current_position: Position,\n\n current_lexeme: String,\n\n source: MultiPeek<str::Chars<'a>>,\n\n}\n\n\n", "file_path": "rulox/src/frontend/scanner.rs", "rank": 57, "score": 47177.85550548682 }, { "content": "struct Lexer<'a> {\n\n source: &'a str,\n\n source_iter: std::iter::Peekable<std::iter::Enumerate<Chars<'a>>>,\n\n start: usize,\n\n line: u32,\n\n eof_returned: bool,\n\n}\n\n\n\nimpl<'a> Lexer<'a> {\n\n fn new(source: &'a str) -> Self {\n\n Self {\n\n source,\n\n source_iter: source.chars().enumerate().peekable(),\n\n start: 0,\n\n line: 1,\n\n eof_returned: false,\n\n }\n\n }\n\n\n\n fn string(&mut self, start_pos: usize) -> Result<TokenType> {\n", "file_path": "radogost/rlox/src/lexer.rs", "rank": 58, "score": 47177.85550548682 }, { "content": "struct Parser<'a> {\n\n token_iter: std::iter::Peekable<std::slice::Iter<'a, Token<'a>>>,\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n\n fn new(tokens: &'a Vec<Token<'a>>) -> Self {\n\n Self {\n\n token_iter: tokens.iter().peekable(),\n\n }\n\n }\n\n\n\n fn statement(&mut self) -> Result<Stmt> {\n\n if let Some(token) = self.token_iter.peek() 
{\n\n match &token.token_type {\n\n TokenType::Print => self.print_statement(),\n\n TokenType::Var => self.var_declaration(),\n\n TokenType::LeftBrace => self.block(),\n\n TokenType::If => self.if_statement(),\n\n TokenType::While => self.while_statement(),\n\n TokenType::For => self.for_statement(),\n", "file_path": "radogost/rlox/src/parser.rs", "rank": 59, "score": 47177.85550548682 }, { "content": "type Line = usize;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum OpCode {\n\n Constant(Offset),\n\n Return,\n\n Negate,\n\n Not,\n\n // Having a single binary opcode parametrized on its operand makes\n\n // the code cleaner.\n\n // Since constant already has an offset we're not making the\n\n // encoding worse. The extra space would have been allocated anyway.\n\n Binary(BinaryOp),\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum BinaryOp {\n\n Add,\n\n Subtract,\n\n Multiply,\n", "file_path": "rulox/src/vm/bytecode.rs", "rank": 60, "score": 45875.91610062267 }, { "content": "type Offset = usize;\n\n/// Subset of values that can be initialised when a chunk is created.\n\n/// They will be turned into proper values when the VM accesses them.\n\n#[derive(Debug)]\n\npub enum Constant {\n\n Number(f64),\n\n Bool(bool),\n\n Nil,\n\n String(String),\n\n}\n", "file_path": "rulox/src/vm/bytecode.rs", "rank": 61, "score": 45875.91610062267 }, { "content": "/// A single-pass Pratt Parser that consumes tokens from an iterator,\n\n/// parses them into a Lox programs and emits a chunk of bytecode.\n\n/// The parser also keeps tracks of errors.\n\nstruct Parser<'a, I>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithContext, ScannerError>>,\n\n{\n\n chunk: &'a mut Chunk,\n\n tokens: Peekable<I>,\n\n errors: Vec<CompilationError>,\n\n}\n", "file_path": "rulox/src/vm/compiler.rs", "rank": 62, "score": 44581.79964562422 }, { "content": " class MallardDuck < Duck {\n\n type() {\n\n return \"Mallard\" + super.type();\n\n }\n\n }\n\n\n\n var duck = MallardDuck();\n\n var type = 
duck.type();\n\n \"#;\n\n let interpreter = interpret(source);\n\n let duck_type = interpreter.environment.borrow().get(0, \"type\").unwrap();\n\n assert_eq!(*duck_type, Object::String(\"MallardDuck\".to_owned()));\n\n }\n\n}\n", "file_path": "radogost/rlox/src/interpreter.rs", "rank": 63, "score": 44462.99001369712 }, { "content": "pub trait PrettyPrint {\n\n fn pretty_print_into(&self, identifier_map: &IdentifierMap, pretty_printed: &mut String) -> ();\n\n fn pretty_print(&self, identifier_map: &IdentifierMap) -> String {\n\n let mut pretty_printed = String::new();\n\n self.pretty_print_into(identifier_map, &mut pretty_printed);\n\n pretty_printed\n\n }\n\n}\n\n\n\nimpl PrettyPrint for Expr {\n\n fn pretty_print_into(&self, identifier_map: &IdentifierMap, pretty_printed: &mut String) -> () {\n\n match *self {\n\n Expr::This(_, _) => pretty_printed.push_str(\"this\"),\n\n Expr::Super(_, _, ref identifier) => {\n\n pretty_printed.push_str(\"super.\");\n\n identifier.pretty_print_into(identifier_map, pretty_printed)\n\n }\n\n Expr::Literal(ref l) => l.pretty_print_into(identifier_map, pretty_printed),\n\n Expr::Unary(ref u) => u.pretty_print_into(identifier_map, pretty_printed),\n\n Expr::Binary(ref b) => b.pretty_print_into(identifier_map, pretty_printed),\n", "file_path": "rulox/src/treewalk/pretty_printer.rs", "rank": 64, "score": 43095.60464348404 }, { "content": "fn print_errors(errors: &Vec<LoxError>) {\n\n for error in errors {\n\n eprintln!(\"{}\", error);\n\n }\n\n}\n\n\n", "file_path": "radogost/rlox/src/main.rs", "rank": 65, "score": 36338.999446473914 }, { "content": "use crate::classes::{LoxClass, LoxInstance};\n\nuse crate::functions::Function;\n\n\n\nuse std::cell::RefCell;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::rc::Rc;\n\n\n\n#[derive(Debug)]\n\npub enum Object {\n\n Boolean(bool),\n\n Nil,\n\n Number(f64),\n\n String(String),\n\n Function(Rc<dyn Function>),\n\n Class(Rc<LoxClass>),\n\n Instance(Rc<RefCell<LoxInstance>>),\n\n}\n\n\n\nimpl 
Display for Object {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n", "file_path": "radogost/rlox/src/object.rs", "rank": 66, "score": 32212.055672779636 }, { "content": " match &self {\n\n Object::Nil => write!(f, \"nil\"),\n\n Object::Number(num) => {\n\n if num.fract() == 0.0 {\n\n write!(f, \"{:.0}\", num)\n\n } else {\n\n write!(f, \"{}\", num)\n\n }\n\n }\n\n Object::Boolean(b) => write!(f, \"{}\", b),\n\n Object::String(s) => write!(f, \"{}\", s),\n\n Object::Function(func) => write!(f, \"{:?}\", func),\n\n Object::Class(class) => write!(f, \"{}\", class),\n\n Object::Instance(instance) => write!(f, \"{}\", instance.borrow()),\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq for Object {\n\n fn eq(&self, other: &Self) -> bool {\n", "file_path": "radogost/rlox/src/object.rs", "rank": 67, "score": 32210.56522824077 }, { "content": " match (self, other) {\n\n (Object::Boolean(a), Object::Boolean(b)) => a == b,\n\n (Object::Nil, Object::Nil) => true,\n\n (Object::Number(a), Object::Number(b)) => a == b,\n\n (Object::String(a), Object::String(b)) => a == b,\n\n _ => false,\n\n }\n\n }\n\n}\n", "file_path": "radogost/rlox/src/object.rs", "rank": 68, "score": 32201.13409915441 }, { "content": "use crate::token::TokenType;\n\n\n\nuse std::rc::Rc;\n\n\n\npub type ExprId = u64;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Expr {\n\n // literal values\n\n Number(f64),\n\n String(String),\n\n Boolean(bool),\n\n Nil,\n\n // compound expressions\n\n Binary {\n\n left: Box<Expr>,\n\n token_type: TokenType,\n\n right: Box<Expr>,\n\n },\n\n Call {\n", "file_path": "radogost/rlox/src/statement.rs", "rank": 69, "score": 32191.35467541595 }, { "content": " If {\n\n condition: Expr,\n\n then_branch: Box<Stmt>,\n\n else_branch: Option<Box<Stmt>>,\n\n },\n\n While {\n\n condition: Expr,\n\n body: Box<Stmt>,\n\n },\n\n Function {\n\n name: String,\n\n parameters: Rc<Vec<String>>,\n\n body: Rc<Vec<Stmt>>,\n\n },\n\n Return {\n\n value: Option<Expr>,\n\n },\n\n Class {\n\n 
name: String,\n\n superclass: Option<Box<Expr>>,\n\n methods: Box<Vec<Stmt>>,\n\n },\n\n}\n", "file_path": "radogost/rlox/src/statement.rs", "rank": 70, "score": 32187.375091667644 }, { "content": " callee: Box<Expr>,\n\n arguments: Box<Vec<Expr>>,\n\n },\n\n Get {\n\n object: Box<Expr>,\n\n name: String,\n\n },\n\n Set {\n\n object: Box<Expr>,\n\n name: String,\n\n value: Rc<Expr>,\n\n },\n\n Super {\n\n id: ExprId,\n\n keyword: &'static str,\n\n method: String,\n\n },\n\n This {\n\n id: ExprId,\n\n keyword: &'static str,\n", "file_path": "radogost/rlox/src/statement.rs", "rank": 71, "score": 32185.023680614264 }, { "content": " },\n\n Grouping {\n\n expression: Box<Expr>,\n\n },\n\n Unary {\n\n token_type: TokenType,\n\n right: Box<Expr>,\n\n },\n\n Logical {\n\n left: Box<Expr>,\n\n operator: TokenType,\n\n right: Box<Expr>,\n\n },\n\n // assignments\n\n Variable {\n\n id: ExprId,\n\n name: String,\n\n },\n\n Assign {\n\n id: ExprId,\n", "file_path": "radogost/rlox/src/statement.rs", "rank": 72, "score": 32184.895467179496 }, { "content": " name: String,\n\n value: Box<Expr>,\n\n },\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Stmt {\n\n Expression {\n\n expression: Expr,\n\n },\n\n Print {\n\n expression: Expr,\n\n },\n\n Var {\n\n name: String,\n\n initializer: Option<Expr>,\n\n },\n\n Block {\n\n statements: Box<Vec<Stmt>>,\n\n },\n", "file_path": "radogost/rlox/src/statement.rs", "rank": 73, "score": 32178.689018573426 }, { "content": "\n\nimpl LoxFunction {\n\n pub fn new(\n\n parameters: Rc<Vec<String>>,\n\n body: Rc<Vec<Stmt>>,\n\n closure: Rc<RefCell<Environment>>,\n\n is_initializer: bool,\n\n ) -> Self {\n\n LoxFunction {\n\n parameters,\n\n body,\n\n closure,\n\n is_initializer,\n\n }\n\n }\n\n\n\n pub fn bind(&self, instance: Rc<Object>) -> Self {\n\n let mut environment = Environment::with_enclosing(self.closure.clone());\n\n environment.define(\"this\", instance);\n\n Self {\n", "file_path": "radogost/rlox/src/functions.rs", "rank": 74, "score": 
32159.93580852607 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\npub enum TokenType {\n\n LeftParen,\n\n RightParen,\n\n LeftBrace,\n\n RightBrace,\n\n Comma,\n\n Dot,\n\n Minus,\n\n Plus,\n\n Semicolon,\n\n Slash,\n\n Star,\n\n\n\n Bang,\n\n BangEqual,\n\n Equal,\n\n EqualEqual,\n\n Greater,\n\n GreaterEqual,\n", "file_path": "radogost/rlox/src/token.rs", "rank": 75, "score": 32158.15947763422 }, { "content": " True,\n\n Var,\n\n While,\n\n\n\n Eof,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Token<'a> {\n\n pub token_type: TokenType,\n\n pub lexeme: &'a str,\n\n pub line: u32,\n\n}\n", "file_path": "radogost/rlox/src/token.rs", "rank": 76, "score": 32157.306113841885 }, { "content": " .iter()\n\n .zip(arguments.iter())\n\n .for_each(|(declaration, argument)| {\n\n environment.define(declaration, argument.clone());\n\n });\n\n\n\n let result = interpreter.execute_block(&self.body, Rc::new(RefCell::new(environment)));\n\n let return_value = match result {\n\n Ok(()) => {\n\n if self.is_initializer {\n\n self.closure.borrow().get(0, \"this\")?\n\n } else {\n\n Rc::new(Object::Nil)\n\n }\n\n }\n\n Err(LoxError::Return(value)) => {\n\n if self.is_initializer {\n\n self.closure.borrow().get(0, \"this\")?\n\n } else {\n\n value\n\n }\n\n }\n\n Err(err) => return Err(err),\n\n };\n\n\n\n Ok(return_value)\n\n }\n\n}\n", "file_path": "radogost/rlox/src/functions.rs", "rank": 77, "score": 32156.676761768198 }, { "content": " }\n\n\n\n fn call(&self, _: &mut Interpreter, _: &Vec<Rc<Object>>) -> Result<Rc<Object>> {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n\n Ok(Rc::new(Object::Number(now.as_secs() as f64)))\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Clock {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"<native fn>\")\n\n }\n\n}\n\n\n\npub struct LoxFunction {\n\n parameters: Rc<Vec<String>>,\n\n body: Rc<Vec<Stmt>>,\n\n closure: Rc<RefCell<Environment>>,\n\n is_initializer: bool,\n\n}\n", 
"file_path": "radogost/rlox/src/functions.rs", "rank": 78, "score": 32154.40227832741 }, { "content": " parameters: self.parameters.clone(),\n\n body: self.body.clone(),\n\n closure: Rc::new(RefCell::new(environment)),\n\n is_initializer: self.is_initializer,\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for LoxFunction {\n\n fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"LoxFunction {{ parameters: {:?}, body: {:?} }}\",\n\n self.parameters, self.body\n\n )\n\n }\n\n}\n\n\n\nimpl Function for LoxFunction {\n\n fn arity(&self) -> usize {\n", "file_path": "radogost/rlox/src/functions.rs", "rank": 79, "score": 32153.088138602703 }, { "content": " Less,\n\n LessEqual,\n\n\n\n Identifier,\n\n String(String),\n\n Number(f64),\n\n\n\n And,\n\n Class,\n\n Else,\n\n False,\n\n Fun,\n\n For,\n\n If,\n\n Nil,\n\n Or,\n\n Print,\n\n Return,\n\n Super,\n\n This,\n", "file_path": "radogost/rlox/src/token.rs", "rank": 80, "score": 32147.661348931393 }, { "content": " self.parameters.len()\n\n }\n\n\n\n fn call(\n\n &self,\n\n interpreter: &mut Interpreter,\n\n arguments: &Vec<Rc<Object>>,\n\n ) -> Result<Rc<Object>> {\n\n if self.arity() != arguments.len() {\n\n return Err(LoxError::InterpreterError(\n\n format!(\n\n \"Expected {} arguments but got {}.\",\n\n self.arity(),\n\n arguments.len()\n\n )\n\n .into(),\n\n ));\n\n };\n\n let mut environment = Environment::with_enclosing(self.closure.clone());\n\n self.parameters\n", "file_path": "radogost/rlox/src/functions.rs", "rank": 81, "score": 32147.430011883855 }, { "content": "use crate::environment::Environment;\n\nuse crate::error::{LoxError, Result};\n\nuse crate::interpreter::Interpreter;\n\nuse crate::object::Object;\n\nuse crate::statement::Stmt;\n\n\n\nuse std::cell::RefCell;\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::rc::Rc;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n", "file_path": "radogost/rlox/src/functions.rs", "rank": 82, "score": 32139.029648114712 }, { "content": " ) -> Self {\n\n 
Self {\n\n name,\n\n superclass,\n\n methods,\n\n }\n\n }\n\n\n\n pub fn find_method(&self, name: &str) -> Option<Rc<LoxFunction>> {\n\n if let Some(method) = self.methods.get(name) {\n\n Some(Rc::clone(method))\n\n } else if let Some(superclass) = &self.superclass {\n\n superclass.find_method(name)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl Display for LoxClass {\n", "file_path": "radogost/rlox/src/classes.rs", "rank": 83, "score": 32093.641870883683 }, { "content": " if let Object::Instance(instance) = wrapping_object.clone().as_ref() {\n\n if let Some(value) = instance.borrow().fields.get(name) {\n\n Ok(Rc::clone(value))\n\n } else if let Some(method) = instance.borrow().class.as_ref().find_method(name) {\n\n Ok(Rc::new(Object::Function(Rc::new(\n\n method.bind(wrapping_object),\n\n ))))\n\n } else {\n\n Err(LoxError::InterpreterError(\n\n format!(\"Undefined property {}.\", name).into(),\n\n ))\n\n }\n\n } else {\n\n Err(LoxError::InterpreterError(\n\n \"Only instances have fields.\".into(),\n\n ))\n\n }\n\n }\n\n\n\n pub fn set(&mut self, name: &str, value: Rc<Object>) {\n", "file_path": "radogost/rlox/src/classes.rs", "rank": 84, "score": 32091.733993664035 }, { "content": "use crate::error::{LoxError, Result};\n\nuse crate::functions::LoxFunction;\n\nuse crate::object::Object;\n\n\n\nuse std::collections::HashMap;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::rc::Rc;\n\n\n\n#[derive(Debug)]\n\npub struct LoxClass {\n\n name: String,\n\n superclass: Option<Rc<LoxClass>>,\n\n methods: HashMap<String, Rc<LoxFunction>>,\n\n}\n\n\n\nimpl LoxClass {\n\n pub fn new(\n\n name: String,\n\n superclass: Option<Rc<LoxClass>>,\n\n methods: HashMap<String, Rc<LoxFunction>>,\n", "file_path": "radogost/rlox/src/classes.rs", "rank": 85, "score": 32088.496848214756 }, { "content": " fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", self.name)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct LoxInstance {\n\n class: Rc<LoxClass>,\n\n 
fields: HashMap<String, Rc<Object>>,\n\n}\n\n\n\nimpl LoxInstance {\n\n pub fn new(class: Rc<LoxClass>) -> Self {\n\n Self {\n\n class,\n\n fields: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn get(wrapping_object: Rc<Object>, name: &str) -> Result<Rc<Object>> {\n", "file_path": "radogost/rlox/src/classes.rs", "rank": 86, "score": 32088.288341042982 }, { "content": " self.fields.insert(name.to_string(), value);\n\n }\n\n}\n\n\n\nimpl Display for LoxInstance {\n\n fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {\n\n write!(f, \"{} instance\", self.class.name)\n\n }\n\n}\n", "file_path": "radogost/rlox/src/classes.rs", "rank": 87, "score": 32072.8852843887 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl TreeWalkRuloxInterpreter {\n\n fn scan_and_parse(&mut self, source: &str) -> Result<Vec<ast::Statement>, Vec<InputError>> {\n\n let (tokens, scanner_errors) = scanner::scan(source);\n\n let mut errors: Vec<InputError> = scanner_errors\n\n .iter()\n\n .map(|e| InputError::ScannerError(e.clone()))\n\n .collect();\n\n match self.parser.parse(&tokens) {\n\n Ok(expr) => {\n\n if errors.is_empty() {\n\n Ok(expr)\n\n } else {\n\n Err(errors)\n\n }\n\n }\n\n Err(parse_errors) => {\n", "file_path": "rulox/src/treewalk/mod.rs", "rank": 88, "score": 32072.068640596382 }, { "content": " }\n\n}\n\n\n\nimpl LoxImplementation for TreeWalkRuloxInterpreter {\n\n fn run(&mut self, source: &str) -> Result<(), RunError> {\n\n match self.run(source) {\n\n Ok(_) => Ok(()),\n\n //TODO: improve\n\n _ => Err(RunError::Error),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use treewalk::*;\n\n\n\n proptest! 
{\n\n #[test]\n\n fn doesnt_crash(ref input in \"\\\\PC*\") {\n\n let mut lox_vm = TreeWalkRuloxInterpreter::default();\n\n lox_vm.run(input)\n\n }\n\n }\n\n}\n", "file_path": "rulox/src/treewalk/mod.rs", "rank": 89, "score": 32070.455548565227 }, { "content": "pub mod bytecode;\n\npub mod compiler;\n\npub mod interpreter;\n\n\n\nuse std::io::{stdout, LineWriter};\n\nuse user_interface::{LoxImplementation, RunError};\n\n\n\n#[derive(Default)]\n\npub struct LoxVm {}\n\n\n\nimpl LoxImplementation for LoxVm {\n\n fn run(&mut self, source: &str) -> Result<(), RunError> {\n\n let chunk = compiler::compile(source).map_err(|_| RunError::Error)?;\n\n let stdout = stdout();\n\n let handle = stdout.lock();\n\n let mut writer = LineWriter::new(handle);\n\n bytecode::disassemble(&chunk, \"Test\", &mut writer).map_err(|_| RunError::Error)?;\n\n interpreter::trace(&chunk, &mut writer).map_err(|_| RunError::Error)?;\n\n Ok(())\n\n }\n", "file_path": "rulox/src/vm/mod.rs", "rank": 90, "score": 32070.249106847543 }, { "content": " for error in parse_errors {\n\n errors.push(InputError::ParserError(error))\n\n }\n\n Err(errors)\n\n }\n\n }\n\n }\n\n\n\n fn run(&mut self, source: &str) -> Result<(), LoxError> {\n\n let statements = self.scan_and_parse(source).map_err(LoxError::InputError)?;\n\n let lexical_scope = self\n\n .lexical_scope_resolver\n\n .resolve_all(&statements)\n\n .map_err(LoxError::LexicalScopesResolutionError)?;\n\n for statement in &statements {\n\n self.interpreter\n\n .execute(&lexical_scope, &statement)\n\n .map_err(LoxError::RuntimeError)?;\n\n }\n\n Ok(())\n", "file_path": "rulox/src/treewalk/mod.rs", "rank": 91, "score": 32068.513694787 }, { "content": "mod ast;\n\nmod interpreter;\n\nmod lexical_scope_resolver;\n\nmod parser;\n\nmod pretty_printer;\n\n\n\nuse self::ast::IdentifierMap;\n\nuse self::interpreter::{Environment, RuntimeError, StatementInterpreter};\n\nuse self::lexical_scope_resolver::{LexicalScopesResolutionError, LexicalScopesResolver};\n\nuse 
self::parser::{ParseError, Parser};\n\nuse frontend::scanner;\n\nuse user_interface::{LoxImplementation, RunError};\n\n\n\n#[derive(Debug)]\n", "file_path": "rulox/src/treewalk/mod.rs", "rank": 92, "score": 32065.24646256184 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use vm::*;\n\n\n\n proptest! {\n\n #[test]\n\n #[ignore]\n\n fn doesnt_crash(ref input in \"\\\\PC*\") {\n\n let mut lox_vm = LoxVm::default();\n\n lox_vm.run(input)\n\n }\n\n }\n\n}\n", "file_path": "rulox/src/vm/mod.rs", "rank": 93, "score": 32059.529060716945 }, { "content": "pub mod scanner;\n", "file_path": "rulox/src/frontend/mod.rs", "rank": 94, "score": 32054.76052737137 }, { "content": " Some(Box::new(Expr::Variable {\n\n id: next_id(),\n\n name: superclass_identifier.to_string(),\n\n }))\n\n } else {\n\n None\n\n };\n\n\n\n self.consume(TokenType::LeftBrace, \"Expect '{' before class body\".into())?;\n\n\n\n let mut methods = vec![];\n\n while !self.matches(&[TokenType::RightBrace]) {\n\n methods.push(self.function()?);\n\n }\n\n self.consume(TokenType::RightBrace, \"Expect '}' after class body\".into())?;\n\n\n\n Ok(Stmt::Class {\n\n name: name.to_string(),\n\n superclass,\n\n methods: Box::new(methods),\n", "file_path": "radogost/rlox/src/parser.rs", "rank": 99, "score": 52.16095332030784 } ]
Rust
nakadion/src/handler/mod.rs
chridou/nakadion
86975ec81cd4b1d14a8ce26e8bc02f0772cb71c9
use std::fmt; use std::time::{Duration, Instant}; pub use bytes::Bytes; use futures::future::BoxFuture; pub type BatchHandlerFuture<'a> = BoxFuture<'a, BatchPostAction>; use crate::nakadi_types::{ event_type::EventTypeName, partition::PartitionId, subscription::{EventTypePartition, StreamId, SubscriptionCursor}, }; pub use crate::nakadi_types::Error; mod typed; pub use typed::*; #[derive(Debug)] #[non_exhaustive] pub struct BatchMeta<'a> { pub stream_id: StreamId, pub cursor: &'a SubscriptionCursor, pub frame_started_at: Instant, pub frame_completed_at: Instant, pub frame_id: usize, pub n_events: usize, } #[derive(Debug, Clone)] pub enum BatchPostAction { Commit(BatchStats), DoNotCommit(BatchStats), AbortStream(String), ShutDown(String), } impl BatchPostAction { pub fn commit_no_stats() -> Self { BatchPostAction::Commit(BatchStats::default()) } pub fn commit(t_deserialize: Duration) -> Self { BatchPostAction::Commit(BatchStats { t_deserialize: Some(t_deserialize), }) } pub fn do_not_commit_no_stats() -> Self { BatchPostAction::DoNotCommit(BatchStats::default()) } pub fn do_not_commit(t_deserialize: Duration) -> Self { BatchPostAction::DoNotCommit(BatchStats { t_deserialize: Some(t_deserialize), }) } } #[derive(Default, Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub struct BatchStats { pub t_deserialize: Option<Duration>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum InactivityAnswer { KeepMeAlive, KillMe, } impl InactivityAnswer { pub fn should_kill(self) -> bool { self == InactivityAnswer::KillMe } pub fn should_stay_alive(self) -> bool { self == InactivityAnswer::KeepMeAlive } } pub trait BatchHandler: Send { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a>; fn on_inactive( &mut self, _inactive_for: Duration, _last_activity: Instant, ) -> InactivityAnswer { InactivityAnswer::KeepMeAlive } } pub struct HandlerFn<F>(pub F); impl<F> BatchHandler for HandlerFn<F> where F: for<'a> FnMut(Bytes, BatchMeta<'a>) -> 
BatchHandlerFuture<'a> + Send, { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a> { (self.0)(events, meta) } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum HandlerAssignment { Unspecified, EventType(EventTypeName), EventTypePartition(EventTypePartition), } impl HandlerAssignment { pub fn event_type(&self) -> Option<&EventTypeName> { self.event_type_and_partition().0 } pub fn partition(&self) -> Option<&PartitionId> { self.event_type_and_partition().1 } pub fn event_type_and_partition(&self) -> (Option<&EventTypeName>, Option<&PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(&event_type), None), HandlerAssignment::EventTypePartition(ref etp) => { (Some(etp.event_type()), Some(etp.partition())) } } } pub fn into_event_type_and_partition(self) -> (Option<EventTypeName>, Option<PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(event_type), None), HandlerAssignment::EventTypePartition(etp) => { let (a, b) = etp.split(); (Some(a), Some(b)) } } } } impl fmt::Display for HandlerAssignment { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { HandlerAssignment::Unspecified => write!(f, "[unspecified]")?, HandlerAssignment::EventType(ref event_type) => { write!(f, "[event_type={}]", event_type)? 
} HandlerAssignment::EventTypePartition(ref event_type_partition) => write!( f, "[event_type={}, partition={}]", event_type_partition.event_type(), event_type_partition.partition() )?, } Ok(()) } } pub trait BatchHandlerFactory: Send + Sync + 'static { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>>; } impl<T> BatchHandlerFactory for T where T: for<'a> Fn(&'a HandlerAssignment) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> + Send + Sync + 'static, { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> { self(assignment) } }
use std::fmt; use std::time::{Duration, Instant}; pub use bytes::Bytes; use futures::future::BoxFuture; pub type BatchHandlerFuture<'a> = BoxFuture<'a, BatchPostAction>; use crate::nakadi_types::{ event_type::EventTypeName, partition::PartitionId, subscription::{EventTypePartition, StreamId, SubscriptionCursor}, }; pub use crate::nakadi_types::Error; mod typed; pub use typed::*; #[derive(Debug)] #[non_exhaustive] pub struct BatchMeta<'a> { pub stream_id: StreamId, pub cursor: &'a SubscriptionCursor, pub frame_started_at: Instant, pub frame_completed_at: Instant, pub frame_id: usize, pub n_events: usize, } #[derive(Debug, Clone)] pub enum BatchPostAction { Commit(BatchStats), DoNotCommit(BatchStats), AbortStream(String), ShutDown(String), } impl BatchPostAction { pub fn commit_no_stats() -> Self { BatchPostAction::Commit(BatchStats::default()) } pub fn commit(t_deserialize: Duration) -> Self { BatchPostAction::Commit(BatchStats { t_deserialize: Some(t_deserialize), }) } pub fn do_not_commit_no_stats() -> Self { BatchPostAction::DoNotCommit(BatchStats::default()) } pub fn do_not_commit(t_deserialize: Duration) -> Self { BatchPostAction::DoNotCommit(BatchStats { t_deserialize: Some(t_deserialize), }) } } #[derive(Default, Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub struct BatchStats { pub t_deserialize: Option<Duration>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum InactivityAnswer { KeepMeAlive, KillMe, } impl InactivityAnswer { pub fn should_kill(self) -> bool { self == InactivityAnswer::KillMe } pub fn should_stay_alive(self) -> bool { self == InactivityAnswer::KeepMeAlive } } pub trait BatchHandler: Send { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a>; fn on_inactive( &mut self, _inactive_for: Duration, _last_activity: Instant, ) -> InactivityAnswer { InactivityAnswer::KeepMeAlive } } pub struct HandlerFn<F>(pub F); impl<F> BatchHandler for HandlerFn<F> where F: for<'a> FnMut(Bytes, BatchMeta<'a>) -> 
BatchHandlerFuture<'a> + Send, { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a> { (self.0)(events, meta) } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum HandlerAssignment { Unspecified, EventType(EventTypeName), EventTypePartition(EventTypePartition), } impl HandlerAssignment { pub fn event_type(&self) -> Option<&EventTypeName> { self.event_type_and_partition().0 } pub fn partition(&self) -> Option<&PartitionId> { self.event_type_and_partition().1 } pub fn event_type_and_partition(&self) -> (Option<&EventTypeName>, Option<&PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(&event_type), None), HandlerAssignment::EventTypePartition(ref etp) => { (Some(etp.event_type()), Some(etp.partition())) } } } pub fn into_event_type_and_partition(self) -> (Option<EventTypeName>, Option<PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(event_type), None), HandlerAssignment::EventTypePartition(etp) => { let (a, b) = etp.split(); (Some(a), Some(b))
:Unspecified => write!(f, "[unspecified]")?, HandlerAssignment::EventType(ref event_type) => { write!(f, "[event_type={}]", event_type)? } HandlerAssignment::EventTypePartition(ref event_type_partition) => write!( f, "[event_type={}, partition={}]", event_type_partition.event_type(), event_type_partition.partition() )?, } Ok(()) } } pub trait BatchHandlerFactory: Send + Sync + 'static { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>>; } impl<T> BatchHandlerFactory for T where T: for<'a> Fn(&'a HandlerAssignment) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> + Send + Sync + 'static, { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> { self(assignment) } }
} } } } impl fmt::Display for HandlerAssignment { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { HandlerAssignment:
random
[ { "content": "/// Something that has an event type and a partition\n\n///\n\n/// Must only return event types and partitions that belong together.\n\npub trait EventTypePartitionLike {\n\n fn event_type(&self) -> &EventTypeName;\n\n fn partition(&self) -> &PartitionId;\n\n}\n\n\n\n/// Represents event-type/partition pair.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventTypePartition)\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]\n\npub struct EventTypePartition {\n\n pub event_type: EventTypeName,\n\n pub partition: PartitionId,\n\n}\n\n\n\nimpl EventTypePartition {\n\n pub fn new<E: Into<EventTypeName>, P: Into<PartitionId>>(event_type: E, partition: P) -> Self {\n\n Self {\n\n event_type: event_type.into(),\n\n partition: partition.into(),\n\n }\n", "file_path": "nakadi-types/src/subscription/mod.rs", "rank": 0, "score": 230738.7801553685 }, { "content": "pub trait LifecycleListener: Send + 'static {\n\n fn on_consumer_started(&self, _subscription_id: SubscriptionId) {}\n\n fn on_consumer_stopped(&self, _subscription_id: SubscriptionId) {}\n\n fn on_stream_connected(&self, _subscription_id: SubscriptionId, _stream_id: StreamId) {}\n\n fn on_stream_ended(&self, _subscription_id: SubscriptionId, _stream_id: StreamId) {}\n\n}\n\n\n", "file_path": "nakadion/src/consumer/mod.rs", "rank": 2, "score": 203355.77064990558 }, { "content": "/// Basically the same a `BatchHandler` with the difference that\n\n/// deserialized events are passed to the processing logic.\n\n///\n\n/// This is basically a convenience handler.\n\n///\n\n/// The events must implement `serde`s `DeserializeOwned`.\n\n///\n\n/// # Hint\n\n///\n\n/// The `handle` method gets called on `&mut self`.\n\n/// # Example\n\n///\n\n/// ```\n\n/// use serde::Deserialize;\n\n/// use futures::FutureExt;\n\n///\n\n/// use nakadion::handler::*;\n\n///\n\n///\n\n/// // Use a struct to maintain state\n\n/// struct MyHandler {\n\n/// pub 
count: i32,\n\n/// }\n\n///\n\n/// #[derive(Deserialize)]\n\n/// struct MyEvent(i32);\n\n///\n\n/// // Implement the processing logic by implementing `BatchHandler`\n\n/// impl EventsHandler for MyHandler {\n\n/// type Event = MyEvent;\n\n///\n\n/// fn handle(&mut self, events: Vec<MyEvent>, _meta: BatchMeta) -> EventsHandlerFuture {\n\n/// async move {\n\n/// for MyEvent(amount) in events {\n\n/// self.count += amount;\n\n/// }\n\n/// EventsPostAction::Commit\n\n/// }.boxed()\n\n/// }\n\n///\n\n/// fn deserialize_on(&mut self, n_bytes: usize) -> SpawnTarget {\n\n/// // We expect costly deserialization...\n\n/// if n_bytes > 10_000 {\n\n/// SpawnTarget::Dedicated\n\n/// } else {\n\n/// SpawnTarget::Executor\n\n/// }\n\n/// }\n\n/// }\n\n///\n\n/// ```\n\npub trait EventsHandler {\n\n type Event: DeserializeOwned + Send + 'static;\n\n /// Execute the processing logic with a deserialized batch of events.\n\n fn handle<'a>(\n\n &'a mut self,\n\n events: Vec<Self::Event>,\n\n meta: BatchMeta<'a>,\n\n ) -> BoxFuture<'a, EventsPostAction>;\n\n\n\n /// A handler which is invoked if deserialization of the\n\n /// whole events batch at once failed.\n\n ///\n\n /// The default implementation will shut down the consumer.\n\n fn handle_deserialization_errors<'a>(\n\n &'a mut self,\n\n results: Vec<EventDeserializationResult<Self::Event>>,\n\n _meta: BatchMeta<'a>,\n\n ) -> EventsHandlerFuture<'a> {\n\n let num_events = results.len();\n\n let num_failed = results.iter().filter(|r| r.is_err()).count();\n", "file_path": "nakadion/src/handler/typed.rs", "rank": 3, "score": 200493.7496385713 }, { "content": "/// Logs the given `Arguments` at different log levels\n\npub trait Logger: Send + Sync + 'static {\n\n fn debug(&self, args: Arguments);\n\n fn info(&self, args: Arguments);\n\n fn warn(&self, args: Arguments);\n\n fn error(&self, args: Arguments);\n\n}\n\n\n\nimpl<T> Logger for T\n\nwhere\n\n T: LoggingAdapter,\n\n{\n\n fn debug(&self, args: Arguments) {\n\n 
LoggingAdapter::debug(self, &LoggingContext::default(), args)\n\n }\n\n\n\n fn info(&self, args: Arguments) {\n\n LoggingAdapter::info(self, &LoggingContext::default(), args)\n\n }\n\n fn warn(&self, args: Arguments) {\n\n LoggingAdapter::warn(self, &LoggingContext::default(), args)\n", "file_path": "nakadion/src/logging/mod.rs", "rank": 4, "score": 198629.28167119098 }, { "content": "/// Publish non serialized events.\n\n///\n\n/// This trait is implemented for all types which implement `PublishesSerializedEvents`.\n\npub trait PublishesEvents {\n\n fn publish_events<'a, E: Serialize + Sync, T: Into<FlowId>>(\n\n &'a self,\n\n event_type: &'a EventTypeName,\n\n events: &'a [E],\n\n flow_id: T,\n\n ) -> PublishFuture<'a>;\n\n}\n\n\n\n/// Publishes events with retries\n\n///\n\n/// ## `PublishApi`\n\n///\n\n/// The publisher implements `PublishApi`. If the trait method is used\n\n/// for publishing no retries are done on partial successes. Retries are\n\n/// only done on io errors and server errors or on auth errors if\n\n/// `retry_on_auth_errors` is set to `true`.\n\n///\n\n#[derive(Clone)]\n\npub struct Publisher<C> {\n", "file_path": "nakadion/src/publisher/mod.rs", "rank": 5, "score": 198557.00927182083 }, { "content": "/// An adapter for pluggable logging.\n\n///\n\n/// Implementors can be used by the `Consumer`\n\npub trait LoggingAdapter: Send + Sync + 'static {\n\n fn debug(&self, context: &LoggingContext, args: Arguments);\n\n fn info(&self, context: &LoggingContext, args: Arguments);\n\n fn warn(&self, context: &LoggingContext, args: Arguments);\n\n fn error(&self, context: &LoggingContext, args: Arguments);\n\n}\n\n\n\n/// Logs to stdout\n\n///\n\n/// This does not use the tokio version. 
It blocks the current thread.\n\n#[derive(Clone)]\n\npub struct StdOutLoggingAdapter(LogConfig);\n\n\n\nimpl StdOutLoggingAdapter {\n\n pub fn new(config: LogConfig) -> Self {\n\n Self(config)\n\n }\n\n}\n\n\n\nimpl Default for StdOutLoggingAdapter {\n", "file_path": "nakadion/src/logging/mod.rs", "rank": 6, "score": 194523.6136106057 }, { "content": "/// Publishes events that have been serialized before\n\n///\n\n/// This trait can be made a trait object\n\npub trait PublishesSerializedEvents {\n\n /// Publishes the serialized events.\n\n fn publish_serialized_events<'a>(\n\n &'a self,\n\n event_type: &'a EventTypeName,\n\n events: &[Bytes],\n\n flow_id: FlowId,\n\n ) -> PublishFuture<'a>;\n\n}\n\n\n", "file_path": "nakadion/src/publisher/mod.rs", "rank": 7, "score": 193934.2435003372 }, { "content": "fn add_delimiter(n: &mut usize, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n (*n) -= 1;\n\n if *n == 0 {\n\n write!(f, \"]\")?;\n\n } else {\n\n write!(f, \";\")?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl<'a> fmt::Display for ContextDisplay<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.format(f)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"slog\")]\n\npub mod slog_adapter {\n\n use std::fmt;\n\n\n", "file_path": "nakadion/src/logging/mod.rs", "rank": 9, "score": 180972.89292612794 }, { "content": "fn assemble_bytes_to_publish(events: &[Bytes]) -> Bytes {\n\n let mut size = events.iter().map(|b| b.len()).sum();\n\n if events.is_empty() || size == 0 {\n\n return Bytes::default();\n\n }\n\n size += (events.len() - 1) + 2; // commas plus outer braces\n\n let mut buffer = Vec::with_capacity(size);\n\n\n\n buffer.push(b'[');\n\n\n\n let last_idx = events.len() - 1;\n\n for (i, event) in events.iter().enumerate() {\n\n buffer.extend_from_slice(event);\n\n if i != last_idx {\n\n buffer.push(b',');\n\n }\n\n }\n\n\n\n buffer.push(b']');\n\n\n\n buffer.into()\n\n}\n", "file_path": "nakadion/src/publisher/mod.rs", "rank": 10, "score": 
172186.70672748607 }, { "content": "fn pos_to_bytes(pos: (usize, usize), bytes: &Bytes) -> Bytes {\n\n let (a, b) = pos;\n\n assert!(a < b, \"invalid line parse indexes\");\n\n bytes.slice(a..b)\n\n}\n\n\n\nimpl Default for Cursor {\n\n fn default() -> Cursor {\n\n Cursor {\n\n line_position: (0, 0),\n\n partition: (0, 0),\n\n event_type: (0, 0),\n\n offset: (0, 0),\n\n cursor_token: (0, 0),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ParseBatchError(String);\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 11, "score": 168079.8276745538 }, { "content": "/// An interface on which `Nakadion` exposes metrics\n\n///\n\n/// An implementor of this interface can be used with\n\n/// `Instrumentation::new`\n\n///\n\n/// Implementations of this trait should not be shared with multiple consumers\n\n/// since they are stateful e.g. in flight batches.\n\npub trait Instruments {\n\n fn consumer_started(&self) {}\n\n fn consumer_stopped(&self, _ran_for: Duration) {}\n\n fn streaming_ended(&self, _streamed_for: Duration) {}\n\n\n\n /// Triggered when a single connect attempt for a stream was successful\n\n ///\n\n /// `time` is the time for the request\n\n fn stream_connect_attempt_success(&self, _time: Duration) {}\n\n /// Triggered when a single connect attempt for a stream failed\n\n ///\n\n /// `time` is the time for the request\n\n fn stream_connect_attempt_failed(&self, _time: Duration) {}\n\n /// Triggered when a stream was finally connect after maybe multiple attempts\n\n ///\n\n /// `time` is the time for the whole cycle until a connection was made\n\n fn stream_connected(&self, _time: Duration) {}\n\n /// Triggered when connecting to a stream finally failed after maybe multiple attempts\n\n ///\n\n /// `time` is the time for the whole cycle until a connection attempts finally failed\n", "file_path": "nakadion/src/instrumentation/mod.rs", "rank": 12, "score": 164325.80889942625 }, { "content": " pub trait Instruments 
{\n\n /// Tracks the number of unconsumed events.\n\n fn unconsumed_events(&self, n_unconsumed: usize);\n\n }\n\n\n\n pub struct SubscriptionStatsReporter {\n\n sender: Sender<Message>,\n\n }\n\n\n\n impl SubscriptionStatsReporter {\n\n pub fn new<C, I, L>(client: C, instrumentation: I, logger: L) -> Self\n\n where\n\n I: Instruments + Clone + Send + 'static,\n\n C: SubscriptionApi + Clone + Send + Sync + 'static,\n\n L: LoggingAdapter + Send + 'static,\n\n {\n\n let (sender, receiver) = channel::unbounded();\n\n\n\n let looper = Looper::new(client, instrumentation, receiver, logger);\n\n\n", "file_path": "nakadion/src/tools/mod.rs", "rank": 13, "score": 164317.2322081835 }, { "content": "fn create_sorted_partitioner<B: BuildHasher + Clone>(\n\n mut partitions: Vec<PartitionId>,\n\n build_hasher: B,\n\n) -> Partitioner<B> {\n\n let ids_and_ints: Result<Vec<_>, _> = partitions\n\n .iter()\n\n .map(|id| id.as_str().parse::<u64>().map(|n| (id, n)))\n\n .collect();\n\n\n\n if let Ok(mut ids_and_ints) = ids_and_ints {\n\n ids_and_ints.sort_by_key(|x| x.1);\n\n Partitioner::new_with_hasher(\n\n ids_and_ints.into_iter().map(|(p, _)| p.clone()).collect(),\n\n build_hasher,\n\n )\n\n } else {\n\n partitions.sort();\n\n Partitioner::new_sorted_with_hasher(partitions, build_hasher)\n\n }\n\n}\n\n\n", "file_path": "nakadion/src/publisher/partitioner.rs", "rank": 14, "score": 162846.54647879695 }, { "content": "#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\nstruct EventTypeSchemaInputSer {\n\n #[serde(rename = \"type\")]\n\n pub schema_type: SchemaType,\n\n pub schema: SchemaSyntax,\n\n}\n\n\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 15, "score": 162503.66343914572 }, { "content": "#[inline]\n\nfn is_whitespace(b: u8) -> bool {\n\n b == b' ' || b == b'\\t' || b == b'\\n' || b == b'\\r'\n\n}\n\n\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 16, "score": 162143.42095085979 }, { 
"content": "pub fn safe_commit_timeout(secs: u32) -> Duration {\n\n if secs > 1 {\n\n Duration::from_secs(u64::from(secs - 1))\n\n } else {\n\n Duration::from_millis(500)\n\n }\n\n}\n", "file_path": "nakadion/src/internals/background_committer/pending_cursors.rs", "rank": 17, "score": 160830.08375303764 }, { "content": "/// Publishes a batch of Events.\n\n///\n\n/// All items must be of the EventType identified by name.\n\n///\n\n/// Reception of Events will always respect the configuration of its EventType with respect to\n\n/// validation, enrichment and partition. The steps performed on reception of incoming message\n\n/// are:\n\n///\n\n/// 1. Every validation rule specified for the EventType will be checked in order against the\n\n/// incoming Events. Validation rules are evaluated in the order they are defined and the Event\n\n/// is rejected in the first case of failure. If the offending validation rule provides\n\n/// information about the violation it will be included in the BatchItemResponse. If the\n\n/// EventType defines schema validation it will be performed at this moment. The size of each\n\n/// Event will also be validated. The maximum size per Event is configured by the administrator.\n\n/// We use the batch input to measure the size of events, so unnecessary spaces, tabs, and\n\n/// carriage returns will count towards the event size.\n\n///\n\n/// 2. Once the validation succeeded, the content of the Event is updated according to the\n\n/// enrichment rules in the order the rules are defined in the EventType. No preexisting\n\n/// value might be changed (even if added by an enrichment rule). Violations on this will force\n\n/// the immediate rejection of the Event. The invalid overwrite attempt will be included in\n\n/// the item’s BatchItemResponse object.\n\n///\n\n/// 3. The incoming Event’s relative ordering is evaluated according to the rule on the\n\n/// EventType. 
Failure to evaluate the rule will reject the Event.\n\n///\n\n/// Given the batched nature of this operation, any violation on validation or failures on\n\n/// enrichment or partitioning will cause the whole batch to be rejected, i.e. none of its\n\n/// elements are pushed to the underlying broker.\n\n///\n\n/// Failures on writing of specific partitions to the broker might influence other\n\n/// partitions. Failures at this stage will fail only the affected partitions.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#/event-types/name/events_post)\n\npub trait PublishApi {\n\n /// Publishes a batch of Events of this EventType. All items must be of the EventType\n\n /// identified by name.\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/event-types/name/events_post)\n\n fn publish_events_batch<'a, B: Into<Bytes>, T: Into<FlowId>>(\n\n &'a self,\n\n event_type: &'a EventTypeName,\n\n events: B,\n\n flow_id: T,\n\n ) -> PublishFuture<'a>;\n\n}\n\n\n", "file_path": "nakadion/src/api/mod.rs", "rank": 18, "score": 160651.6400441067 }, { "content": "/// Instruments a `Publisher`\n\npub trait Instruments {\n\n /// All batch items have been successfully submitted\n\n fn published(&self, elapsed: Duration);\n\n /// Not all batch items have been successfully submitted\n\n fn publish_failed(&self, elapsed: Duration);\n\n /// Stats after each attempt to submit a batch\n\n fn batch_stats(&self, stats: BatchStats);\n\n}\n\n\n", "file_path": "nakadion/src/publisher/instrumentation/mod.rs", "rank": 19, "score": 160638.588415049 }, { "content": "pub trait SubscriptionApi {\n\n /// This endpoint creates a subscription for EventTypes.\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/subscriptions_post)\n\n fn create_subscription<T: Into<FlowId>>(\n\n &self,\n\n input: &SubscriptionInput,\n\n flow_id: T,\n\n ) -> ApiFuture<Subscription>;\n\n\n\n /// Returns a subscription identified by id.\n\n ///\n\n /// See also 
[Nakadi Manual](https://nakadi.io/manual.html#/subscriptions/subscription_id_get)\n\n fn get_subscription<T: Into<FlowId>>(\n\n &self,\n\n id: SubscriptionId,\n\n flow_id: T,\n\n ) -> ApiFuture<Subscription>;\n\n\n\n /// Lists all subscriptions that exist in a system.\n", "file_path": "nakadion/src/api/mod.rs", "rank": 20, "score": 160638.588415049 }, { "content": "pub trait StreamingEssentials:\n\n SubscriptionStreamApi + SubscriptionCommitApi + Send + Sync + 'static\n\n{\n\n}\n\n\n\nimpl<T> StreamingEssentials for T where\n\n T: SubscriptionStreamApi + SubscriptionCommitApi + Send + Sync + 'static\n\n{\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct IoError(String);\n\n\n\nimpl IoError {\n\n pub fn new<T: Into<String>>(s: T) -> Self {\n\n Self(s.into())\n\n }\n\n\n\n /// Keep private, needs improvement of IoError\n\n pub(crate) fn into_string(self) -> String {\n", "file_path": "nakadion/src/components/mod.rs", "rank": 21, "score": 160638.588415049 }, { "content": "pub trait MonitoringApi {\n\n /// Deletes an EventType identified by its name.\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/event-types/name/cursor-distances_post)\n\n fn get_cursor_distances<T: Into<FlowId>>(\n\n &self,\n\n name: &EventTypeName,\n\n query: &[CursorDistanceQuery],\n\n flow_id: T,\n\n ) -> ApiFuture<Vec<CursorDistanceResult>>;\n\n\n\n /// Used when a consumer wants to know how far behind\n\n /// in the stream its application is lagging.\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/event-types/name/cursors-lag_post)\n\n fn get_cursor_lag<T: Into<FlowId>>(\n\n &self,\n\n name: &EventTypeName,\n\n cursors: &[Cursor],\n\n flow_id: T,\n", "file_path": "nakadion/src/api/mod.rs", "rank": 22, "score": 160638.588415049 }, { "content": "#[test]\n\nfn deserialize_subscription_cursor() {\n\n use crate::nakadi_types::subscription::SubscriptionCursor;\n\n let line_sample = r#\"{\"cursor\":{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + 
r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"}}\"#;\n\n\n\n let line = EventStreamBatch::try_from_slice(line_sample.as_bytes()).unwrap();\n\n\n\n let _ = line.cursor_deserialized::<SubscriptionCursor>().unwrap();\n\n}\n", "file_path": "nakadion/src/components/streams/event_stream/mod.rs", "rank": 23, "score": 160458.46857595874 }, { "content": "/// Provides an `AccessToken`.\n\n///\n\n/// Authentication can be disabled by returning `None` on `get_token`.\n\npub trait ProvidesAccessToken {\n\n /// Get a new `Token`. Return `None` to disable authentication.\n\n fn get_token(&self) -> TokenFuture;\n\n}\n\n\n\npub struct AccessTokenProvider {\n\n inner: Arc<dyn ProvidesAccessToken + Send + Sync + 'static>,\n\n}\n\n\n\nimpl AccessTokenProvider {\n\n pub fn new<P>(provider: P) -> Self\n\n where\n\n P: ProvidesAccessToken + Send + Sync + 'static,\n\n {\n\n Self {\n\n inner: Arc::new(provider),\n\n }\n\n }\n\n\n\n /// Creates a new `AccessTokenProvider` from the environment\n", "file_path": "nakadion/src/auth/mod.rs", "rank": 24, "score": 157191.01654062077 }, { "content": "pub trait SubscriptionStreamApi {\n\n /// Starts a new stream for reading events from this subscription.\n\n ///\n\n /// Starts a new stream for reading events from this subscription. The minimal consumption unit is a partition, so\n\n /// it is possible to start as many streams as the total number of partitions in event-types of this subscription.\n\n /// The position of the consumption is managed by Nakadi. The client is required to commit the cursors he gets in\n\n /// a stream.\n\n ///\n\n /// If you create a stream without specifying the partitions to read from - Nakadi will automatically assign\n\n /// partitions to this new stream. By default Nakadi distributes partitions among clients trying to give an equal\n\n /// number of partitions to each client (the amount of data is not considered). 
This is default and the most common\n\n /// way to use streaming endpoint.\n\n ///\n\n /// It is also possible to directly request specific partitions to be delivered within the stream. If these\n\n /// partitions are already consumed by another stream of this subscription - Nakadi will trigger a rebalance that\n\n /// will assign these partitions to the new stream. The request will fail if user directly requests partitions that\n\n /// are already requested directly by another active stream of this subscription. The overall picture will be the\n\n /// following: streams which directly requested specific partitions will consume from them; streams that didn’t\n\n /// specify which partitions to consume will consume partitions that left - Nakadi will autobalance free partitions\n\n /// among these streams (balancing happens by number of partitions).\n", "file_path": "nakadion/src/api/mod.rs", "rank": 25, "score": 157186.33738890232 }, { "content": "pub trait SubscriptionCommitApi {\n\n /// Endpoint for committing offsets of the subscription.\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/subscriptions/subscription_id/cursors_post)\n\n fn commit_cursors<T: Into<FlowId>>(\n\n &self,\n\n id: SubscriptionId,\n\n stream: StreamId,\n\n cursors: &[SubscriptionCursor],\n\n flow_id: T,\n\n ) -> ApiFuture<CursorCommitResults>;\n\n}\n\n\n\n/// A stream of of chunks directly from Nakadi\n\npub struct SubscriptionStreamChunks {\n\n pub stream_id: StreamId,\n\n pub chunks: BytesStream,\n\n}\n\n\n\nimpl<'a> SubscriptionStreamChunks {\n\n pub fn parts(self) -> (StreamId, BytesStream) {\n\n (self.stream_id, self.chunks)\n\n }\n\n}\n", "file_path": "nakadion/src/api/mod.rs", "rank": 26, "score": 157186.33738890232 }, { "content": "pub trait SchemaRegistryApi {\n\n /// Returns a list of all registered EventTypes\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/event-types_get)\n\n fn list_event_types<T: Into<FlowId>>(&self, flow_id: T) -> 
ApiFuture<Vec<EventType>>;\n\n\n\n /// Creates a new EventType.\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/event-types_post)\n\n fn create_event_type<T: Into<FlowId>>(\n\n &self,\n\n event_type: &EventTypeInput,\n\n flow_id: T,\n\n ) -> ApiFuture<()>;\n\n\n\n /// Returns the EventType identified by its name.\n\n ///\n\n /// See also [Nakadi Manual](https://nakadi.io/manual.html#/event-types/name_get)\n\n fn get_event_type<T: Into<FlowId>>(\n\n &self,\n", "file_path": "nakadion/src/api/mod.rs", "rank": 27, "score": 157186.33738890232 }, { "content": "trait ConsumerInternal: fmt::Debug {\n\n fn start(&self, consumer_state: ConsumerState) -> BoxFuture<'static, ConsumerAbort>;\n\n\n\n fn config(&self) -> &Config;\n\n\n\n fn logging_adapter(&self) -> Arc<dyn LoggingAdapter>;\n\n\n\n fn add_lifecycle_listener(&self, listener: Box<dyn LifecycleListener>);\n\n}\n\n\n", "file_path": "nakadion/src/consumer/mod.rs", "rank": 28, "score": 154292.94838454408 }, { "content": "#[test]\n\nfn a_schema_input_can_be_parsed() {\n\n let input = EventTypeSchemaInput::json_schema_parsed(r#\"{\"description\":\"test event b\",\"properties\":{\"count\":{\"type\":\"integer\"}},\"required\":[\"count\"]}\"#).unwrap();\n\n assert_eq!(input.schema_type(), SchemaType::JsonSchema);\n\n}\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 29, "score": 150298.78171554237 }, { "content": "fn pos_to_str(pos: (usize, usize), bytes: &[u8]) -> &str {\n\n let (a, b) = pos;\n\n let slice = &bytes[a..b];\n\n unsafe { std::str::from_utf8_unchecked(slice) }\n\n}\n\n\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 30, "score": 145468.66343361078 }, { "content": "fn set_stream_commit_timeout(commit_config: &mut CommitConfig, connect_config: &ConnectConfig) {\n\n if commit_config.stream_commit_timeout_secs.is_none() {\n\n let timeout = connect_config\n\n .stream_parameters\n\n .commit_timeout_secs\n\n 
.unwrap_or_default();\n\n commit_config.stream_commit_timeout_secs = Some(timeout);\n\n }\n\n}\n", "file_path": "nakadion/src/consumer/config_types/mod.rs", "rank": 31, "score": 141530.82856805628 }, { "content": "fn is_retry_on_api_error_allowed(api_error: &NakadiApiError, retry_on_auth_errors: bool) -> bool {\n\n if api_error.is_io_error() || api_error.is_server_error() {\n\n true\n\n } else {\n\n api_error.is_auth_error() && retry_on_auth_errors\n\n }\n\n}\n\n\n", "file_path": "nakadion/src/publisher/mod.rs", "rank": 32, "score": 136721.87916994697 }, { "content": "fn find_next_obj(json_bytes: &[u8], start: usize) -> Result<(usize, usize), ParseBatchError> {\n\n if start == json_bytes.len() {\n\n return Err(\"Reached end\".into());\n\n }\n\n\n\n let mut idx_begin = start;\n\n while idx_begin < json_bytes.len() {\n\n if json_bytes[idx_begin] == OBJ_OPEN {\n\n break;\n\n }\n\n idx_begin += 1;\n\n }\n\n\n\n if idx_begin >= json_bytes.len() - 1 {\n\n return Err(\"Not an object. Missing starting `{{`.\".into());\n\n }\n\n\n\n let mut idx_end = idx_begin + 1;\n\n let mut level = 0;\n\n while idx_end < json_bytes.len() {\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 33, "score": 135195.14021165107 }, { "content": "/// Returns `None` if retries were disabled\n\nfn get_events_for_retry(\n\n failure: &SubmissionFailure,\n\n events: &[Bytes],\n\n strategy: SubmissionFailureStrategy,\n\n) -> Result<Option<Vec<Bytes>>, Error> {\n\n match strategy {\n\n SubmissionFailureStrategy::Abort => Ok(None),\n\n SubmissionFailureStrategy::RetryNotSubmitted => {\n\n if events.len() != failure.len() {\n\n return Err(Error::new(\n\n \"The number of events did not match the number of batch response items\",\n\n ));\n\n }\n\n\n\n let mut to_retry = Vec::new();\n\n for (batch_rsp, event_bytes) in failure.iter().zip(events.iter()) {\n\n if batch_rsp.publishing_status != PublishingStatus::Submitted {\n\n to_retry.push(event_bytes.clone());\n\n }\n\n 
}\n\n Ok(Some(to_retry))\n\n }\n\n SubmissionFailureStrategy::RetryAll => Ok(Some(events.to_vec())),\n\n }\n\n}\n\n\n", "file_path": "nakadion/src/publisher/mod.rs", "rank": 34, "score": 134933.89504904786 }, { "content": "fn next_string(json_bytes: &[u8], start: usize) -> Result<Option<(usize, usize)>, ParseBatchError> {\n\n if start == json_bytes.len() {\n\n return Ok(None);\n\n }\n\n\n\n let mut idx_begin = start;\n\n while idx_begin < json_bytes.len() {\n\n if json_bytes[idx_begin] == DOUBLE_QUOTE {\n\n break;\n\n }\n\n idx_begin += 1;\n\n }\n\n\n\n if idx_begin == json_bytes.len() {\n\n return Ok(None);\n\n }\n\n\n\n if idx_begin >= json_bytes.len() - 1 {\n\n return Err(format!(\"Not a string. Missing starting `\\\"` after pos {}\", start).into());\n\n }\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 35, "score": 134506.2756567918 }, { "content": "// This function clones the ast before deserializing... but we are in an\n\n// exceptional case anyways...\n\nfn try_deserialize_individually<T: DeserializeOwned + Send + 'static>(\n\n events: &[u8],\n\n) -> Result<Vec<EventDeserializationResult<T>>, serde_json::Error> {\n\n let deserialized_json_asts: Vec<serde_json::Value> = serde_json::from_slice(events)?;\n\n\n\n let mut results = Vec::with_capacity(deserialized_json_asts.len());\n\n\n\n for ast in deserialized_json_asts {\n\n let ast2 = ast.clone();\n\n match serde_json::from_value(ast) {\n\n Ok(event) => results.push(Ok(event)),\n\n Err(err) => results.push(Err((ast2, err))),\n\n }\n\n }\n\n\n\n Ok(results)\n\n}\n", "file_path": "nakadion/src/handler/typed.rs", "rank": 36, "score": 134265.13221846253 }, { "content": "pub fn parse_line<T: AsRef<[u8]>>(json_bytes: T) -> Result<LineItems, ParseBatchError> {\n\n let mut line_items = LineItems::default();\n\n let json_bytes = json_bytes.as_ref();\n\n\n\n let mut next_byte = 0;\n\n while next_byte < json_bytes.len() {\n\n if let Some(end) = parse_next_item(json_bytes, 
next_byte, &mut line_items)? {\n\n next_byte = end + 1;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n if line_items.cursor.line_position.1 == 0 {\n\n Err(\"No Cursor\".into())\n\n } else {\n\n Ok(line_items)\n\n }\n\n}\n\n\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 37, "score": 126828.89899883933 }, { "content": "/// Can be assigned a partition\n\npub trait PartitionAssignable {\n\n /// Assign a partition.\n\n fn assign_partition(&mut self, partition: &PartitionId);\n\n}\n\n\n\nimpl<D> PartitionAssignable for BusinessEventPub<D> {\n\n fn assign_partition(&mut self, partition: &PartitionId) {\n\n self.metadata.partition = Some(partition.clone());\n\n }\n\n}\n\n\n\nimpl<D> PartitionAssignable for DataChangeEventPub<D> {\n\n fn assign_partition(&mut self, partition: &PartitionId) {\n\n self.metadata.partition = Some(partition.clone());\n\n }\n\n}\n\n\n\nimpl<D> PartitionKeyExtractable for BusinessEventPub<D>\n\nwhere\n\n D: PartitionKeyExtractable,\n", "file_path": "nakadion/src/publisher/partitioner.rs", "rank": 38, "score": 125346.95907142194 }, { "content": "#[test]\n\nfn test_parse_cursor() {\n\n let cursor_sample = r#\"{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"}\"#;\n\n\n\n let mut cursor: Cursor = Default::default();\n\n\n\n let cursor_sample = cursor_sample.as_str();\n\n parse_cursor_fields(cursor_sample, &mut cursor, 0, cursor_sample.len()).unwrap();\n\n\n\n assert_eq!(cursor.partition_str(cursor_sample.as_ref()), \"6\");\n\n assert_eq!(cursor.offset_str(cursor_sample.as_ref()), \"543\");\n\n assert_eq!(\n\n cursor.event_type_str(cursor_sample.as_ref()),\n\n \"order.ORDER_RECEIVED\"\n\n );\n\n assert_eq!(\n\n cursor.cursor_token_str(cursor_sample.as_ref()),\n\n \"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"\n\n );\n\n}\n\n\n", "file_path": 
"nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 39, "score": 125283.21907632411 }, { "content": "#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n pub struct SchemaSyntax(String, env=\"EVENT_TYPE_SCHEMA_SYNTAX\");\n\n}\n\n\n\nnew_type! {\n\n #[doc=\"Number of milliseconds that Nakadi stores events published to this event type.\\n\\n\\\n\n See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventTypeOptions*retention_time)\"]\n\n #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]\n\n pub copy struct RetentionTime(u64);\n\n}\n\n\n\nimpl RetentionTime {\n\n pub fn to_duration(self) -> Duration {\n\n Duration::from_millis(self.0)\n\n }\n\n}\n\n\n\n/// Additional parameters for tuning internal behavior of Nakadi.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventTypeOptions)\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 40, "score": 124655.4866002394 }, { "content": "#[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)]\n\npub struct EventTypeOptions {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub retention_time: Option<RetentionTime>,\n\n}\n\n\n\n#[derive(Debug, Default, Clone, Serialize, Deserialize)]\n\npub struct EventTypeAuthorization {\n\n #[serde(default)]\n\n pub admins: AuthorizationAttributes,\n\n #[serde(default)]\n\n pub readers: AuthorizationAttributes,\n\n #[serde(default)]\n\n pub writers: AuthorizationAttributes,\n\n}\n\n\n\nimpl EventTypeAuthorization {\n\n pub fn new<A, R, W>(admins: A, readers: R, writers: W) -> Self\n\n where\n\n A: Into<AuthorizationAttributes>,\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 41, "score": 124652.94439385147 }, { "content": "/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventTypeStatistics)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct EventTypeStatistics {\n\n /// 
Write rate for events of this EventType. This rate encompasses all producers of this\n\n /// EventType for a Nakadi cluster.\n\n ///\n\n /// Measured in event count per minute.\n\n pub messages_per_minute: u64,\n\n /// Average message size for each Event of this EventType. Includes in the count the whole serialized\n\n /// form of the event, including metadata.\n\n /// Measured in bytes.\n\n pub message_size: u64,\n\n /// Amount of parallel readers (consumers) to this EventType.\n\n pub read_parallelism: u64,\n\n /// Amount of parallel writers (producers) to this EventType.\n\n pub write_parallelism: u64,\n\n}\n\n\n\nimpl EventTypeStatistics {\n\n pub fn new(\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 42, "score": 124650.76887587392 }, { "content": "/// metadata_enrichment strategy. For `undefined` event types it’s not possible to use this\n\n/// strategy, since metadata field is not required.\n\n///\n\n/// See documentation for the write operation for details on behaviour in case of unsuccessful\n\n/// enrichment.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType*enrichment_strategies)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum EnrichmentStrategy {\n\n MetadataEnrichment,\n\n}\n\n\n\nimpl Default for EnrichmentStrategy {\n\n fn default() -> Self {\n\n EnrichmentStrategy::MetadataEnrichment\n\n }\n\n}\n\n\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 43, "score": 124649.90464907343 }, { "content": "/// Indicates the fields used for evaluation the partition of Events of this type.\n\n///\n\n/// If this is set it MUST be a valid required field as defined in the schema.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType*partition_key_fields)\n\n#[derive(Debug, Default, Clone, Serialize, Deserialize)]\n\npub struct 
PartitionKeyFields(Vec<PartitionKeyField>);\n\n\n\nimpl PartitionKeyFields {\n\n pub fn new<I>(items: I) -> Self\n\n where\n\n I: IntoIterator,\n\n I::Item: Into<PartitionKeyField>,\n\n {\n\n let items = items.into_iter().map(|it| it.into()).collect();\n\n Self(items)\n\n }\n\n\n\n pub fn partition_key<T: Into<PartitionKeyField>>(mut self, v: T) -> Self {\n\n self.push(v);\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 44, "score": 124647.87735685961 }, { "content": " /// latest schema version as long as they follow the robustness principle.\n\n Forward,\n\n /// Any schema modification is accepted, even if it might break existing producers or consumers. When\n\n /// validating events, no additional properties are accepted unless explicitly stated in the schema.\n\n None,\n\n}\n\n\n\nimpl Default for CompatibilityMode {\n\n fn default() -> Self {\n\n CompatibilityMode::Forward\n\n }\n\n}\n\n\n\nnew_type! {\n\n#[doc=\"Part of `PartitionKeyFields`\\n\"]\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n pub struct PartitionKeyField(String, env=\"EVENT_TYPE_PARTITION_KEY_FIELD\");\n\n}\n\n\n\n/// Required when 'partition_resolution_strategy' is set to ‘hash’. Must be absent otherwise.\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 45, "score": 124647.26991855394 }, { "content": " #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]\n\n pub struct EventTypeName(String, env=\"EVENT_TYPE_NAME\");\n\n}\n\n\n\n/// Defines the category of this EventType.\n\n///\n\n/// The value set will influence, if not set otherwise, the default set of\n\n/// validations, enrichment-strategies, and the effective schema for validation.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType*category)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum Category {\n\n /// No predefined changes apply. 
The effective schema for the validation is\n\n /// exactly the same as the EventTypeSchema.\n\n Undefined,\n\n /// Events of this category will be DataChangeEvents. The effective schema during\n\n /// the validation contains metadata, and adds fields data_op and data_type. The\n\n /// passed EventTypeSchema defines the schema of data.\n\n Data,\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 46, "score": 124646.46576246343 }, { "content": " }\n\n}\n\n\n\n/// The type of schema definition. Currently only json_schema (JSON Schema v04) is supported, but in the\n\n/// future there could be others.\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum SchemaType {\n\n #[serde(rename = \"json_schema\")]\n\n JsonSchema,\n\n}\n\n\n\n/// The most recent schema for this EventType. Submitted events will be validated against it.\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct EventTypeSchema {\n\n /// This field is automatically generated by Nakadi. Values are based on semantic versioning. Changes to title\n\n /// or description are considered PATCH level changes. Adding new optional fields is considered a MINOR level\n\n /// change. 
All other changes are considered MAJOR level.\n\n pub version: String,\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 47, "score": 124646.41699907464 }, { "content": "/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType*audience)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\npub enum EventTypeAudience {\n\n #[serde(rename = \"component-internal\")]\n\n ComponentInternal,\n\n #[serde(rename = \"business-unit-internal\")]\n\n BusinessUnitInternal,\n\n #[serde(rename = \"company-internal\")]\n\n CompanyInternal,\n\n #[serde(rename = \"external-partner\")]\n\n ExternalPartner,\n\n #[serde(rename = \"external-public\")]\n\n ExternalPublic,\n\n}\n\n\n\n/// Determines the enrichment to be performed on an Event upon reception. Enrichment is\n\n/// performed once upon reception (and after validation) of an Event and is only possible on\n\n/// fields that are not defined on the incoming Event.\n\n///\n\n/// For event types in categories `business` or `data` it’s mandatory to use\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 48, "score": 124644.71288005049 }, { "content": "//! Types for defining and monitoring event types\n\nuse std::time::Duration;\n\n\n\nuse chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::misc::{AuthorizationAttribute, AuthorizationAttributes, OwningApplication};\n\n\n\nmod event_type_input;\n\npub use event_type_input::*;\n\n\n\nnew_type! {\n\n #[doc=r#\"Name of an EventType. The name is constrained by a regular expression.\n\n\n\nNote: the name can encode the owner/responsible for this EventType and ideally should\n\nfollow a common pattern that makes it easy to read and understand, but this level of\n\nstructure is not enforced. 
For example a team name and data type can be used such as\n\n‘acme-team.price-change’.\n\n\n\nSee also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType*name)\"#]\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 49, "score": 124642.41030215217 }, { "content": " self.0.is_empty()\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n}\n\n\n\nimpl<A> From<A> for PartitionKeyFields\n\nwhere\n\n A: Into<PartitionKeyField>,\n\n{\n\n fn from(k: A) -> Self {\n\n Self(vec![k.into()])\n\n }\n\n}\n\n\n\nimpl<A, B> From<(A, B)> for PartitionKeyFields\n\nwhere\n\n A: Into<PartitionKeyField>,\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 50, "score": 124642.14427125541 }, { "content": " /// Events of this category will be BusinessEvents. The effective schema for\n\n /// validation contains metadata and any additionally defined properties passed in the\n\n /// EventTypeSchema directly on top level of the Event. If name conflicts arise, creation\n\n /// of this EventType will be rejected.\n\n Business,\n\n}\n\n\n\n/// Determines how the assignment of the event to a partition should be handled.\n\n///\n\n/// The default is `random`.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#/registry/partition-strategies_get)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum PartitionStrategy {\n\n /// Resolution of the target partition happens randomly (events are evenly\n\n /// distributed on the topic’s partitions).\n\n Random,\n\n /// Resolution of the partition follows the computation of a hash from the value of\n\n /// the fields indicated in the EventType’s partition_key_fields, guaranteeing that Events\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 51, "score": 124641.92123232398 }, { "content": " &self.0\n\n }\n\n}\n\n\n\n/// Event type cleanup policy. 
There are two possible values.\n\n///\n\n/// It’s not possible to change the value of this field for existing event type.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType*cleanup_policy)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum CleanupPolicy {\n\n /// This cleanup policy will delete old events after retention time expires. Nakadi guarantees that each\n\n /// event will be available for at least the retention time period. However Nakadi doesn’t guarantee that event\n\n /// will be deleted right after retention time expires.\n\n Delete,\n\n /// This cleanup policy will keep only the latest event for each event key. The compaction is performed per\n\n /// partition, there is no compaction across partitions. The key that will be used as a compaction key should be\n\n /// specified in ‘partition_compaction_key’ field of event metadata. This cleanup policy is not available for\n\n /// ‘undefined’ category of event types.\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 52, "score": 124641.37710254395 }, { "content": " messages_per_minute: u64,\n\n message_size: u64,\n\n read_parallelism: u64,\n\n write_parallelism: u64,\n\n ) -> Self {\n\n Self {\n\n messages_per_minute,\n\n message_size,\n\n read_parallelism,\n\n write_parallelism,\n\n }\n\n }\n\n}\n\n\n\n/// Definition of an event type\n\n///\n\n/// This struct is only used for querying from Nakadi.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType)\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 53, "score": 124640.97409201322 }, { "content": " pub schema: EventTypeSchema,\n\n /// Required when ‘partition_resolution_strategy’ is set to ‘hash’. 
Must be absent otherwise.\n\n /// Indicates the fields used for evaluation the partition of Events of this type.\n\n ///\n\n /// If this is set it MUST be a valid required field as defined in the schema.\n\n #[serde(default)]\n\n pub partition_key_fields: PartitionKeyFields,\n\n /// Event type cleanup policy. There are two possible values:\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub cleanup_policy: Option<CleanupPolicy>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub default_statistic: Option<EventTypeStatistics>,\n\n #[serde(default)]\n\n pub options: EventTypeOptions,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub authorization: Option<EventTypeAuthorization>,\n\n pub audience: Option<EventTypeAudience>,\n\n /// This is only an informational field. The events are delivered to consumers in the order they were published.\n\n /// No reordering is done by Nakadi.\n\n ///\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 54, "score": 124640.68660530497 }, { "content": "/// It’s designed to be flexible enough so that producers can evolve their schemas while not\n\n/// inadvertently breaking existent consumers.\n\n///\n\n/// Once defined, the compatibility mode is fixed, since otherwise it would break a predefined contract,\n\n/// declared by the producer.\n\n///\n\n/// The default is `forward`.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType*compatibility_mode)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum CompatibilityMode {\n\n /// Consumers can reliably parse events produced under different versions. Every event published\n\n /// since the first version is still valid based on the newest schema. 
When in compatible mode, it’s allowed to\n\n /// add new optional properties and definitions to an existing schema, but no other changes are allowed.\n\n /// Under this mode, the following json-schema attributes are not supported: `not`, `patternProperties`,\n\n /// `additionalProperties` and `additionalItems`. When validating events, additional properties is `false`.\n\n Compatible,\n\n /// Compatible schema changes are allowed. It’s possible to use the full json schema specification\n\n /// for defining schemas. Consumers of forward compatible event types can safely read events tagged with the\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 55, "score": 124639.17480160238 }, { "content": " self\n\n }\n\n\n\n pub fn push<T: Into<PartitionKeyField>>(&mut self, v: T) {\n\n self.0.push(v.into());\n\n }\n\n\n\n pub fn into_inner(self) -> Vec<PartitionKeyField> {\n\n self.0\n\n }\n\n\n\n pub fn iter(&self) -> impl Iterator<Item = &PartitionKeyField> {\n\n self.0.iter()\n\n }\n\n\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut PartitionKeyField> {\n\n self.0.iter_mut()\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 56, "score": 124638.2371149163 }, { "content": "pub struct EventType {\n\n /// Name of this EventType. The name is constrained by a regular expression.\n\n ///\n\n /// Note: the name can encode the owner/responsible for this EventType and ideally should\n\n /// follow a common pattern that makes it easy to read and understand, but this level of\n\n /// structure is not enforced. 
For example a team name and data type can be used such as\n\n /// ‘acme-team.price-change’.\n\n pub name: EventTypeName,\n\n /// Indicator of the application owning this EventType.\n\n pub owning_application: Option<OwningApplication>,\n\n /// Defines the category of this EventType.\n\n ///\n\n /// The value set will influence, if not set otherwise, the default set of\n\n /// validations, enrichment-strategies, and the effective schema for validation.\n\n pub category: Category,\n\n /// Determines the enrichment to be performed on an Event upon reception. Enrichment is\n\n /// performed once upon reception (and after validation) of an Event and is only possible on\n\n /// fields that are not defined on the incoming Event.\n\n ///\n\n /// For event types in categories ‘business’ or ‘data’ it’s mandatory to use\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 57, "score": 124637.42916950537 }, { "content": " /// Creation timestamp of the schema. This is generated by Nakadi. It should not be\n\n /// specified when updating a schema and sending it may result in a client error.\n\n pub created_at: DateTime<Utc>,\n\n ///The type of schema definition. Currently only json_schema (JSON Schema v04) is supported, but in the\n\n ///future there could be others.\n\n #[serde(rename = \"type\")]\n\n pub schema_type: SchemaType,\n\n /// The schema as string in the syntax defined in the field type. Failure to respect the\n\n /// syntax will fail any operation on an EventType.\n\n pub schema: SchemaSyntax,\n\n}\n\n\n\nnew_type! 
{\n\n#[doc=r#\"\n\nThe schema as string in the syntax defined in the field type.\n\n\n\nFailure to respect the\n\nsyntax will fail any operation on an EventType.\n\n\n\n\"#]\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 58, "score": 124635.33669666144 }, { "content": " B: Into<PartitionKeyField>,\n\n{\n\n fn from((a, b): (A, B)) -> Self {\n\n Self(vec![a.into(), b.into()])\n\n }\n\n}\n\n\n\nimpl<A, B, C> From<(A, B, C)> for PartitionKeyFields\n\nwhere\n\n A: Into<PartitionKeyField>,\n\n B: Into<PartitionKeyField>,\n\n C: Into<PartitionKeyField>,\n\n{\n\n fn from((a, b, c): (A, B, C)) -> Self {\n\n Self(vec![a.into(), b.into(), c.into()])\n\n }\n\n}\n\n\n\nimpl AsRef<[PartitionKeyField]> for PartitionKeyFields {\n\n fn as_ref(&self) -> &[PartitionKeyField] {\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 59, "score": 124634.87848024837 }, { "content": " self\n\n }\n\n\n\n pub fn add_admin<T: Into<AuthorizationAttribute>>(&mut self, admin: T) {\n\n self.admins.push(admin.into())\n\n }\n\n pub fn add_reader<T: Into<AuthorizationAttribute>>(&mut self, reader: T) {\n\n self.readers.push(reader.into())\n\n }\n\n pub fn add_writer<T: Into<AuthorizationAttribute>>(&mut self, writer: T) {\n\n self.writers.push(writer.into())\n\n }\n\n}\n\n\n\n/// Intended target audience of the event type. Relevant for standards around quality of design and documentation,\n\n/// reviews, discoverability, changeability, and permission granting. 
See the guidelines\n\n/// https://opensource.zalando.com/restful-api-guidelines/#219\n\n///\n\n/// This attribute adds no functionality and is used only to inform users about the usage scope of the event type.\n\n///\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 60, "score": 124634.62276050159 }, { "content": " R: Into<AuthorizationAttributes>,\n\n W: Into<AuthorizationAttributes>,\n\n {\n\n Self {\n\n admins: admins.into(),\n\n readers: readers.into(),\n\n writers: writers.into(),\n\n }\n\n }\n\n\n\n pub fn admin<T: Into<AuthorizationAttribute>>(mut self, admin: T) -> Self {\n\n self.admins.push(admin.into());\n\n self\n\n }\n\n pub fn reader<T: Into<AuthorizationAttribute>>(mut self, reader: T) -> Self {\n\n self.readers.push(reader.into());\n\n self\n\n }\n\n pub fn writer<T: Into<AuthorizationAttribute>>(mut self, writer: T) -> Self {\n\n self.writers.push(writer.into());\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 61, "score": 124634.02541704231 }, { "content": " ///\n\n /// The compaction can be not applied to events that were published recently and located at the head of the\n\n /// queue, which means that the actual amount of events received by consumers can be different depending on time\n\n /// when the consumption happened.\n\n ///\n\n /// When using ‘compact’ cleanup policy user should consider that different Nakadi endpoints showing the amount\n\n /// of events will actually show the original amount of events published, not the actual amount of events that\n\n /// are currently there.\n\n /// E.g. 
subscription /stats endpoint will show the value ‘unconsumed_events’ - but that may not match with the\n\n /// actual amount of events unconsumed in that subscription as ‘compact’ cleanup policy may delete older events\n\n /// in the middle of queue if there is a newer event for the same key published.\n\n ///\n\n /// For more details about compaction implementation please read the documentation of Log Compaction in\n\n /// [Kafka](https://kafka.apache.org/documentation/#compaction), Nakadi currently relies on this implementation.\n\n Compact,\n\n}\n\n\n\nimpl Default for CleanupPolicy {\n\n fn default() -> Self {\n\n CleanupPolicy::Delete\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 62, "score": 124633.58466340434 }, { "content": " /// metadata_enrichment strategy. For ‘undefined’ event types it’s not possible to use this\n\n /// strategy, since metadata field is not required.\n\n ///\n\n /// See documentation for the write operation for details on behaviour in case of unsuccessful\n\n /// enrichment.\n\n #[serde(default)]\n\n pub enrichment_strategies: Vec<EnrichmentStrategy>,\n\n /// Determines how the assignment of the event to a partition should be handled.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub partition_strategy: Option<PartitionStrategy>,\n\n /// Compatibility mode provides a mean for event owners to evolve their schema, given changes respect the\n\n /// semantics defined by this field.\n\n ///\n\n /// It’s designed to be flexible enough so that producers can evolve their schemas while not\n\n /// inadvertently breaking existent consumers.\n\n ///\n\n /// Once defined, the compatibility mode is fixed, since otherwise it would break a predefined contract,\n\n /// declared by the producer.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub compatibility_mode: Option<CompatibilityMode>,\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 63, "score": 124633.2161184846 }, { "content": " /// This 
field is useful in case the producer wants to communicate the complete order accross all the events\n\n /// published to all partitions. This is the case when there is an incremental generator on the producer side,\n\n /// for example.\n\n ///\n\n /// It differs from partition_key_fields in the sense that it’s not used for partitioning (known as sharding in\n\n /// some systems). The order indicated by ordering_key_fields can also differ from the order the events are in\n\n /// each partition, in case of out-of-order submission.\n\n ///\n\n /// In most cases, this would have just a single item (the path of the field\n\n /// by which this is to be ordered), but can have multiple items, in which case\n\n /// those are considered as a compound key, with lexicographic ordering (first\n\n /// item is most significant).\n\n #[serde(default)]\n\n pub ordering_key_fields: Vec<String>,\n\n #[serde(default)]\n\n pub ordering_instance_ids: Vec<String>,\n\n /// Date and time when this event type was created.\n\n pub created_at: DateTime<Utc>,\n\n /// Date and time when this event type was updated.\n\n pub updated_at: DateTime<Utc>,\n\n}\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 64, "score": 124631.4547801968 }, { "content": " /// with same values on those fields end in the same partition. Given the event type’s category\n\n /// is DataChangeEvent, field path is considered relative to “data”.\n\n Hash,\n\n /// Target partition is defined by the client. As long as the indicated\n\n /// partition exists, Event assignment will respect this value. Correctness of the relative\n\n /// ordering of events is under the responsibility of the Producer. Requires that the client\n\n /// provides the target partition on metadata.partition (See EventMetadata). 
Failure to do\n\n /// so will reject the publishing of the Event.\n\n UserDefined,\n\n}\n\n\n\nimpl Default for PartitionStrategy {\n\n fn default() -> Self {\n\n PartitionStrategy::Random\n\n }\n\n}\n\n\n\n/// Compatibility mode provides a mean for event owners to evolve their schema, given changes respect the\n\n/// semantics defined by this field.\n\n///\n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 65, "score": 124629.78504979725 }, { "content": "/// Operational statistics for an EventType. This data may be provided by users on Event Type creation.\n\n/// Nakadi uses this object in order to provide an optimal number of partitions from a throughput perspective.\n\n///\n\n/// This field defines the number of partitions in the underlying Kafka topic of an event type.\n\n/// The amount of partitions is given by the expression max(read_parallelism, write_parallelism).\n\n/// The maximum number of partitions is specific to each deployment of Nakadi\n\n/// and should be referred to in a separated document.\n\n///\n\n/// For historical reasons the way that the number of partitions is defined is not as straighforward as it could.\n\n/// The fields messages_per_minute and message_size could potentially influence the resulting amount of partitions,\n\n/// so it’s recommended to set both of them to 1 (one).\n\n/// Providing values different than 1 could result in a higher number of partitions being created.\n\n///\n\n/// For those interested in why these fields exist, in the beginning of the project the developers\n\n/// run a very rudimentary benchmark to understand how much data could be ingested by a single Kafka topic-partition.\n\n/// This benchmark data was later used by this feature to define the suposedely\n\n/// ideal number of partitions for the user’s needs. 
Over time the maintainers of\n\n/// the project found this benchmark to be unreliable,\n\n/// usually resulting in fewer partitions than needed.\n\n/// \n", "file_path": "nakadi-types/src/event_type/mod.rs", "rank": 66, "score": 124629.34874293015 }, { "content": "/// Can return a key for manual partitioning\n\npub trait PartitionKeyExtractable {\n\n type Key: Hash;\n\n\n\n /// Returns the key for partitioning\n\n fn partition_key(&self) -> Self::Key;\n\n}\n\n\n", "file_path": "nakadion/src/publisher/partitioner.rs", "rank": 67, "score": 122980.67478572493 }, { "content": "#[test]\n\nfn test_next_string_none_1() {\n\n let sample = b\"\";\n\n let r = next_string(sample, 0).unwrap();\n\n assert!(r.is_none());\n\n}\n\n\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 68, "score": 122539.35471729725 }, { "content": "fn parse_next_cursor_item(\n\n json_bytes: &[u8],\n\n start: usize,\n\n cursor: &mut Cursor,\n\n) -> Result<Option<usize>, ParseBatchError> {\n\n if let Ok(Some((begin, end))) = next_string(json_bytes, start) {\n\n if end - begin < 2 {\n\n return Err(\"String can not be a label if len<2\".into());\n\n }\n\n\n\n let label = &json_bytes[begin + 1..end];\n\n let last = match label {\n\n CURSOR_PARTITION_LABEL => {\n\n if let Some((a, b)) = next_string(json_bytes, end + 1)? 
{\n\n if b - a < 2 {\n\n return Err(\"Empty String for partition\".into());\n\n } else {\n\n cursor.partition = (a + 1, b);\n\n b\n\n }\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 69, "score": 122492.37213950118 }, { "content": "pub trait SubscriptionApiExt {\n\n /// Reset all the offsets for the subscription to \"begin\"\n\n fn reset_cursors_to_begin<T: Into<FlowId>>(\n\n &self,\n\n id: SubscriptionId,\n\n flow_id: T,\n\n ) -> ApiFuture<()>;\n\n}\n\n\n\nimpl<S> SubscriptionApiExt for S\n\nwhere\n\n S: super::SubscriptionApi + Send + Sync + 'static,\n\n{\n\n /// Resets all cursors of the given subscription to `CursorOffset::Begin`\n\n fn reset_cursors_to_begin<T: Into<FlowId>>(\n\n &self,\n\n id: SubscriptionId,\n\n flow_id: T,\n\n ) -> ApiFuture<()> {\n\n let flow_id = flow_id.into();\n", "file_path": "nakadion/src/api/api_ext.rs", "rank": 70, "score": 120755.67009567673 }, { "content": "#[test]\n\nfn parse_subscription_batch_line_with_info() {\n\n let line_sample = r#\"{\"cursor\":{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"},\"events\":[{\"metadata\":\"#\n\n + r#\"{\"occurred_at\":\"1996-10-15T16:39:57+07:00\",\"eid\":\"1f5a76d8-db49-4144-ace7\"#\n\n + r#\"-e683e8ff4ba4\",\"event_type\":\"aruha-test-hila\",\"partition\":\"5\",\"#\n\n + r#\"\"received_at\":\"2016-09-30T09:19:00.525Z\",\"flow_id\":\"blahbloh\"},\"#\n\n + r#\"\"data_op\":\"C\",\"data\":{\"order_number\":\"abc\",\"id\":\"111\"},\"#\n\n + r#\"\"data_type\":\"blah\"}],\"info\":{\"debug\":\"Stream started\"}}\"#;\n\n\n\n let cursor_sample = r#\"{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"}\"#;\n\n\n\n let events_sample = r#\"[{\"metadata\":\"#.to_owned()\n\n + 
r#\"{\"occurred_at\":\"1996-10-15T16:39:57+07:00\",\"eid\":\"1f5a76d8-db49-4144-ace7\"#\n\n + r#\"-e683e8ff4ba4\",\"event_type\":\"aruha-test-hila\",\"partition\":\"5\",\"#\n\n + r#\"\"received_at\":\"2016-09-30T09:19:00.525Z\",\"flow_id\":\"blahbloh\"},\"#\n\n + r#\"\"data_op\":\"C\",\"data\":{\"order_number\":\"abc\",\"id\":\"111\"},\"#\n\n + r#\"\"data_type\":\"blah\"}]\"#;\n", "file_path": "nakadion/src/components/streams/event_stream/mod.rs", "rank": 71, "score": 119653.24954156118 }, { "content": "/// A common trait for dispatching Http requests.\n\n///\n\n/// This trait is used to enable pluggable\n\n/// HTTP clients\n\npub trait DispatchHttpRequest {\n\n fn dispatch<'a>(&'a self, req: Request<Bytes>) -> ResponseFuture<'a>;\n\n}\n\n\n\n/// An error with can be caused by a remote call.\n\n///\n\n/// This is a low level error.\n\n#[derive(Debug)]\n\npub struct RemoteCallError {\n\n message: Option<String>,\n\n cause: Option<Box<dyn StdError + Send + Sync + 'static>>,\n\n detail: RemoteCallErrorDetail,\n\n}\n\n\n\nimpl RemoteCallError {\n\n pub fn new_io() -> Self {\n\n Self {\n\n message: None,\n\n cause: None,\n\n detail: RemoteCallErrorDetail::Io,\n", "file_path": "nakadion/src/api/dispatch_http_request.rs", "rank": 72, "score": 118669.89393480719 }, { "content": "//! Types for handling events\n\n//!\n\n//! Consumable and publishable event templates\n\n\n\npub use crate::{event_type::EventTypeName, partition::PartitionId, FlowId};\n\n\n\nuse chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n\nuse uuid::Uuid;\n\n\n\npub mod publishable;\n\n\n\nnew_type! 
{\n\n #[doc=\"Identifier for an event.\\n\\nSometimes also called EID.\"]\n\n #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\n pub copy struct EventId(Uuid);\n\n}\n\n\n\n/// Shortcut for creating a new random `EventId`\n\npub struct Eid;\n", "file_path": "nakadi-types/src/event/mod.rs", "rank": 73, "score": 117919.01470164953 }, { "content": "\n\nimpl From<Eid> for EventId {\n\n fn from(_: Eid) -> Self {\n\n Self::random()\n\n }\n\n}\n\n\n\nimpl EventId {\n\n /// Generate a new random `EventId` from a version 4 UUID\n\n pub fn random() -> Self {\n\n Self::new(Uuid::new_v4())\n\n }\n\n}\n\n\n\nnew_type! {\n\n #[doc=\"\"]\n\n #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\n pub struct DataType(String);\n\n}\n\n\n", "file_path": "nakadi-types/src/event/mod.rs", "rank": 74, "score": 117917.13188830459 }, { "content": "pub struct DataChangeEvent<T> {\n\n /// The payload of the type\n\n pub data: T,\n\n pub data_type: DataType,\n\n pub data_op: DataOp,\n\n pub metadata: EventMetaData,\n\n}\n\n\n\n/// A `BusinessEvent` template for consumption of events\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_BusinessEvent)\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct BusinessEvent<T> {\n\n #[serde(flatten)]\n\n pub data: T,\n\n pub metadata: EventMetaData,\n\n}\n\n\n\n/// Metadata of an event\n\n///\n", "file_path": "nakadi-types/src/event/mod.rs", "rank": 75, "score": 117907.07979992463 }, { "content": "/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventMetadata)\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct EventMetaData {\n\n /// Identifier of this Event.\n\n ///\n\n /// Clients MUST generate this value and it SHOULD be guaranteed to be unique from the\n\n /// perspective of the producer. 
Consumers MIGHT use this value to assert uniqueness of\n\n /// reception of the Event.\n\n pub eid: EventId,\n\n /// The EventType of this Event\n\n pub event_type: EventTypeName,\n\n /// Timestamp of creation of the Event generated by the producer.\n\n pub occurred_at: DateTime<Utc>,\n\n /// Timestamp of the reception of the Event by Nakadi. This is enriched upon reception of\n\n /// the Event.\n\n pub received_at: DateTime<Utc>,\n\n #[serde(default)]\n\n /// Event identifier of the Event that caused the generation of this Event.\n\n /// Set by the producer.\n\n pub parent_eids: Vec<EventId>,\n\n /// Indicates the partition assigned to this Event.\n\n pub partition: PartitionId,\n\n #[serde(default)]\n\n pub version: String,\n\n pub flow_id: FlowId,\n\n}\n", "file_path": "nakadi-types/src/event/mod.rs", "rank": 76, "score": 117905.70283150223 }, { "content": "/// The type of operation executed on the entity.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum DataOp {\n\n #[serde(rename = \"C\")]\n\n Creation,\n\n #[serde(rename = \"U\")]\n\n Update,\n\n #[serde(rename = \"D\")]\n\n Deletion,\n\n #[serde(rename = \"S\")]\n\n Snapshot,\n\n}\n\n\n\n/// A `DataChangeEvent` template for consumption of events\n\n///\n\n/// Represents a change on a resource. 
Also contains indicators\n\n/// for the data type and the type of operation performed.\n\n///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_DataChangeEvent)\n\n#[derive(Debug, Clone, Deserialize)]\n", "file_path": "nakadi-types/src/event/mod.rs", "rank": 77, "score": 117904.36072116037 }, { "content": "pub fn paginate_subscriptions(\n\n api_client: ApiClient,\n\n event_type: Option<EventTypeName>,\n\n owning_application: Option<OwningApplication>,\n\n limit: Option<usize>,\n\n offset: Option<usize>,\n\n show_status: bool,\n\n flow_id: FlowId,\n\n) -> BoxStream<'static, Result<Subscription, NakadiApiError>> {\n\n let params = PaginationParams {\n\n event_type,\n\n owning_application,\n\n limit: limit.unwrap_or(20),\n\n offset: offset.unwrap_or(0),\n\n show_status,\n\n };\n\n\n\n let (tx, rx) = unbounded_channel::<Result<Subscription, NakadiApiError>>();\n\n\n\n tokio::spawn(paginate(api_client, params, flow_id, tx));\n\n\n\n UnboundedReceiverStream::new(rx).boxed()\n\n}\n\n\n", "file_path": "nakadion/src/api/client/get_subscriptions.rs", "rank": 78, "score": 117446.00238689226 }, { "content": "#[test]\n\nfn parse_subscription_batch_line_without_info() {\n\n let line_sample = r#\"{\"cursor\":{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"},\"events\":[{\"metadata\":\"#\n\n + r#\"{\"occurred_at\":\"1996-10-15T16:39:57+07:00\",\"eid\":\"1f5a76d8-db49-4144-ace7\"#\n\n + r#\"-e683e8ff4ba4\",\"event_type\":\"aruha-test-hila\",\"partition\":\"5\",\"#\n\n + r#\"\"received_at\":\"2016-09-30T09:19:00.525Z\",\"flow_id\":\"blahbloh\"},\"#\n\n + r#\"\"data_op\":\"C\",\"data\":{\"order_number\":\"abc\",\"id\":\"111\"},\"#\n\n + r#\"\"data_type\":\"blah\"}]}\"#;\n\n\n\n let cursor_sample = r#\"{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + 
r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"}\"#;\n\n\n\n let events_sample = r#\"[{\"metadata\":\"#.to_owned()\n\n + r#\"{\"occurred_at\":\"1996-10-15T16:39:57+07:00\",\"eid\":\"1f5a76d8-db49-4144-ace7\"#\n\n + r#\"-e683e8ff4ba4\",\"event_type\":\"aruha-test-hila\",\"partition\":\"5\",\"#\n\n + r#\"\"received_at\":\"2016-09-30T09:19:00.525Z\",\"flow_id\":\"blahbloh\"},\"#\n\n + r#\"\"data_op\":\"C\",\"data\":{\"order_number\":\"abc\",\"id\":\"111\"},\"#\n\n + r#\"\"data_type\":\"blah\"}]\"#;\n", "file_path": "nakadion/src/components/streams/event_stream/mod.rs", "rank": 79, "score": 117086.97033712231 }, { "content": "#[test]\n\nfn parse_subscription_batch_line_keep_alive_with_info() {\n\n let line_sample = r#\"{\"cursor\":{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"},\"info\":{\"debug\":\"Stream started\"}}\"#;\n\n\n\n let cursor_sample = r#\"{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"}\"#;\n\n\n\n let info_sample = r#\"{\"debug\":\"Stream started\"}\"#;\n\n\n\n let line = EventStreamBatch::try_from_slice(line_sample.as_bytes()).unwrap();\n\n\n\n assert_eq!(line.bytes(), line_sample.as_bytes());\n\n assert_eq!(line.cursor_bytes(), cursor_sample.as_bytes());\n\n assert_eq!(line.partition_str(), \"6\");\n\n assert_eq!(line.event_type_str(), \"order.ORDER_RECEIVED\");\n\n assert_eq!(line.info_str(), Some(info_sample));\n\n assert_eq!(line.is_keep_alive_line(), true);\n\n}\n\n\n", "file_path": "nakadion/src/components/streams/event_stream/mod.rs", "rank": 80, "score": 114654.01163944369 }, { "content": "#[test]\n\nfn parse_subscription_batch_line_keep_alive_without_info() {\n\n let line_sample = r#\"{\"cursor\":{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + 
r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"}}\"#;\n\n\n\n let cursor_sample = r#\"{\"partition\":\"6\",\"offset\":\"543\",\"#.to_owned()\n\n + r#\"\"event_type\":\"order.ORDER_RECEIVED\",\"cursor_token\":\"#\n\n + r#\"\"b75c3102-98a4-4385-a5fd-b96f1d7872f2\"}\"#;\n\n\n\n let line = EventStreamBatch::try_from_slice(line_sample.as_bytes()).unwrap();\n\n\n\n assert_eq!(line.bytes(), line_sample.as_bytes(), \"line bytes\");\n\n assert_eq!(\n\n line.cursor_bytes(),\n\n cursor_sample.as_bytes(),\n\n \"cursor bytes\"\n\n );\n\n assert_eq!(line.partition_str(), \"6\");\n\n assert_eq!(line.event_type_str(), \"order.ORDER_RECEIVED\");\n\n assert_eq!(line.info_bytes(), None);\n\n assert_eq!(line.is_keep_alive_line(), true);\n\n}\n\n\n", "file_path": "nakadion/src/components/streams/event_stream/mod.rs", "rank": 81, "score": 112344.24729989513 }, { "content": "fn parse_no_events_for(parts: Vec<&str>) -> Result<StreamDeadPolicy, Error> {\n\n if parts[0] != \"no_events_for_seconds\" {\n\n return Err(Error::new(\"not StreamDeadPolicy::NoEventsFor\"));\n\n }\n\n\n\n if parts.len() == 2 {\n\n let seconds: u32 = parts[1]\n\n .parse()\n\n .map_err(|err| Error::new(format!(\"{} not an u32: {}\", parts[0], err)))?;\n\n Ok(StreamDeadPolicy::NoEventsFor { seconds })\n\n } else {\n\n Err(Error::new(\"not StreamDeadPolicy::NoEventsFor\"))\n\n }\n\n}\n\n\n\n/// Configures the logging for partition events.\n\n///\n\n/// A partition event occurs if a new partition is discovered for the\n\n/// first time on a stream (`AfterConnect`) or if it was deactivated or reactivated\n\n/// (`ActivityChange`) caused by `PartitionInactivityTimeoutSecs`.\n", "file_path": "nakadion/src/consumer/config_types/complex_types.rs", "rank": 82, "score": 111769.57592034621 }, { "content": "fn parse_cursor_fields<T: AsRef<[u8]>>(\n\n json_bytes: T,\n\n cursor: &mut Cursor,\n\n start: usize,\n\n end: usize,\n\n) -> Result<(), ParseBatchError> 
{\n\n let mut next_byte = start;\n\n while next_byte <= end {\n\n if let Some(end) = parse_next_cursor_item(json_bytes.as_ref(), next_byte, cursor)? {\n\n next_byte = end + 1\n\n } else {\n\n break;\n\n }\n\n }\n\n if cursor.partition.0 == 0 {\n\n Err(format!(\"Partition missing in cursor @ {}\", next_byte).into())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "nakadion/src/components/streams/event_stream/line_parser.rs", "rank": 83, "score": 111501.03825510843 }, { "content": "pub fn mandatory<T>(v: Option<T>, field_name: &'static str) -> Result<T, crate::Error> {\n\n if let Some(v) = v {\n\n Ok(v)\n\n } else {\n\n Err(crate::Error::new(format!(\n\n \"field '{}' is mandatory\",\n\n field_name\n\n )))\n\n }\n\n}\n", "file_path": "nakadi-types/src/helpers.rs", "rank": 84, "score": 108098.10474220902 }, { "content": "#[derive(Clone)]\n\nenum InstrumentationSelection {\n\n Off,\n\n Custom(Arc<dyn Instruments + Send + Sync>),\n\n #[cfg(feature = \"metrix\")]\n\n Metrix(Metrix),\n\n}\n", "file_path": "nakadion/src/instrumentation/mod.rs", "rank": 85, "score": 104537.20195932518 }, { "content": "struct Backoff {\n\n max: u64,\n\n iter: Box<dyn Iterator<Item = u64> + Send + 'static>,\n\n}\n\n\n\nimpl Backoff {\n\n pub fn new(max: u64) -> Self {\n\n let iter = Box::new(CONNECT_RETRY_BACKOFF_SECS.iter().copied());\n\n Backoff { max, iter }\n\n }\n\n\n\n pub fn next(&mut self) -> Duration {\n\n let d = if let Some(next) = self.iter.next() {\n\n next\n\n } else {\n\n self.max\n\n };\n\n\n\n let d = std::cmp::min(d, self.max);\n\n\n\n Duration::from_secs(d)\n\n }\n\n}\n", "file_path": "nakadion/src/components/connector/mod.rs", "rank": 86, "score": 104290.63017165776 }, { "content": "struct Inner {\n\n urls: Urls,\n\n dispatch_http_request: Box<dyn DispatchHttpRequest + Send + Sync + 'static>,\n\n access_token_provider: Box<dyn ProvidesAccessToken + Send + Sync + 'static>,\n\n timeout_millis: Option<ApiClientTimeoutMillis>,\n\n attempt_timeout_millis: 
ApiClientAttemptTimeoutMillis,\n\n initial_retry_interval_millis: ApiClientInitialRetryIntervalMillis,\n\n retry_interval_multiplier: ApiClientRetryIntervalMultiplier,\n\n max_retry_interval_millis: ApiClientMaxRetryIntervalMillis,\n\n retry_on_auth_errors: ApiClientRetryOnAuthErrors,\n\n}\n\n\n\nimpl fmt::Debug for Inner {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.urls.base_url)?;\n\n Ok(())\n\n }\n\n}\n\n\n\nmod urls {\n", "file_path": "nakadion/src/api/client/mod.rs", "rank": 87, "score": 104290.63017165776 }, { "content": "#[derive(Clone)]\n\nenum InstrumentationSelection {\n\n Off,\n\n Custom(Arc<dyn Instruments + Send + Sync>),\n\n #[cfg(feature = \"metrix\")]\n\n Metrix(Metrix),\n\n}\n\n\n\nimpl Default for InstrumentationSelection {\n\n fn default() -> Self {\n\n Self::Off\n\n }\n\n}\n\n\n\n/// Used by the publisher to notify on measurable state changes\n\n#[derive(Default, Clone)]\n\npub struct Instrumentation {\n\n instr: InstrumentationSelection,\n\n}\n\n\n\nimpl Instrumentation {\n", "file_path": "nakadion/src/publisher/instrumentation/mod.rs", "rank": 88, "score": 102113.83897641802 }, { "content": "struct Inner<C> {\n\n config: Config,\n\n api_client: C,\n\n handler_factory: Arc<dyn BatchHandlerFactory>,\n\n logging_adapter: Arc<dyn LoggingAdapter>,\n\n lifecycle_listeners: LifecycleListeners,\n\n}\n\n\n\nimpl<C> fmt::Debug for Inner<C> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"[]\")?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<C> ConsumerInternal for Inner<C>\n\nwhere\n\n C: StreamingEssentials + Clone,\n\n{\n\n fn start(&self, consumer_state: ConsumerState) -> BoxFuture<'static, ConsumerAbort> {\n", "file_path": "nakadion/src/consumer/mod.rs", "rank": 89, "score": 101499.03657921584 }, { "content": " }\n\n}\n\n\n\n/// The most recent schema for this EventType. 
Submitted events will be validated against it.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum EventTypeSchemaInput {\n\n Json(Value),\n\n}\n\n\n\nimpl EventTypeSchemaInput {\n\n pub fn json_schema<T: Into<Value>>(v: T) -> Self {\n\n Self::Json(v.into())\n\n }\n\n\n\n pub fn json_schema_parsed(v: &str) -> Result<Self, Error> {\n\n let parsed = serde_json::from_str(v)?;\n\n Ok(Self::Json(parsed))\n\n }\n\n\n\n pub fn schema_type(&self) -> SchemaType {\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 90, "score": 100357.35528710282 }, { "content": "}\n\n\n\nimpl EventTypeInput {\n\n /// returns a builder with default values\n\n pub fn builder() -> EventTypeInputBuilder {\n\n EventTypeInputBuilder::default()\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Clone, Serialize, Deserialize)]\n\npub struct EventTypeInputBuilder {\n\n /// Name of this EventType. The name is constrained by a regular expression.\n\n ///\n\n /// Note: the name can encode the owner/responsible for this EventType and ideally should\n\n /// follow a common pattern that makes it easy to read and understand, but this level of\n\n /// structure is not enforced. For example a team name and data type can be used such as\n\n /// ‘acme-team.price-change’.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub name: Option<EventTypeName>,\n\n /// Indicator of the application owning this EventType.\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 91, "score": 100352.69921802108 }, { "content": " pub fn compatibility_mode<T: Into<CompatibilityMode>>(mut self, v: T) -> Self {\n\n self.compatibility_mode = Some(v.into());\n\n self\n\n }\n\n\n\n pub fn schema<T: Into<EventTypeSchemaInput>>(mut self, v: T) -> Self {\n\n self.schema = Some(v.into());\n\n self\n\n }\n\n /// Event type cleanup policy. 
There are two possible values:\n\n pub fn cleanup_policy<T: Into<CleanupPolicy>>(mut self, v: T) -> Self {\n\n self.cleanup_policy = Some(v.into());\n\n self\n\n }\n\n pub fn default_statistic<T: Into<EventTypeStatistics>>(mut self, v: T) -> Self {\n\n self.default_statistic = Some(v.into());\n\n self\n\n }\n\n pub fn options<T: Into<EventTypeOptions>>(mut self, v: T) -> Self {\n\n self.options = Some(v.into());\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 92, "score": 100349.8659410981 }, { "content": " /// follow a common pattern that makes it easy to read and understand, but this level of\n\n /// structure is not enforced. For example a team name and data type can be used such as\n\n /// ‘acme-team.price-change’.\n\n pub fn name<T: Into<EventTypeName>>(mut self, v: T) -> Self {\n\n self.name = Some(v.into());\n\n self\n\n }\n\n /// Indicator of the application owning this EventType.\n\n pub fn owning_application<T: Into<OwningApplication>>(mut self, v: T) -> Self {\n\n self.owning_application = Some(v.into());\n\n self\n\n }\n\n /// Defines the category of this EventType.\n\n ///\n\n /// The value set will influence, if not set otherwise, the default set of\n\n /// validations, enrichment-strategies, and the effective schema for validation.\n\n pub fn category<T: Into<Category>>(mut self, v: T) -> Self {\n\n self.category = Some(v.into());\n\n self\n\n }\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 93, "score": 100349.5621939699 }, { "content": " schema_type: self.schema_type(),\n\n schema: self.schema_syntax(),\n\n };\n\n\n\n wrapper.serialize(serializer)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for EventTypeSchemaInput {\n\n fn deserialize<D>(deserializer: D) -> Result<EventTypeSchemaInput, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let wrapper = EventTypeSchemaInputSer::deserialize(deserializer)?;\n\n\n\n match wrapper.schema_type {\n\n SchemaType::JsonSchema => {\n\n let 
schema_syntax =\n\n serde_json::from_str(wrapper.schema.as_ref()).map_err(SError::custom)?;\n\n Ok(EventTypeSchemaInput::Json(schema_syntax))\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 94, "score": 100349.30262455858 }, { "content": " #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[serde(flatten)]\n\n pub schema: Option<EventTypeSchemaInput>,\n\n /// Event type cleanup policy. There are two possible values:\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub cleanup_policy: Option<CleanupPolicy>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub default_statistic: Option<EventTypeStatistics>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub options: Option<EventTypeOptions>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub authorization: Option<EventTypeAuthorization>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub audience: Option<EventTypeAudience>,\n\n}\n\n\n\nimpl EventTypeInputBuilder {\n\n /// Name of this EventType. The name is constrained by a regular expression.\n\n ///\n\n /// Note: the name can encode the owner/responsible for this EventType and ideally should\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 95, "score": 100347.69181565232 }, { "content": " /// It’s designed to be flexible enough so that producers can evolve their schemas while not\n\n /// inadvertently breaking existent consumers.\n\n ///\n\n /// Once defined, the compatibility mode is fixed, since otherwise it would break a predefined contract,\n\n /// declared by the producer.\n\n pub compatibility_mode: CompatibilityMode,\n\n\n\n pub schema: EventTypeSchemaInput,\n\n /// Event type cleanup policy. There are two possible values:\n\n pub cleanup_policy: CleanupPolicy,\n\n /// Operational statistics for an EventType. 
This data may be provided by users on Event Type creation.\n\n /// Nakadi uses this object in order to provide an optimal number of partitions from a throughput perspective.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub default_statistic: Option<EventTypeStatistics>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub options: Option<EventTypeOptions>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub authorization: Option<EventTypeAuthorization>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub audience: Option<EventTypeAudience>,\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 96, "score": 100346.49252732898 }, { "content": " self\n\n }\n\n pub fn authorization<T: Into<EventTypeAuthorization>>(mut self, v: T) -> Self {\n\n self.authorization = Some(v.into());\n\n self\n\n }\n\n pub fn audience<T: Into<EventTypeAudience>>(mut self, v: T) -> Self {\n\n self.audience = Some(v.into());\n\n self\n\n }\n\n\n\n /// Validates the data and returns an `EventTypeInput` if valid.\n\n pub fn build(self) -> Result<EventTypeInput, Error> {\n\n let name = mandatory(self.name, \"name\")?;\n\n let owning_application = mandatory(self.owning_application, \"owning_application\")?;\n\n let category = mandatory(self.category, \"category\")?;\n\n let enrichment_strategies = self.enrichment_strategies.unwrap_or_default();\n\n let partition_strategy = mandatory(self.partition_strategy, \"partition_strategy\")?;\n\n let partition_key_fields = self.partition_key_fields;\n\n let compatibility_mode = mandatory(self.compatibility_mode, \"compatibility_mode\")?;\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 97, "score": 100346.05433700835 }, { "content": "///\n\n/// See also [Nakadi Manual](https://nakadi.io/manual.html#definition_EventType)\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct EventTypeInput {\n\n /// Name of this EventType. 
The name is constrained by a regular expression.\n\n ///\n\n /// Note: the name can encode the owner/responsible for this EventType and ideally should\n\n /// follow a common pattern that makes it easy to read and understand, but this level of\n\n /// structure is not enforced. For example a team name and data type can be used such as\n\n /// ‘acme-team.price-change’.\n\n pub name: EventTypeName,\n\n /// Indicator of the application owning this EventType.\n\n pub owning_application: OwningApplication,\n\n /// Defines the category of this EventType.\n\n ///\n\n /// The value set will influence, if not set otherwise, the default set of\n\n /// validations, enrichment-strategies, and the effective schema for validation.\n\n pub category: Category,\n\n /// Determines the enrichment to be performed on an Event upon reception. Enrichment is\n\n /// performed once upon reception (and after validation) of an Event and is only possible on\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 98, "score": 100346.05560011596 }, { "content": " match self {\n\n EventTypeSchemaInput::Json(_) => SchemaType::JsonSchema,\n\n }\n\n }\n\n\n\n pub fn schema_syntax(&self) -> SchemaSyntax {\n\n match self {\n\n EventTypeSchemaInput::Json(ref syntax) => {\n\n SchemaSyntax(serde_json::to_string(syntax).unwrap())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Serialize for EventTypeSchemaInput {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let wrapper = EventTypeSchemaInputSer {\n", "file_path": "nakadi-types/src/event_type/event_type_input.rs", "rank": 99, "score": 100345.34696763629 } ]
Rust
src/clone_wait_signal_pass_wasm.rs
AliceOh/RUST_clone_cgroup_project
64d381f849c5581dd2133417d0b2c2459c5e7e27
use std::process::Command; use std::thread::sleep; use std::time::Duration; use libc::{_exit, kill, pause, prctl, sigemptyset, sigset_t, sigwait, write, STDOUT_FILENO}; use nix::sys::signal::{ sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGCHLD, SIGCONT, SIGSTOP, SIGUSR1, }; use std::ffi::c_void; use nix::sched::{self, CloneFlags}; use nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus}; use nix::unistd::{getpid, getppid, Pid}; use interprocess::local_socket::{LocalSocketListener, LocalSocketStream}; use std::fs; use std::{ error::Error, io::{self, prelude::*, BufReader}, }; extern "C" fn handle_sigusr1(_: libc::c_int) { print_signal_safe("[clone child] Received Parent signal!\n"); } extern "C" fn handle_sigchld(_: libc::c_int) { print_signal_safe("[main] What a surprise! Got SIGCHLD!\n"); match waitpid(Pid::from_raw(-1), None) { Ok(_) => { print_signal_safe("[main] Child exited.\n"); print_signal_safe("[main] Bye Bye!\n"); exit_signal_safe(0); } Err(_) => { print_signal_safe("[main] waitpid() failed.\n"); exit_signal_safe(1); } } } fn child() -> isize { println!( "[clone child] Hello from child process with pid: {} and parent pid:{}", getpid(), getppid() ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigusr1), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGUSR1, &sig_action) } { panic!("[clone child] sigaction() failed: {}", err); }; println!("[clone child] Wait for signal from parent"); unsafe { pause(); } println!("[clone child] Signal was delivered - pause is over"); let conn = LocalSocketStream::connect("/tmp/example.sock"); let mut conn = match conn { Ok(f) => f, Err(_e) => return 1, }; conn.write_all("Hello from client!\n".as_bytes()) .expect("client write to socket failed"); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { println!("[child]: received from socket: {}, length is {} bytes", s, s.len()); } 
println!("[clone child] Try to allocate big array"); let _v = Box::new([0i32; 600]); println!("[clone child] Yeah, get my array memory successfully!"); Command::new("ip") .arg("link") .spawn() .expect("ip command failed to start"); 0 } fn main() -> Result<(), Box<dyn Error>> { Command::new("mkdir") .arg("-p") .arg("/sys/fs/cgroup/foo") .output() .expect("failed to execute process"); println!("[main] after mkdir"); const STACK_SIZE: usize = 1024 * 1024; let ref mut stack = [0; STACK_SIZE]; let flags = CloneFlags::CLONE_NEWUSER | CloneFlags::CLONE_NEWPID | CloneFlags::CLONE_NEWNET | CloneFlags::CLONE_NEWNS | CloneFlags::CLONE_NEWCGROUP; let child_pid = sched::clone(Box::new(child), stack, flags, Some(Signal::SIGCHLD as i32)) .expect("Failed to spawn the child"); println!( "[main] I am the parent process with pid: {} and I cloned a child with PID {}.", getpid(), child_pid ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigchld), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGCHLD, &sig_action) } { panic!("[main] sigaction() failed: {}", err); }; let pid_string = (i32::from(child_pid)).to_string(); fs::write("/sys/fs/cgroup/foo/cgroup.procs", pid_string).expect("Unable to write file"); let data = fs::read_to_string("/sys/fs/cgroup/foo/cgroup.procs").expect("Unable to read file"); println!("[main] read cgroup.procs get {}", data); let wasm_bytes = std::fs::read("add.wasm")?; fn handle_error(connection: io::Result<LocalSocketStream>) -> LocalSocketStream { match connection { Ok(val) => val, Err(error) => { eprintln!("\n"); panic!("Incoming connection failed: {}", error); } } } let listener = LocalSocketListener::bind("/tmp/example.sock") .expect("failed to set up LocalSocketListener"); println!("[main] bind /tmp/example.sock, socket server listening for connections."); println!("SIGUSR1 child_pid.as_raw() = {}", child_pid.as_raw()); unsafe { kill(child_pid.as_raw(), SIGUSR1 as i32); } let mut conn = 
listener.incoming().next().map(handle_error).unwrap(); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { println!("[main]: received from socket: {}, length is {} bytes", s, s.len()); } println!("[main]: before writing to socket"); conn.write_all(&wasm_bytes).expect("failed in write to socket"); println!("[main] I'll be doing my own stuff..."); loop { println!("[main] Do my own stuff."); sleep(Duration::from_millis(1000)); } } fn print_signal_safe(s: &str) { unsafe { write(STDOUT_FILENO, s.as_ptr() as (*const c_void), s.len()); } } fn exit_signal_safe(status: i32) { unsafe { _exit(status); } }
use std::process::Command; use std::thread::sleep; use std::time::Duration; use libc::{_exit, kill, pause, prctl, sigemptyset, sigset_t, sigwait, write, STDOUT_FILENO}; use nix::sys::signal::{ sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGCHLD, SIGCONT, SIGSTOP, SIGUSR1, }; use std::ffi::c_void; use nix::sched::{self, CloneFlags}; use nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus}; use nix::unistd::{getpid, getppid, Pid}; use interprocess::local_socket::{LocalSocketListener, LocalSocketStream}; use std::fs; use std::{ error::Error, io::{self, prelude::*, BufReader}, }; extern "C" fn handle_sigusr1(_: libc::c_int) { print_signal_safe("[clone child] Received Parent signal!\n"); } extern "C" fn handle_sigchld(_: libc::c_int) { print_signal_safe("[main] What a surprise! Got SIGCHLD!\n"); match waitpid(Pid::from_raw(-1), None) { Ok(_) => { print_signal_safe("[main] Child exited.\n"); print_signal_safe("[main] Bye Bye!\n"); exit_signal_safe(0); } Err(_) => { print_signal_safe("[main] waitpid() failed.\n"); exit_signal_safe(1); } } } fn child() -> isize { println!( "[clone child] Hello from child process with pid: {} and parent pid:{}", getpid(), getppid() ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigusr1), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGUSR1, &sig_action) } { panic!("[clone child] sigaction() failed: {}", err); }; println!("[clone child] Wait for signal from parent"); unsafe { pause(); } println!("[clone child] Signal was delivered - pause is over"); let conn = LocalSocketStream::connect("/tmp/example.sock"); let mut conn = match conn { Ok(f) => f, Err(_e) => return 1, }; conn.write_all("Hello from client!\n".as_bytes()) .expect("client write to socket failed"); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { p
fn main() -> Result<(), Box<dyn Error>> { Command::new("mkdir") .arg("-p") .arg("/sys/fs/cgroup/foo") .output() .expect("failed to execute process"); println!("[main] after mkdir"); const STACK_SIZE: usize = 1024 * 1024; let ref mut stack = [0; STACK_SIZE]; let flags = CloneFlags::CLONE_NEWUSER | CloneFlags::CLONE_NEWPID | CloneFlags::CLONE_NEWNET | CloneFlags::CLONE_NEWNS | CloneFlags::CLONE_NEWCGROUP; let child_pid = sched::clone(Box::new(child), stack, flags, Some(Signal::SIGCHLD as i32)) .expect("Failed to spawn the child"); println!( "[main] I am the parent process with pid: {} and I cloned a child with PID {}.", getpid(), child_pid ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigchld), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGCHLD, &sig_action) } { panic!("[main] sigaction() failed: {}", err); }; let pid_string = (i32::from(child_pid)).to_string(); fs::write("/sys/fs/cgroup/foo/cgroup.procs", pid_string).expect("Unable to write file"); let data = fs::read_to_string("/sys/fs/cgroup/foo/cgroup.procs").expect("Unable to read file"); println!("[main] read cgroup.procs get {}", data); let wasm_bytes = std::fs::read("add.wasm")?; fn handle_error(connection: io::Result<LocalSocketStream>) -> LocalSocketStream { match connection { Ok(val) => val, Err(error) => { eprintln!("\n"); panic!("Incoming connection failed: {}", error); } } } let listener = LocalSocketListener::bind("/tmp/example.sock") .expect("failed to set up LocalSocketListener"); println!("[main] bind /tmp/example.sock, socket server listening for connections."); println!("SIGUSR1 child_pid.as_raw() = {}", child_pid.as_raw()); unsafe { kill(child_pid.as_raw(), SIGUSR1 as i32); } let mut conn = listener.incoming().next().map(handle_error).unwrap(); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { println!("[main]: received from socket: {}, length is {} bytes", s, 
s.len()); } println!("[main]: before writing to socket"); conn.write_all(&wasm_bytes).expect("failed in write to socket"); println!("[main] I'll be doing my own stuff..."); loop { println!("[main] Do my own stuff."); sleep(Duration::from_millis(1000)); } } fn print_signal_safe(s: &str) { unsafe { write(STDOUT_FILENO, s.as_ptr() as (*const c_void), s.len()); } } fn exit_signal_safe(status: i32) { unsafe { _exit(status); } }
rintln!("[child]: received from socket: {}, length is {} bytes", s, s.len()); } println!("[clone child] Try to allocate big array"); let _v = Box::new([0i32; 600]); println!("[clone child] Yeah, get my array memory successfully!"); Command::new("ip") .arg("link") .spawn() .expect("ip command failed to start"); 0 }
function_block-function_prefixed
[ { "content": "/// allocate an array and new a \"ip\" process\n\nfn child() -> isize {\n\n println!(\n\n \"[clone child] Hello from child process with pid: {} and parent pid:{}\",\n\n getpid(),\n\n getppid()\n\n );\n\n\n\n // set signal handler for pause\n\n let sig_action = SigAction::new(\n\n SigHandler::Handler(handle_sigusr1),\n\n SaFlags::empty(),\n\n SigSet::empty(),\n\n );\n\n\n\n if let Err(err) = unsafe { sigaction(SIGUSR1, &sig_action) } {\n\n panic!(\"[clone child] sigaction() failed: {}\", err);\n\n };\n\n println!(\"[clone child] Wait for signal from parent\");\n\n // wait for signal\n\n unsafe {\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 0, "score": 148207.13552884647 }, { "content": "/// allocate an array and new a \"ip\" process\n\nfn child() -> isize {\n\n\n\n println!(\"[clone child] Hello from child process with pid: {} and parent pid:{}\", getpid(), getppid());\n\n\n\n println!(\"Wait 1 seconds for the cgroup changing to take effect\");\n\n sleep(Duration::from_secs(1));\n\n\n\n println!(\"[clone child] Try to allocate big array\");\n\n let _v = Box::new([0i32; 6000]);\n\n println!(\"[clone child] Yeah, get my array memory successfully!\");\n\n\n\n Command::new(\"ip\")\n\n .arg(\"link\")\n\n .spawn()\n\n .expect(\"ip command failed to start\");\n\n\n\n 0 // return 0\n\n}\n\n\n", "file_path": "src/clone_waitpid_cgroup_memory_fail.rs", "rank": 1, "score": 135470.49700403283 }, { "content": "/// allocate an array and new a \"ip\" process\n\nfn child() -> isize {\n\n println!(\n\n \"[clone child] Hello from child process with pid: {} and parent pid:{}\",\n\n getpid(),\n\n getppid()\n\n );\n\n\n\n // set signal handler for pause\n\n let sig_action = SigAction::new(\n\n SigHandler::Handler(handle_sigusr1),\n\n SaFlags::empty(),\n\n SigSet::empty(),\n\n );\n\n\n\n if let Err(err) = unsafe { sigaction(SIGUSR1, &sig_action) } {\n\n panic!(\"[clone child] sigaction() failed: {}\", err);\n\n };\n\n println!(\"[clone child] 
Wait for signal from parent\");\n\n // wait for signal\n\n unsafe {\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 3, "score": 131659.60782213474 }, { "content": "/// allocate an array and new a \"ip\" process\n\nfn child() -> isize {\n\n\n\n println!(\"[clone child] Hello from child process with pid: {} and parent pid:{}\", getpid(), getppid());\n\n\n\n println!(\"[clone child] Try to allocate big array\");\n\n let _v = Box::new([0i32; 600]);\n\n println!(\"[clone child] Yeah, get my array memory successfully!\");\n\n\n\n Command::new(\"ip\")\n\n .arg(\"link\")\n\n .spawn()\n\n .expect(\"ip command failed to start\");\n\n\n\n 0 // return 0\n\n}\n\n\n\n\n", "file_path": "src/clone_waitpid.rs", "rank": 4, "score": 124581.46848180048 }, { "content": "/// allocate an array and new a \"ip\" process\n\nfn child() -> isize {\n\n\n\n println!(\"[clone child] Hello from child process with pid: {} and parent pid:{}\", getpid(), getppid());\n\n\n\n println!(\"[clone child] Try to allocate big array\");\n\n let _v = Box::new([0i32; 600]);\n\n println!(\"[clone child] Yeah, get my array memory successfully!\");\n\n\n\n Command::new(\"ip\")\n\n .arg(\"link\")\n\n .spawn()\n\n .expect(\"ip command failed to start\");\n\n\n\n 0 // return 0\n\n}\n\n\n", "file_path": "src/clone_waitpid_cgroup_memory.rs", "rank": 5, "score": 117949.4220417885 }, { "content": "/// allocate an array and new a \"ip\" process\n\nfn child() -> isize {\n\n println!(\n\n \"[clone child] Hello from child process with pid: {} and parent pid:{}\",\n\n getpid(),\n\n getppid()\n\n );\n\n\n\n unsafe {\n\n prctl(PR_SET_KEEPCAPS, 1, 0, 0, 0);\n\n }\n\n println!(\"[clone child ] aft prctl\");\n\n\n\n\n\n let output = Command::new(\"cat\")\n\n // .arg(\"/sys/fs/cgroup/foo\")\n\n .arg(\"/sys/fs/cgroup/foo/memory.max\")\n\n .output()\n\n .expect(\"failed to execute process\");\n\n\n\n\n", "file_path": "src/clone_child_cgroup.rs", "rank": 6, "score": 105743.6729921339 }, { "content": "fn main() 
{\n\n const STACK_SIZE: usize = 1024 * 1024;\n\n let ref mut stack = [0; STACK_SIZE];\n\n\n\n let flags = CloneFlags::CLONE_NEWUSER\n\n | CloneFlags::CLONE_NEWPID\n\n | CloneFlags::CLONE_NEWNET\n\n | CloneFlags::CLONE_NEWNS\n\n | CloneFlags::CLONE_NEWCGROUP;\n\n\n\n let child_pid = sched::clone(Box::new(child), stack, flags, Some(Signal::SIGCHLD as i32)) // without SIGCHLD signal, waitpid gives error \"ECHILD: No child processes\"\n\n .expect(\"Failed to spawn the child\");\n\n\n\n println!(\n\n \"[main] I am the parent process with pid: {} and I cloned a child with PID {}.\",\n\n getpid(),\n\n child_pid\n\n );\n\n\n\n // set signal handler for child termination\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 7, "score": 94804.8504249976 }, { "content": "fn print_signal_safe(s: &str) {\n\n unsafe {\n\n write(STDOUT_FILENO, s.as_ptr() as (*const c_void), s.len());\n\n }\n\n}\n\n\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 8, "score": 91729.31918306464 }, { "content": "fn exit_signal_safe(status: i32) {\n\n unsafe {\n\n _exit(status);\n\n }\n\n}\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 9, "score": 89523.82430986488 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // let wasm_bytes = std::fs::read(\"add.wasm\")?;\n\n\n\n const STACK_SIZE: usize = 1024 * 1024;\n\n let ref mut stack = [0; STACK_SIZE];\n\n\n\n let flags = CloneFlags::CLONE_NEWUSER\n\n | CloneFlags::CLONE_NEWPID\n\n | CloneFlags::CLONE_NEWNET\n\n | CloneFlags::CLONE_NEWNS\n\n | CloneFlags::CLONE_NEWCGROUP;\n\n\n\n let child_pid = sched::clone(Box::new(child), stack, flags, Some(Signal::SIGCHLD as i32)) // without SIGCHLD signal, waitpid gives error \"ECHILD: No child processes\"\n\n .expect(\"Failed to spawn the child\");\n\n\n\n println!(\n\n \"[main] I am the parent process with pid: {} and I cloned a child with PID {}.\",\n\n getpid(),\n\n child_pid\n\n );\n", "file_path": 
"src/clone_child_cgroup.rs", "rank": 10, "score": 82648.8297321976 }, { "content": "fn main() {\n\n const STACK_SIZE: usize = 1024 * 1024;\n\n let ref mut stack = [0; STACK_SIZE];\n\n\n\n let flags = CloneFlags::CLONE_NEWUSER \n\n | CloneFlags::CLONE_NEWPID \n\n | CloneFlags::CLONE_NEWNET \n\n | CloneFlags::CLONE_NEWNS\n\n | CloneFlags::CLONE_NEWCGROUP;\n\n\n\n\n\n let child_pid = sched::clone(Box::new(child), stack, flags, \n\n Some(Signal::SIGCHLD as i32)) // without SIGCHLD signal, waitpid gives error \"ECHILD: No child processes\"\n\n .expect(\"Failed to spawn the child\");\n\n \n\n println!(\"[main] I am the parent process with pid: {} and I cloned a child with PID {}.\", getpid(), child_pid);\n\n\n\n let pid_string = (i32::from(child_pid)).to_string();\n\n\n\n // println!(\"Wait 10 seconds for the child process to up, before changing cgroup\");\n", "file_path": "src/clone_waitpid_cgroup_memory_fail.rs", "rank": 11, "score": 80408.10703585077 }, { "content": "fn main() {\n\n const STACK_SIZE: usize = 1024 * 1024;\n\n let ref mut stack = [0; STACK_SIZE];\n\n\n\n let flags = CloneFlags::CLONE_NEWUSER\n\n | CloneFlags::CLONE_NEWPID\n\n | CloneFlags::CLONE_NEWNET\n\n | CloneFlags::CLONE_NEWNS\n\n | CloneFlags::CLONE_NEWCGROUP;\n\n\n\n let child_pid = sched::clone(Box::new(child), stack, flags, Some(Signal::SIGCHLD as i32)) // without SIGCHLD signal, waitpid gives error \"ECHILD: No child processes\"\n\n .expect(\"Failed to spawn the child\");\n\n\n\n println!(\n\n \"[main] I am the parent process with pid: {} and I cloned a child with PID {}.\",\n\n getpid(),\n\n child_pid\n\n );\n\n\n\n // set signal handler for child termination\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 12, "score": 76470.52547436286 }, { "content": "fn print_signal_safe(s: &str) {\n\n unsafe {\n\n write(STDOUT_FILENO, s.as_ptr() as (*const c_void), s.len());\n\n }\n\n}\n\n\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 14, "score": 74782.85166986639 
}, { "content": "fn print_signal_safe(s: &str) {\n\n unsafe {\n\n write(STDOUT_FILENO, s.as_ptr() as (*const c_void), s.len());\n\n }\n\n}\n\n\n", "file_path": "src/clone_child_cgroup.rs", "rank": 15, "score": 74314.24359004114 }, { "content": "fn exit_signal_safe(status: i32) {\n\n unsafe {\n\n _exit(status);\n\n }\n\n}\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 17, "score": 73018.38471875107 }, { "content": "fn exit_signal_safe(status: i32) {\n\n unsafe {\n\n _exit(status);\n\n }\n\n}\n", "file_path": "src/clone_child_cgroup.rs", "rank": 18, "score": 72223.16850171798 }, { "content": "use std::process::Command;\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\n\n\nuse libc::{_exit, kill, pause, prctl, sigemptyset, sigset_t, sigwait, write, STDOUT_FILENO};\n\nuse nix::sys::signal::{\n\n sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGCHLD, SIGCONT, SIGSTOP, SIGUSR1,\n\n};\n\nuse std::ffi::c_void;\n\n\n\nuse nix::sched::{self, CloneFlags};\n\nuse nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};\n\nuse nix::unistd::{getpid, getppid, Pid};\n\n\n\nuse std::fs;\n\n\n\n/// signal handling functions\n\n\n\nextern \"C\" fn handle_sigusr1(_: libc::c_int) {\n\n print_signal_safe(\"[clone child] Received Parent signal!\\n\");\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 19, "score": 68926.94513554555 }, { "content": "}\n\n\n\nextern \"C\" fn handle_sigchld(_: libc::c_int) {\n\n print_signal_safe(\"[main] What a surprise! 
Got SIGCHLD!\\n\");\n\n match waitpid(Pid::from_raw(-1), None) {\n\n Ok(_) => {\n\n print_signal_safe(\"[main] Child exited.\\n\");\n\n print_signal_safe(\"[main] Bye Bye!\\n\");\n\n exit_signal_safe(0);\n\n }\n\n Err(_) => {\n\n print_signal_safe(\"[main] waitpid() failed.\\n\");\n\n exit_signal_safe(1);\n\n }\n\n }\n\n}\n\n\n\n/// allocate an array and new a \"ip\" process\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 20, "score": 68912.37401866801 }, { "content": " let sig_action = SigAction::new(\n\n SigHandler::Handler(handle_sigchld),\n\n SaFlags::empty(),\n\n SigSet::empty(),\n\n );\n\n\n\n if let Err(err) = unsafe { sigaction(SIGCHLD, &sig_action) } {\n\n panic!(\"[main] sigaction() failed: {}\", err);\n\n };\n\n\n\n // set memory limit of child process\n\n let pid_string = (i32::from(child_pid)).to_string();\n\n\n\n fs::write(\"/sys/fs/cgroup/foo/cgroup.procs\", pid_string).expect(\"Unable to write file\");\n\n\n\n let data = fs::read_to_string(\"/sys/fs/cgroup/foo/cgroup.procs\").expect(\"Unable to read file\");\n\n println!(\"[main] read cgroup.procs get {}\", data);\n\n\n\n // send signal to child process\n\n println!(\"SIGUSR1 child_pid.as_raw() = {}\", child_pid.as_raw());\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 21, "score": 68905.62230136555 }, { "content": " pause();\n\n }\n\n\n\n println!(\"[clone child] Signal was delivered - pause is over\");\n\n\n\n println!(\"[clone child] Try to allocate big array\");\n\n let _v = Box::new([0i32; 6000]);\n\n println!(\"[clone child] Yeah, get my array memory successfully!\");\n\n\n\n Command::new(\"ip\")\n\n .arg(\"link\")\n\n .spawn()\n\n .expect(\"ip command failed to start\");\n\n\n\n 0 // return 0\n\n}\n\n\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 22, "score": 68900.29677749764 }, { "content": " unsafe {\n\n kill(child_pid.as_raw(), SIGUSR1 as i32); // resume the child process\n\n }\n\n\n\n // infinite loop\n\n 
println!(\"[main] I'll be doing my own stuff...\");\n\n loop {\n\n println!(\"[main] Do my own stuff.\");\n\n // ... replace sleep with the payload\n\n sleep(Duration::from_millis(500));\n\n }\n\n}\n\n\n", "file_path": "src/clone_wait_signal_cgroup_memory_fail.rs", "rank": 23, "score": 68899.1433052384 }, { "content": "fn main() {\n\n const STACK_SIZE: usize = 1024 * 1024;\n\n let ref mut stack = [0; STACK_SIZE];\n\n\n\n let flags = CloneFlags::CLONE_NEWUSER \n\n | CloneFlags::CLONE_NEWPID \n\n | CloneFlags::CLONE_NEWNET \n\n | CloneFlags::CLONE_NEWNS\n\n | CloneFlags::CLONE_NEWCGROUP;\n\n\n\n\n\n let child_pid = sched::clone(Box::new(child), stack, flags, \n\n Some(Signal::SIGCHLD as i32)) // without SIGCHLD signal, waitpid gives error \"ECHILD: No child processes\"\n\n .expect(\"Failed to spawn the child\");\n\n \n\n println!(\"[main] I am the parent process with pid: {} and I cloned a child with PID {}.\", getpid(), child_pid);\n\n \n\n println!(\"[main] I'll be doing my own stuff while waiting for the child {} termination...\", child_pid);\n\n loop {\n\n match waitpid(child_pid, Some(WaitPidFlag::WNOHANG)) {\n", "file_path": "src/clone_waitpid.rs", "rank": 25, "score": 64812.01601189976 }, { "content": "fn main() {\n\n const STACK_SIZE: usize = 1024 * 1024;\n\n let ref mut stack = [0; STACK_SIZE];\n\n\n\n let flags = CloneFlags::CLONE_NEWUSER \n\n | CloneFlags::CLONE_NEWPID \n\n | CloneFlags::CLONE_NEWNET \n\n | CloneFlags::CLONE_NEWNS\n\n | CloneFlags::CLONE_NEWCGROUP;\n\n\n\n\n\n let child_pid = sched::clone(Box::new(child), stack, flags, \n\n Some(Signal::SIGCHLD as i32)) // without SIGCHLD signal, waitpid gives error \"ECHILD: No child processes\"\n\n .expect(\"Failed to spawn the child\");\n\n \n\n println!(\"[main] I am the parent process with pid: {} and I cloned a child with PID {}.\", getpid(), child_pid);\n\n\n\n let pid_string = (i32::from(child_pid)).to_string();\n\n\n\n // println!(\"Wait 10 seconds for the child process to up, before changing 
cgroup\");\n", "file_path": "src/clone_waitpid_cgroup_memory.rs", "rank": 26, "score": 60948.22415305396 }, { "content": "fn main() {\n\n let child_pid = match fork() {\n\n Ok(Child) => {\n\n //////////////////////\n\n // child //\n\n //////////////////////\n\n println!(\n\n \"[fork child] I'm alive! My PID is {} and PPID is {}.\",\n\n getpid(),\n\n getppid()\n\n );\n\n\n\n println!(\"[fork child] I'm gonna sleep for a while and then just exit...\");\n\n sleep(Duration::from_secs(2));\n\n exit(0);\n\n }\n\n\n\n Ok(Parent { child, .. }) => {\n\n println!(\n\n \"[main] I am the parent process with pid: {} and I forked a child with PID {}.\",\n", "file_path": "src/fork_waitpid.rs", "rank": 27, "score": 54074.67435072502 }, { "content": "use std::time::Duration;\n\nuse std::thread::sleep;\n\nuse std::process::Command;\n\nuse std::fs;\n\nuse nix::sys::signal::Signal;\n\nuse nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};\n\nuse nix::unistd::{getpid, getppid};\n\nuse nix::sched::{self, CloneFlags};\n\n\n\n/// allocate an array and new a \"ip\" process\n", "file_path": "src/clone_waitpid_cgroup_memory_fail.rs", "rank": 28, "score": 53419.97909066483 }, { "content": "\n\n Ok(status) => {\n\n println!(\"[main] Child exited with status {:?}.\", status);\n\n break;\n\n }\n\n\n\n Err(err) => panic!(\"[main] waitpid() failed: {}\", err),\n\n }\n\n }\n\n\n\n println!(\"[main] Bye Bye!\");\n\n}\n", "file_path": "src/clone_waitpid_cgroup_memory_fail.rs", "rank": 29, "score": 53414.96346470947 }, { "content": " // sleep(Duration::from_secs(10));\n\n\n\n fs::write(\"/sys/fs/cgroup/foo/cgroup.procs\", pid_string).expect(\"Unable to write file\");\n\n\n\n let data = fs::read_to_string(\"/sys/fs/cgroup/foo/cgroup.procs\").expect(\"Unable to read file\");\n\n \n\n println!(\"[main] read cgroup.procs get {}\", data);\n\n\n\n // println!(\"Wait 10 seconds for the child process to run, after changing cgroup\");\n\n // sleep(Duration::from_secs(10));\n\n\n\n \n\n println!(\"[main] I'll 
be doing my own stuff while waiting for the child {} termination...\", child_pid);\n\n loop {\n\n match waitpid(child_pid, Some(WaitPidFlag::WNOHANG)) {\n\n Ok(WaitStatus::StillAlive) => {\n\n println!(\"[main] Child is still alive, do my own stuff while waiting.\");\n\n // ... replace sleep with the work to be done in main\n\n sleep(Duration::from_millis(500));\n\n }\n", "file_path": "src/clone_waitpid_cgroup_memory_fail.rs", "rank": 30, "score": 53414.56063286067 }, { "content": "use std::process::Command;\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\n\n\nuse libc::{_exit, kill, pause, prctl, sigemptyset, sigset_t, sigwait, write, STDOUT_FILENO};\n\nuse nix::sys::signal::{\n\n sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGCHLD, SIGCONT, SIGSTOP, SIGUSR1,\n\n};\n\nuse std::ffi::c_void;\n\n\n\nuse nix::sched::{self, CloneFlags};\n\nuse nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};\n\nuse nix::unistd::{getpid, getppid, Pid};\n\n\n\nuse std::fs;\n\n\n\n/// signal handling functions\n\n\n\nextern \"C\" fn handle_sigusr1(_: libc::c_int) {\n\n print_signal_safe(\"[clone child] Received Parent signal!\\n\");\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 31, "score": 49367.67996617941 }, { "content": "}\n\n\n\nextern \"C\" fn handle_sigchld(_: libc::c_int) {\n\n print_signal_safe(\"[main] What a surprise! 
Got SIGCHLD!\\n\");\n\n match waitpid(Pid::from_raw(-1), None) {\n\n Ok(_) => {\n\n print_signal_safe(\"[main] Child exited.\\n\");\n\n print_signal_safe(\"[main] Bye Bye!\\n\");\n\n exit_signal_safe(0);\n\n }\n\n Err(_) => {\n\n print_signal_safe(\"[main] waitpid() failed.\\n\");\n\n exit_signal_safe(1);\n\n }\n\n }\n\n}\n\n\n\n/// allocate an array and new a \"ip\" process\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 36, "score": 49353.10884930184 }, { "content": " let sig_action = SigAction::new(\n\n SigHandler::Handler(handle_sigchld),\n\n SaFlags::empty(),\n\n SigSet::empty(),\n\n );\n\n\n\n if let Err(err) = unsafe { sigaction(SIGCHLD, &sig_action) } {\n\n panic!(\"[main] sigaction() failed: {}\", err);\n\n };\n\n\n\n // set memory limit of child process\n\n let pid_string = (i32::from(child_pid)).to_string();\n\n\n\n fs::write(\"/sys/fs/cgroup/foo/cgroup.procs\", pid_string).expect(\"Unable to write file\");\n\n\n\n let data = fs::read_to_string(\"/sys/fs/cgroup/foo/cgroup.procs\").expect(\"Unable to read file\");\n\n println!(\"[main] read cgroup.procs get {}\", data);\n\n\n\n // send signal to child process\n\n println!(\"SIGUSR1 child_pid.as_raw() = {}\", child_pid.as_raw());\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 38, "score": 49346.357131999415 }, { "content": " pause();\n\n }\n\n\n\n println!(\"[clone child] Signal was delivered - pause is over\");\n\n\n\n println!(\"[clone child] Try to allocate big array\");\n\n let _v = Box::new([0i32; 600]);\n\n println!(\"[clone child] Yeah, get my array memory successfully!\");\n\n\n\n Command::new(\"ip\")\n\n .arg(\"link\")\n\n .spawn()\n\n .expect(\"ip command failed to start\");\n\n\n\n 0 // return 0\n\n}\n\n\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 40, "score": 49341.03160813149 }, { "content": " unsafe {\n\n kill(child_pid.as_raw(), SIGUSR1 as i32); // resume the child process\n\n }\n\n\n\n // infinite loop\n\n println!(\"[main] I'll 
be doing my own stuff...\");\n\n loop {\n\n println!(\"[main] Do my own stuff.\");\n\n // ... replace sleep with the payload\n\n sleep(Duration::from_millis(500));\n\n }\n\n}\n\n\n", "file_path": "src/clone_wait_signal_cgroup_memory.rs", "rank": 42, "score": 49339.878135872255 }, { "content": "use std::time::Duration;\n\nuse std::thread::sleep;\n\nuse std::process::Command;\n\n\n\nuse nix::sys::signal::Signal;\n\nuse nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};\n\nuse nix::unistd::{getpid, getppid};\n\nuse nix::sched::{self, CloneFlags};\n\n\n\n/// allocate an array and new a \"ip\" process\n", "file_path": "src/clone_waitpid.rs", "rank": 44, "score": 35112.145017306415 }, { "content": " Ok(WaitStatus::StillAlive) => {\n\n println!(\"[main] Child is still alive, do my own stuff while waiting.\");\n\n // ... replace sleep with the work to be done in main\n\n sleep(Duration::from_millis(500));\n\n }\n\n\n\n Ok(status) => {\n\n println!(\"[main] Child exited with status {:?}.\", status);\n\n break;\n\n }\n\n\n\n Err(err) => panic!(\"[main] waitpid() failed: {}\", err),\n\n }\n\n }\n\n\n\n println!(\"[main] Bye Bye!\");\n\n}\n", "file_path": "src/clone_waitpid.rs", "rank": 45, "score": 35108.487444248654 }, { "content": "use std::process::Command;\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\n\n\nuse libc::{sighandler_t, c_int, c_void, _exit, clone, kill, pause, prctl, sigemptyset, sigset_t, sigwait, write, \n\n STDOUT_FILENO, PR_SET_KEEPCAPS };\n\nuse nix::sys::signal::{\n\n sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGCHLD, SIGCONT, SIGSTOP, SIGUSR1,\n\n};\n\n\n\nuse nix::sched::{self, CloneFlags};\n\nuse nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};\n\nuse nix::unistd::{getpid, getppid, Pid};\n\n\n\nuse std::fs;\n\n\n\n\n\n\n\n/// signal handling functions\n\n\n", "file_path": "src/clone_child_cgroup.rs", "rank": 46, "score": 33703.94134377655 }, { "content": "extern \"C\" fn handle_sigusr1(_: c_int) {\n\n 
print_signal_safe(\"[clone child] Received Parent signal!\\n\");\n\n}\n\n\n\nextern \"C\" fn handle_sigchld(_: c_int) {\n\n print_signal_safe(\"[main] What a surprise! Got SIGCHLD!\\n\");\n\n match waitpid(Pid::from_raw(-1), None) {\n\n Ok(_) => {\n\n print_signal_safe(\"[main] Child exited.\\n\");\n\n print_signal_safe(\"[main] Bye Bye!\\n\");\n\n exit_signal_safe(0);\n\n }\n\n Err(_) => {\n\n print_signal_safe(\"[main] waitpid() failed.\\n\");\n\n exit_signal_safe(1);\n\n }\n\n }\n\n}\n\n\n\n// extern fn handler(_: c_int) {}\n\n\n\n// fn get_handler() -> sighandler_t {\n\n// handler as extern fn(c_int) as *mut c_void as sighandler_t\n\n// }\n\n\n\n/// allocate an array and new a \"ip\" process\n", "file_path": "src/clone_child_cgroup.rs", "rank": 47, "score": 33701.458007435984 }, { "content": " .output()\n\n .expect(\"failed to execute process\");\n\n\n\n println!(\"[clone child ] aft ls foo process\");\n\n\n\n if let Ok(s) = String::from_utf8(output.stdout) {\n\n println!(\"{}\", s);\n\n } \n\n\n\n sleep(Duration::from_secs(2));\n\n\n\n // // set signal handler for pause\n\n // let sig_action = SigAction::new(\n\n // SigHandler::Handler(handle_sigusr1),\n\n // SaFlags::empty(),\n\n // SigSet::empty(),\n\n // );\n\n\n\n // if let Err(err) = unsafe { sigaction(SIGUSR1, &sig_action) } {\n\n // panic!(\"[clone child] sigaction() failed: {}\", err);\n", "file_path": "src/clone_child_cgroup.rs", "rank": 48, "score": 33689.07573852707 }, { "content": " // };\n\n // println!(\"[clone child] Wait for signal from parent\");\n\n // // wait for signal\n\n // unsafe {\n\n // pause();\n\n // }\n\n\n\n\n\n println!(\"[clone child ] before read memory limit of child process\");\n\n\n\n\n\n\n\n // set memory limit of child process\n\n let pid_string = (i32::from(getpid())).to_string();\n\n println!(\"pid_string is {}\", pid_string);\n\n\n\n // fs::write(\"/sys/fs/cgroup/cg1/cgroup.procs\", pid_string).expect(\"Unable to write file\");\n\n\n\n // let data = 
fs::read_to_string(\"/sys/fs/cgroup/cg1/cgroup.procs\").expect(\"Unable to read file\");\n\n // println!(\"[clone child ] read cgroup.procs get {}\", data);\n", "file_path": "src/clone_child_cgroup.rs", "rank": 49, "score": 33688.68730239235 }, { "content": "\n\n // set signal handler for child termination\n\n let sig_action = SigAction::new(\n\n SigHandler::Handler(handle_sigchld),\n\n SaFlags::empty(),\n\n SigSet::empty(),\n\n );\n\n\n\n if let Err(err) = unsafe { sigaction(SIGCHLD, &sig_action) } {\n\n panic!(\"[main] sigaction() failed: {}\", err);\n\n };\n\n\n\n // // set memory limit of child process\n\n // let pid_string = (i32::from(child_pid)).to_string();\n\n\n\n // fs::write(\"/sys/fs/cgroup/foo/cgroup.procs\", pid_string).expect(\"Unable to write file\");\n\n\n\n // let data = fs::read_to_string(\"/sys/fs/cgroup/foo/cgroup.procs\").expect(\"Unable to read file\");\n\n // println!(\"[main] read cgroup.procs get {}\", data);\n\n\n", "file_path": "src/clone_child_cgroup.rs", "rank": 50, "score": 33684.34371588391 }, { "content": " // // send signal to child process\n\n // println!(\"SIGUSR1 child_pid.as_raw() = {}\", child_pid.as_raw());\n\n // unsafe {\n\n // kill(child_pid.as_raw(), SIGUSR1 as i32); // resume the child process\n\n // }\n\n\n\n // infinite loop\n\n println!(\"[main] I'll be doing my own stuff...\");\n\n loop {\n\n println!(\"[main] Do my own stuff.\");\n\n // ... 
replace sleep with the payload\n\n sleep(Duration::from_millis(500));\n\n }\n\n}\n\n\n", "file_path": "src/clone_child_cgroup.rs", "rank": 51, "score": 33683.05363356553 }, { "content": "\n\n fs::write(\"/sys/fs/cgroup/foo/cgroup.procs\", pid_string).expect(\"Unable to write file\");\n\n let data = fs::read_to_string(\"/sys/fs/cgroup/foo/cgroup.procs\").expect(\"Unable to read file\");\n\n println!(\"[clone child ] read cgroup.procs get {}\", data);\n\n\n\n\n\n // println!(\"[clone child] Signal was delivered - pause is over\");\n\n\n\n println!(\"[clone child] Try to allocate big array\");\n\n let _v = Box::new([0i32; 600]);\n\n println!(\"[clone child] Yeah, get my array memory successfully!\");\n\n\n\n Command::new(\"ip\")\n\n .arg(\"link\")\n\n .spawn()\n\n .expect(\"ip command failed to start\");\n\n\n\n sleep(Duration::from_secs(1));\n\n\n\n 0 // return 0\n\n}\n\n\n", "file_path": "src/clone_child_cgroup.rs", "rank": 52, "score": 33681.64344908748 }, { "content": " if let Ok(s) = String::from_utf8(output.stdout) {\n\n println!(\"{}\", s);\n\n }\n\n\n\n\n\n // mount -t cgroup2 none /mnt/cgroup2\n\n\n\n let output = Command::new(\"mount\")\n\n .arg(\"-t\")\n\n .arg(\"cgroup2\")\n\n .arg(\"none\")\n\n .arg(\"/sys/fs/cgroup\")\n\n .output()\n\n .expect(\"failed to execute process\");\n\n\n\n println!(\"[clone child ] after mount\");\n\n\n\n Command::new(\"mkdir\")\n\n .arg(\"-p\")\n\n .arg(\"/sys/fs/cgroup/cg1\")\n", "file_path": "src/clone_child_cgroup.rs", "rank": 53, "score": 33678.62975410987 }, { "content": " .output()\n\n .expect(\"failed to execute process\");\n\n\n\n println!(\"[clone child ] after mkdir\");\n\n\n\n\n\n let output = Command::new(\"ls\")\n\n .arg(\"/sys/fs/cgroup/cg1\")\n\n .output()\n\n .expect(\"failed to execute process\");\n\n\n\n println!(\"[clone child ] aft ls cg1 process\");\n\n\n\n if let Ok(s) = String::from_utf8(output.stdout) {\n\n println!(\"{}\", s);\n\n }\n\n\n\n let output = Command::new(\"ls\")\n\n .arg(\"-la\")\n\n 
.arg(\"/sys/fs/cgroup/foo/cgroup.procs\")\n", "file_path": "src/clone_child_cgroup.rs", "rank": 54, "score": 33678.14536414359 }, { "content": "use std::time::Duration;\n\nuse std::thread::sleep;\n\nuse std::process::Command;\n\nuse std::fs;\n\nuse nix::sys::signal::Signal;\n\nuse nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};\n\nuse nix::unistd::{getpid, getppid};\n\nuse nix::sched::{self, CloneFlags};\n\n\n\n/// allocate an array and new a \"ip\" process\n", "file_path": "src/clone_waitpid_cgroup_memory.rs", "rank": 55, "score": 32612.64710042822 }, { "content": "\n\n Ok(status) => {\n\n println!(\"[main] Child exited with status {:?}.\", status);\n\n break;\n\n }\n\n\n\n Err(err) => panic!(\"[main] waitpid() failed: {}\", err),\n\n }\n\n }\n\n\n\n println!(\"[main] Bye Bye!\");\n\n}\n", "file_path": "src/clone_waitpid_cgroup_memory.rs", "rank": 56, "score": 32607.63147447288 }, { "content": " // sleep(Duration::from_secs(10));\n\n\n\n fs::write(\"/sys/fs/cgroup/foo/cgroup.procs\", pid_string).expect(\"Unable to write file\");\n\n\n\n let data = fs::read_to_string(\"/sys/fs/cgroup/foo/cgroup.procs\").expect(\"Unable to read file\");\n\n \n\n println!(\"[main] read cgroup.procs get {}\", data);\n\n\n\n // println!(\"Wait 10 seconds for the child process to run, after changing cgroup\");\n\n // sleep(Duration::from_secs(10));\n\n\n\n \n\n println!(\"[main] I'll be doing my own stuff while waiting for the child {} termination...\", child_pid);\n\n loop {\n\n match waitpid(child_pid, Some(WaitPidFlag::WNOHANG)) {\n\n Ok(WaitStatus::StillAlive) => {\n\n println!(\"[main] Child is still alive, do my own stuff while waiting.\");\n\n // ... 
replace sleep with the work to be done in main\n\n sleep(Duration::from_millis(500));\n\n }\n", "file_path": "src/clone_waitpid_cgroup_memory.rs", "rank": 57, "score": 32607.22864262408 }, { "content": "use std::process::exit;\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\n\n\nuse nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus};\n\nuse nix::unistd::ForkResult::{Child, Parent};\n\nuse nix::unistd::{fork, getpid, getppid};\n\n\n", "file_path": "src/fork_waitpid.rs", "rank": 58, "score": 23963.085241192624 }, { "content": " getpid(),\n\n child\n\n );\n\n child // return child Pid\n\n }\n\n\n\n Err(err) => {\n\n panic!(\"[main] fork() failed: {}\", err);\n\n }\n\n };\n\n println!(\"[main] I'll be doing my own stuff while waiting for the child termination...\");\n\n loop {\n\n match waitpid(child_pid, Some(WaitPidFlag::WNOHANG)) {\n\n Ok(WaitStatus::StillAlive) => {\n\n println!(\"[main] Child is still alive, do my own stuff while waiting.\");\n\n // ... replace sleep with the payload\n\n sleep(Duration::from_millis(500));\n\n }\n\n\n\n Ok(status) => {\n", "file_path": "src/fork_waitpid.rs", "rank": 59, "score": 23962.786657999248 }, { "content": " println!(\"[main] Child exited with status {:?}.\", status);\n\n break;\n\n }\n\n\n\n Err(err) => panic!(\"[main] waitpid() failed: {}\", err),\n\n }\n\n }\n\n\n\n println!(\"[main] Bye Bye!\");\n\n}\n", "file_path": "src/fork_waitpid.rs", "rank": 60, "score": 23956.035306197522 }, { "content": "# cgroup: cloning and limiting the momory of the cloned child process, and child process require more than limit, expecting error reported: Child exited with status Signaled(Pid(31887), SIGKILL, false).\n\n$ sudo ./target/debug/clone_waitpid_cgroup_memory_fail\n\n\n\n# clone process and sync between child and parent process with signals. 
Child process wait for cgroup setting ready before proceeding to allocate memory, and signal its termination to parent process after done\n\n$ sudo ./target/debug/clone_wait_signal_cgroup_memory\n\n\n\n# clone process and sync between child and parent process with signals. Child process wait for cgroup setting ready before proceeding to allocate memory more than limit, and signal its termination by system.\n\n$ sudo ./target/debug/clone_wait_signal_cgroup_memory_fail\n\n\n\n```\n\n\n", "file_path": "README.md", "rank": 61, "score": 7458.670072970833 }, { "content": "# RUST_clone_cgroup - playing with Clone, Fork, Linux processes termination and Cgroup in Rust\n\n\n\nThe project covers the following scenarios:\n\n\n\n- forking and awaiting the forking-generated child process termination;\n\n- cloning with new namespaces and awaiting the cloning-generated child process termination;\n\n- Through cloned child process delay to wait for cgroup changing, put the cloned child process into a cgroup (cgroup v2) with memory max set to 4096 byte, and child process require less than 4K bytes, expect success.\n\n- Through cloned child process delay to wait for cgroup changing, put the cloned child process into a cgroup (cgroup v2) with memory max set to 4096 byte, and child process require more than 4K bytes, expect failure.\n\n- Through cloned child process synchronize with parent process to wait for cgroup changing, which is to put the cloned child process into a cgroup (cgroup v2) with memory max set to 4096 byte by parent process, and child process require less than 4K bytes, expect success. (clone_wait_signal_cgroup_memory)\n\n- Through cloned child process synchronize with parent process to wait for cgroup changing, which is to put the cloned child process into a cgroup (cgroup v2) with memory max set to 4096 byte by parent process, and child process require MORE than 4K bytes, expect child process termination by system. 
(clone_wait_signal_cgroup_memory_fail)\n\n\n\n## Configuration Before running cgroup scenarios on cgroup v2\n\n```bash\n\n$ sudo su\n\n# Command to set max memory in V2 cgroup as root\n\n# mkdir -p /sys/fs/cgroup/foo\n\n# nano /sys/fs/cgroup/foo/memory.max \n\nReplace \"max\" with \"4096\"\n\n```\n\n\n\n## Usage\n\n```bash\n\n$ cargo build\n\n\n\n# cloning with new namespaces and awaiting the cloning-generated child process termination\n\n$ cargo run --bin clone_waitpid\n\n\n\n# forking and awaiting the forking-generated child process termination\n\n$ cargo run --bin fork_waitpid\n\n\n\n# cgroup: cloning and limiting the momory of the cloned child process\n\n$ sudo ./target/debug/clone_waitpid_cgroup_memory \n\n\n", "file_path": "README.md", "rank": 62, "score": 7454.416237936878 } ]
Rust
gaoya/src/minhash/string_index.rs
serega/gaoya
b0fd049bcb5aa4c8f865462c209b48cf4d78349b
use crate::minhash::min_hasher64::MinHasher64V1; use crate::minhash::{MinHasher, MinHashIndex}; use crate::text::whitespace_split; use rayon::prelude::*; use std::collections::HashMap; use std::fmt; use std::fmt::{Display, Formatter, write}; use std::fs::File; use std::io::{BufRead, BufReader, Error}; use std::io::{Read, Write}; use fnv::FnvBuildHasher; pub struct MinHashStringIndex { lsh_index: MinHashIndex<u64, u64>, min_hash: MinHasher64V1<FnvBuildHasher>, doc_map: HashMap<u64, String>, doc_id: u64, } impl Display for MinHashStringIndex { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "MinHashStringIndex {{ "); self.lsh_index.fmt(f); write!(f, " }} ") } } impl MinHashStringIndex { pub fn new(num_bands: usize, band_width: usize, jaccard_threshold: f64) -> Self { MinHashStringIndex { lsh_index: MinHashIndex::new(num_bands, band_width, jaccard_threshold), min_hash: MinHasher64V1::new(num_bands * band_width), doc_map: HashMap::new(), doc_id: 0, } } pub fn insert(&mut self, text: String) { let min_hashes = self.min_hash.create_signature(whitespace_split(text.as_str())); self.doc_id += 1; self.doc_map.insert(self.doc_id, text); self.lsh_index.insert(self.doc_id, min_hashes); } pub fn query(&self, text: &str) -> Vec<&String> { let min_hashes = self.min_hash.create_signature(whitespace_split(text)); let ids = self.lsh_index.query(&min_hashes); ids.iter().map(|id| self.doc_map.get(id).unwrap()).collect() } pub fn load_from_lines<R: Read>(&mut self, reader: &mut BufReader<R>) { for line_result in reader.lines() { match line_result { Ok(line) => self.insert(line), Err(e) => (), } } } pub fn load_from_file(&mut self, file_name: &str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); self.load_from_lines(&mut reader); let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn load_from_file_parallel(&mut self, file_name: 
&str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); let lines: Vec<(u64, String)> = reader .lines() .enumerate() .map(|v| (v.0 as u64 + self.doc_id, v.1.unwrap())) .collect(); let minhashes = lines .par_iter() .map(|line| { ( line.0, self.min_hash.create_signature(whitespace_split(&line.1)), ) }) .collect(); self.lsh_index.par_bulk_insert_pairs(minhashes); self.doc_id += lines.len() as u64; for line in lines { self.doc_map.insert(line.0, line.1); } let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn size(&self) -> usize { return self.doc_id as usize; } } #[cfg(test)] mod tests { use super::MinHashStringIndex; use std::io::{BufReader, Read, Write}; #[test] fn test_load_from_file() { let strings: Vec<String> = [ "locality sensitive hashing is a cool algorithm", "locality sensitive hashing is a great algorithm", "locality sensitive hashing is a awesome algorithm", "we all scream for ice cream", "we all scream for ice cream", "we all scream for ice cream sandwich", ] .iter() .map(|s| s.to_string()) .collect(); let mut file = Vec::new(); for bytes in strings.iter().map(|s| s.as_bytes()) { file.write_all(&bytes).unwrap(); file.write_all("\n".as_bytes()).unwrap(); } let mut lsh_index = MinHashStringIndex::new(42, 4, 0.5); lsh_index.load_from_lines(&mut BufReader::new(file.as_slice())); assert_eq!(6, lsh_index.size()); println!("{}", lsh_index); let result = lsh_index.query(&strings[0]); assert_eq!(result.len(), 3); assert!(result.contains(&(&strings[0]))); assert!(result.contains(&(&strings[1]))); assert!(result.contains(&(&strings[2]))); } }
use crate::minhash::min_hasher64::MinHasher64V1; use crate::minhash::{MinHasher, MinHashIndex}; use crate::text::whitespace_split; use rayon::prelude::*; use std::collections::HashMap; use std::fmt; use std::fmt::{Display, Formatter, write}; use std::fs::File; use std::io::{BufRead, BufReader, Error}; use std::io::{Read, Write}; use fnv::FnvBuildHasher; pub struct MinHashStringIndex { lsh_index: MinHashIndex<u64, u64>, min_hash: MinHasher64V1<FnvBuildHasher>, doc_map: HashMap<u64, String>, doc_id: u64, } impl Display for MinHashStringIndex { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "MinHashStringIndex {{ "); self.lsh_index.fmt(f); write!(f, " }} ") } } impl MinHashStringIndex { pub fn new(num_bands: usize, band_width: usize, jaccard_threshold: f64) -> Self { MinHashStringIndex { lsh_index: MinHashIndex::new(num_bands, band_width, jaccard_threshold), min_hash: MinHasher64V1::new(num_bands * band_width), doc_map: HashMap::new(), doc_id: 0, } } pub fn insert(&mut self, text: String) { let min_hashes = self.min_hash.create_signature(whitespace_split(text.as_str())); self.doc_id += 1; self.doc_map.insert(self.doc_id, text); self.lsh_index.insert(self.doc_id, min_hashes); } pub fn query(&self, text: &str) -> Vec<&String> { let min_hashes = self.min_hash.create_signature(whitespace_split(text)); let ids = self.lsh_index.query(&min_hashes); ids.iter().map(|id| self.doc_map.get(id).unwrap()).collect() } pub fn load_from_lines<R: Read>(&mut self, reader: &mut BufReader<R>) { for line_result in reader.lines() { match line_result { Ok(line) => self.insert(line), Err(e) => (), } } } pub fn load_from_file(&mut self, file_name: &str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); self.load_from_lines(&mut reader); let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn load_from_file_parallel(&mut self, file_name: 
&str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); let lines: Vec<(u64, String)> = reader .lines() .enumerate() .map(|v| (v.0 as u64 + self.doc_id, v.1.unwrap())) .collect(); let minhashes = lines .par_iter() .map(|line| { ( line.0, self.min_hash.create_signature(whitespace_split(&line.1)), ) }) .collect(); self.lsh_index.par_bulk_insert_pairs(minhashes); self.doc_id += lines.len() as u64; for line in lines { self.doc_map.insert(line.0, line.1); } let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn size(&self) -> usize { return self.doc_id as usize; } } #[cfg(test)] mod tests { use super::MinHashStringIndex; use std::io::{BufReader, Read, Write}; #[test] fn test_load_from_file() {
let mut file = Vec::new(); for bytes in strings.iter().map(|s| s.as_bytes()) { file.write_all(&bytes).unwrap(); file.write_all("\n".as_bytes()).unwrap(); } let mut lsh_index = MinHashStringIndex::new(42, 4, 0.5); lsh_index.load_from_lines(&mut BufReader::new(file.as_slice())); assert_eq!(6, lsh_index.size()); println!("{}", lsh_index); let result = lsh_index.query(&strings[0]); assert_eq!(result.len(), 3); assert!(result.contains(&(&strings[0]))); assert!(result.contains(&(&strings[1]))); assert!(result.contains(&(&strings[2]))); } }
let strings: Vec<String> = [ "locality sensitive hashing is a cool algorithm", "locality sensitive hashing is a great algorithm", "locality sensitive hashing is a awesome algorithm", "we all scream for ice cream", "we all scream for ice cream", "we all scream for ice cream sandwich", ] .iter() .map(|s| s.to_string()) .collect();
assignment_statement
[ { "content": "pub fn shingle_text_range<'a>(text: &'a str, from: usize, to: usize) -> impl Iterator<Item = &'a str> {\n\n MultiShingles::new(text, from, to)\n\n}\n\n\n", "file_path": "gaoya/src/text/tokenizers.rs", "rank": 0, "score": 228558.83057087305 }, { "content": "pub fn shingle_text<'a>(text: &'a str, size: usize) -> impl Iterator<Item = &'a str> {\n\n Shingles::new(text, size)\n\n}\n\n\n\n\n", "file_path": "gaoya/src/text/tokenizers.rs", "rank": 1, "score": 227368.3451176425 }, { "content": "pub fn shingle_tokens<'a>(tokens: &'a Vec<&'a str>, size: usize) -> impl Iterator<Item = String> {\n\n Shingles::new(tokens.as_slice(), size)\n\n .into_iter().map(|tokens| tokens.join(\"\"))\n\n .collect_vec()\n\n .into_iter()\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "gaoya/src/text/tokenizers.rs", "rank": 2, "score": 214450.37363248153 }, { "content": "pub fn whitespace_split<'a>(text: &'a str) -> impl Iterator<Item = &'a str> {\n\n text\n\n .split(|c: char| c.is_ascii_punctuation() || c.is_ascii_whitespace())\n\n .filter(|&x| !x.is_empty())\n\n}\n\n\n", "file_path": "gaoya/src/text/tokenizers.rs", "rank": 3, "score": 210498.1981566255 }, { "content": "/// Calculates number of bands `b` and band width `r` (number of rows) given\n\n/// the minimum `jaccard similarity`, number of hashes `num_hashes`, and desired\n\n/// probability `desired_proba` of two sets with similarity > `jaccard_similarity` to\n\n/// share a bucket\n\n/// For more info see 3.4.2 in http://infolab.stanford.edu/~ullman/mmds/ch3n.pdf\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use gaoya::minhash::calculate_minhash_params;\n\n/// let (b, r) = calculate_minhash_params(0.5, 128);\n\n/// assert_eq!(b, 42);\n\n/// assert_eq!(r, 3);\n\n///\n\n/// let (b, r) = calculate_minhash_params(0.7, 196);\n\n/// assert_eq!(b, 39);\n\n/// assert_eq!(r, 5);\n\n/// ```\n\npub fn calculate_minhash_params(jaccard_similarity: f64, num_hashes: usize)\n\n -> (usize, usize) {\n\n calculate_b_and_r(jaccard_similarity, 
num_hashes, 0.99)\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 4, "score": 198403.64790389698 }, { "content": "pub fn calculate_minhash_params_with_proba(jaccard_similarity: f64, num_hashes: usize, desired_proba: f64)\n\n -> (usize, usize) {\n\n calculate_b_and_r(jaccard_similarity, num_hashes, desired_proba)\n\n}\n\n\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 5, "score": 195457.84985551107 }, { "content": "fn calculate_b_and_r(s: f64, n: usize, p: f64) -> (usize, usize) {\n\n let proba = |b, r| {\n\n 1.0 - (1.0 - s.powf(r)).powf(b)\n\n };\n\n let mut b = n;\n\n let mut r = 1;\n\n while b > 1 {\n\n let r1 = r + 1;\n\n let b1 = n / r1;\n\n if proba(b1 as f64, r1 as f64) > p {\n\n b = b1;\n\n r = r1;\n\n } else {\n\n break;\n\n }\n\n }\n\n (b, r)\n\n}\n\n\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 6, "score": 189257.46294912774 }, { "content": "pub fn shingle_text_boxed<'a>(text: &'a str, size: usize) -> Box<dyn Iterator<Item = &'a str> + 'a> {\n\n Box::new(Shingles::new(text, size))\n\n}\n\n\n\n\n", "file_path": "gaoya/src/text/tokenizers.rs", "rank": 7, "score": 182596.11244406863 }, { "content": "pub fn whitespace_split_boxed<'a>(text: &'a str) -> Box<dyn Iterator<Item = &'a str> + 'a> {\n\n Box::new(text\n\n .split(|c: char| c.is_ascii_punctuation() || c.is_ascii_whitespace())\n\n .filter(|&x| !x.is_empty()))\n\n}\n\n\n", "file_path": "gaoya/src/text/tokenizers.rs", "rank": 8, "score": 163183.81088714892 }, { "content": "/// Calculates jaccard similarity between two minhashes\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use gaoya::minhash::compute_minhash_similarity;\n\n///\n\n/// let m1 = [1, 2, 3, 4, 5, 6];\n\n/// let m2 = [1, 2, 3, 7, 5, 8];\n\n/// assert!((compute_minhash_similarity(&m1, &m2) - 0.666) < 0.01);\n\n///\n\n/// ```\n\npub fn compute_minhash_similarity<T>(min_hashes_1: &[T], min_hashes_2: &[T]) -> f64\n\n where\n\n T: Eq,\n\n{\n\n assert_eq!(min_hashes_1.len(), min_hashes_2.len());\n\n let num_hashes = 
min_hashes_1.len();\n\n let matches: u64 = min_hashes_1\n\n .iter()\n\n .zip(min_hashes_2.iter())\n\n .map(|(min_hash_1, min_hash_2)| (min_hash_1 == min_hash_2) as u64)\n\n .sum();\n\n (matches as f64) / (num_hashes as f64)\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 9, "score": 162387.21625369755 }, { "content": "pub fn compute_minhash_distance<T>(min_hashes_1: &[T], min_hashes_2: &[T]) -> f64\n\n where\n\n T: Eq,\n\n{\n\n 1.0 - compute_minhash_similarity(min_hashes_1, min_hashes_2)\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 10, "score": 162376.1146596774 }, { "content": "pub fn similarity_greater_than_threshold<T>(\n\n min_hashes_1: &[T],\n\n min_hashes_2: &[T],\n\n threshold: f64,\n\n) -> bool\n\n where\n\n T: Eq,\n\n{\n\n assert_eq!(min_hashes_1.len(), min_hashes_2.len());\n\n let num_hashes = min_hashes_1.len();\n\n let expected_matches = (num_hashes as f64 * threshold) as u32;\n\n let mut num_matches: u32 = 0;\n\n for pair in min_hashes_1.iter().zip(min_hashes_2.iter()) {\n\n if pair.0 == pair.1 {\n\n num_matches += 1;\n\n }\n\n if num_matches >= expected_matches {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 11, "score": 146683.68141323078 }, { "content": "struct MinHashBand<T, Id>\n\nwhere\n\n T: MinHashType,\n\n Id: Hash + Eq + Clone,\n\n{\n\n hash_table: FxHashMap<BandKey<T>, FxHashSet<Id>>,\n\n band_start: isize,\n\n band_end: isize,\n\n len: usize,\n\n}\n\n\n\nimpl<T, Id> MinHashBand<T, Id>\n\nwhere\n\n T: MinHashType,\n\n Id: Hash + Eq + Clone,\n\n{\n\n pub fn new(band_start: isize, band_end: isize) -> Self {\n\n let mut hash_table = FxHashMap::default();\n\n hash_table.reserve(1000);\n\n MinHashBand {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 12, "score": 129361.8676801541 }, { "content": "pub fn compute_jaccard_distance<T, U>(iter_1: T, iter_2: T) -> f32\n\n where\n\n T: Iterator<Item=U>,\n\n U: Hash + Eq,\n\n{\n\n 1.0 - 
compute_jaccard_similarity(iter_1, iter_2)\n\n}\n\n\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 13, "score": 117062.40618043588 }, { "content": "pub fn compute_jaccard_similarity<T, U>(iter_1: T, iter_2: T) -> f32\n\n where\n\n T: Iterator<Item=U>,\n\n U: Hash + Eq,\n\n{\n\n let h1 = HashSet::<U>::from_iter(iter_1);\n\n let h2 = HashSet::<U>::from_iter(iter_2);\n\n let intersection_len = h1.intersection(&h2).count();\n\n intersection_len as f32 / (h1.len() + h2.len() - intersection_len) as f32\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 14, "score": 117062.40618043588 }, { "content": "fn fscore(precision: f64, recall: f64) -> f64 {\n\n 2.0 / (1.0 / precision + 1.0 / recall)\n\n}\n\n\n", "file_path": "gaoya-bench/src/metrics.rs", "rank": 15, "score": 110591.9445133039 }, { "content": "fn mean(list: &[f64]) -> f64 {\n\n let sum: f64 = Iterator::sum(list.iter());\n\n f64::from(sum) / (list.len() as f64)\n\n}\n\n\n\nimpl Metrics {\n\n\n\n pub fn new() -> Self {\n\n Metrics { precisions: Vec::new(), recalls: Vec::new(), fscores: Vec::new() }\n\n }\n\n\n\n pub fn update_metrics<T: Eq + Hash, S: BuildHasher>(&mut self, found: &HashSet<T, S>, reference: &HashSet<T>) {\n\n if found.len() == 0 && reference.len() == 0 {\n\n return;\n\n }\n\n let intersection: f64 = reference.iter().map(|i| (found.contains(i) as i32) as f64).sum();\n\n //let intersect = intersection as f64;\n\n let precision = if found.len() == 0 { 0.0 } else { intersection / found.len() as f64 };\n\n let recall = if reference.len() == 0 { 1.0 } else { intersection / reference.len() as f64 };\n\n\n", "file_path": "gaoya-bench/src/metrics.rs", "rank": 16, "score": 110153.68539740489 }, { "content": "fn minhash_band_centroid_from_refs<T>(signatures: &Vec<&Vec<T>>, num_bands: usize, band_size: usize) -> Vec<T>\n\n where\n\n T: Hash + Copy + Eq,\n\n{\n\n let mut band_counters: Vec<HashMap<&[T], usize>> = Vec::new();\n\n for i in 0..num_bands {\n\n 
band_counters.push(HashMap::new());\n\n }\n\n\n\n for signature in signatures.iter() {\n\n for i in 0..num_bands {\n\n let band: &[T] = &signature[i * band_size..(i + 1) * band_size];\n\n let count = band_counters[i].entry(band).or_insert(1);\n\n *count += 1;\n\n }\n\n }\n\n\n\n let mut centroid = Vec::new();\n\n for counter in band_counters {\n\n let most_frequent_band = counter.into_iter()\n\n .max_by(|x, y| x.1.cmp(&y.1))\n\n .unwrap().0;\n\n centroid.extend_from_slice(most_frequent_band);\n\n }\n\n centroid\n\n\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 17, "score": 108184.47563264327 }, { "content": "pub trait MinHasher {\n\n /// The data type of individual hash.\n\n /// This should be one of u-numeric types such as u64, u32, u16, u8\n\n type V: MinHashType;\n\n\n\n fn create_signature<T, U>(&self, iter: T) -> Vec<Self::V>\n\n where\n\n T: Iterator<Item=U>,\n\n U: Hash;\n\n\n\n fn bulk_create_signature<U>(&self, batch: &Vec<Vec<U>>) -> Vec<Vec<Self::V>>\n\n where\n\n U: Hash + Sync,\n\n Self: Sync + Send {\n\n batch\n\n .par_iter()\n\n .map(|tokens| self.create_signature(tokens.iter()))\n\n .collect()\n\n }\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 18, "score": 107018.73797904777 }, { "content": "pub fn init_minhash_module(m: &PyModule) -> PyResult<()> {\n\n m.add_class::<MinHash64StringIntIndex>()?;\n\n m.add_class::<MinHash32StringIntIndex>()?;\n\n m.add_class::<MinHash16StringIntIndex>()?;\n\n m.add_class::<MinHash8StringIntIndex>()?;\n\n Ok(())\n\n}\n", "file_path": "py-gaoya/src/min_hash.rs", "rank": 19, "score": 104467.0009561264 }, { "content": "struct SimHashTable<S, Id>\n\nwhere\n\n Id: Hash + Eq + Clone,\n\n S: SimHashBits,\n\n{\n\n permutation: Permutation<S>,\n\n table: FxHashMap<S, Vec<Id>>,\n\n}\n\n\n\nimpl<S, Id> SimHashTable<S, Id>\n\nwhere\n\n Id: Hash + Eq + Clone,\n\n S: SimHashBits,\n\n{\n\n fn new(permutation: Permutation<S>) -> Self {\n\n SimHashTable {\n\n permutation: permutation,\n\n table: 
FxHashMap::default(),\n\n }\n\n }\n", "file_path": "gaoya/src/simhash/sim_hash_index.rs", "rank": 20, "score": 95419.90411029966 }, { "content": "fn minhash_centroid<T>(signatures: &Vec<Vec<T>>) -> Vec<T>\n\nwhere\n\n T: Hash + Copy + Eq,\n\n{\n\n let signature_len = signatures[0].len();\n\n let mut centroid = Vec::with_capacity(signature_len);\n\n let mut hash_counters: Vec<HashMap<T, usize>> = vec![HashMap::new(); signature_len];\n\n for signature in signatures.iter() {\n\n for (i, hash) in signature.iter().enumerate() {\n\n let count = hash_counters[i].entry(*hash).or_insert(1);\n\n *count += 1;\n\n }\n\n\n\n }\n\n for counter in hash_counters {\n\n let most_frequent_hash = counter.into_iter()\n\n .max_by(|x, y| x.1.cmp(&y.1))\n\n .unwrap().0;\n\n centroid.push(most_frequent_hash);\n\n }\n\n\n\n centroid\n\n\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 21, "score": 93495.11984131942 }, { "content": "fn minhash_centroid_from_refs<T>(signatures: &Vec<&Vec<T>>) -> Vec<T>\n\n where\n\n T: Hash + Copy + Eq,\n\n{\n\n let signature_len = signatures[0].len();\n\n let mut centroid = Vec::with_capacity(signature_len);\n\n let mut hash_counters: Vec<HashMap<T, usize>> = vec![HashMap::new(); signature_len];\n\n for signature in signatures.iter() {\n\n for (i, hash) in signature.iter().enumerate() {\n\n let count = hash_counters[i].entry(*hash).or_insert(1);\n\n *count += 1;\n\n }\n\n\n\n }\n\n for counter in hash_counters {\n\n let most_frequent_hash = counter.into_iter()\n\n .max_by(|x, y| x.1.cmp(&y.1))\n\n .unwrap().0;\n\n centroid.push(most_frequent_hash);\n\n }\n\n\n\n centroid\n\n}\n\n\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 22, "score": 91614.87291668818 }, { "content": "def test_return_similarity():\n\n def _jaccard(s1: Set, s2: Set): return len(s1 & s2) / len(s1 | s2)\n\n index = MinHashStringIndex(32, 0.5, 45, 3, None, 'word', False, (1,1))\n\n corpus = [\n\n \"a b c d e f g h k l m n o p q\",\n\n \"a b c d e f g h k l m n o 
p\",\n\n \"a b c d e f g h k l m n o\",\n\n \"a b c d e f g h k l m n\",\n\n \"1 2 3 4 5 6 9 8 9 10\",\n\n \"1 2 3 4 5 6 9 8 9 10 11 12\",\n\n \"1 2 3 4 5 6 9 8 9 10 11\",\n\n \"1 2 3 4 5 6 9 8 9 10 11 12 13\"\n\n ]\n\n\n\n def check_jaccard(doc1, doc2, value):\n\n true_jaccard = _jaccard(set(doc1.split(\" \")), set(doc2.split(\" \")))\n\n assert abs(true_jaccard - value) < 0.1\n\n\n\n index.par_bulk_insert_docs(list(range(0, len(corpus))), corpus)\n\n assert index.size() == 8\n\n result = index.query(corpus[0], return_similarity=True)\n\n assert len(result) == 4\n\n\n\n assert result[0][1] > result[1][1]\n\n check_jaccard(corpus[0], corpus[result[0][0]], result[0][1])\n\n\n\n assert result[1][1] > result[2][1]\n\n check_jaccard(corpus[0], corpus[result[1][0]], result[1][1])\n\n\n\n assert result[2][1] > result[3][1]\n\n check_jaccard(corpus[0], corpus[result[2][0]], result[2][1])\n\n check_jaccard(corpus[0], corpus[result[3][0]], result[3][1])\n\n\n\n\n\n result = index.query(corpus[7], return_similarity=True)\n\n assert len(result) == 4\n\n\n\n assert result[0][1] > result[1][1]\n\n check_jaccard(corpus[7], corpus[result[0][0]], result[0][1])\n\n\n\n assert result[1][1] > result[2][1]\n\n check_jaccard(corpus[7], corpus[result[1][0]], result[1][1])\n\n\n\n assert result[2][1] > result[3][1]\n\n check_jaccard(corpus[7], corpus[result[2][0]], result[2][1])\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 23, "score": 90883.67956635264 }, { "content": "/// MinHashType can be any integer.\n\npub trait MinHashType: Hash + Send + Sync + PrimInt {}\n\nimpl MinHashType for u64 {}\n\nimpl MinHashType for u32 {}\n\nimpl MinHashType for u16 {}\n\nimpl MinHashType for u8 {}\n\nimpl MinHashType for i64 {}\n\nimpl MinHashType for i32 {}\n\nimpl MinHashType for i16 {}\n\nimpl MinHashType for i8 {}\n\n\n\n\n\n\n\n\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 24, "score": 87304.9449609734 }, { "content": "pub fn init_simhash_module(m: &PyModule) -> 
PyResult<()> {\n\n m.add_class::<SimHash64StringIntIndex>()?;\n\n m.add_class::<SimHash128StringIntIndex>()?;\n\n Ok(())\n\n}\n", "file_path": "py-gaoya/src/sim_hash.rs", "rank": 25, "score": 82249.06789054454 }, { "content": "pub trait QueryIndex {\n\n type Id: Sized;\n\n\n\n fn query(&self, id: &Self::Id) -> HashSet<&Self::Id, FxBuildHasher>;\n\n}\n", "file_path": "gaoya/src/clustering/mod.rs", "rank": 26, "score": 80461.67874514252 }, { "content": "struct BandKey<T: MinHashType> {\n\n v: *const T,\n\n len: usize,\n\n}\n\n\n\nunsafe impl<T: MinHashType> Send for BandKey<T> {}\n\nunsafe impl<T: MinHashType> Sync for BandKey<T> {}\n\n\n\nimpl<T: MinHashType> Eq for BandKey<T> {}\n\n\n\nimpl<T: MinHashType> PartialEq for BandKey<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n unsafe {\n\n for i in 0..self.len {\n\n if *self.v.add(i) != *other.v.add(i) {\n\n return false;\n\n }\n\n }\n\n return true;\n\n };\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 27, "score": 79383.74794690943 }, { "content": "pub trait SimHashBits:\n\n Sized\n\n + Clone\n\n + Copy\n\n + Zero\n\n + One\n\n + Debug\n\n + PartialOrd\n\n + PartialEq\n\n + Not<Output = Self>\n\n + BitAnd<Output = Self>\n\n + BitOr<Output = Self>\n\n + BitXor<Output = Self>\n\n + BitOrAssign\n\n + Shl<usize, Output = Self>\n\n + Shr<usize, Output = Self>\n\n + ShrAssign<usize>\n\n + Hash\n\n + Eq\n\n{\n", "file_path": "gaoya/src/simhash/mod.rs", "rank": 28, "score": 78584.4618748863 }, { "content": "class MinHashStringIndex:\n\n \"\"\"\n\n MinHashStringIndex for indexing and searching text documents based jaccard similarity.\n\n\n\n\n\n Reference: `Chapter 3, Mining of Massive Datasets <http://www.mmds.org/>`\n\n If `hash_size` and `num_bands` is specified `num_hashes` is not used, otherwise\n\n `hash_size` and `num_bands` will be calculated according to S-curve.\n\n\n\n Parameters\n\n ----------\n\n hash_size: int, default=32\n\n The size of individual hashes in bits in minhash signature. 
Supported sizes are (16, 32, 64).\n\n Bigger hashes offer better accuracy, smaller hashes use less memory.\n\n\n\n num_bands: int, default=25\n\n The number of bands\n\n\n\n band_size: int, default=5\n\n The number of hashes in individual band .\n\n The signature length is `num_bands` * `band_size`\n\n The signature length in bytes is\n\n (`num_bands` * `band_size` * `hash_size`) / 8\n\n\n\n num_hashes: int, default=None.\n\n The number of hashes in the signature. The argument is not used when\n\n `num_bands` and `band_size` are provided.\n\n\n\n\n\n jaccard_threshold: float, default=0.75.\n\n The jaccard similarity threshold. The query method will return documents that\n\n have jaccard similarity threshold greater than ``jaccard_threshold``.\n\n\n\n analyzer : {'word', 'char' } or callable, default='word'\n\n To create MinHash signature document must be tokenized into smaller units (features).\n\n Whether the feature should be made of word or character n-grams.\n\n If a callable is passed it is used to extract the sequence of features\n\n out of the raw, unprocessed input. Note, that built-in analyzers are implemented in Rust,\n\n and generally faster that similar implementation in Python.\n\n\n\n lowercase : bool, default=False\n\n Convert all characters to lowercase before tokenizing.\n\n\n\n ngram_range : tuple (min_n, max_n), default=(1, 1)\n\n The lower and upper boundary of the range of n-values for different\n\n n-grams to be extracted. All values of n such that min_n <= n <= max_n\n\n will be used. For example an `ngram_range` of `(1, 1)` means only\n\n unigrams, `(1, 2)` means unigrams and bigrams, and `(2, 2)` means\n\n only bigrams.\n\n Only applies if `analyzer` is not callable.\n\n\n\n Examples\n\n --------\n\n >>> index = gaoya.minhash.MinHashStringIndex(32, 0.5, 42, 3, None, 'word', True, (1,1))\n\n >>> corpus = [\n\n ... 'This is the first document.',\n\n ... 'This document is the second document.',\n\n ... 
'And this is the third document.',\n\n ... 'Is this the first document?',\n\n ... 'This not the first nor the second nor the third, but the fourth document'\n\n ... ]\n\n >>> for i, doc in enumerate(corpus): index.insert_document(i, doc)\n\n >>> for i, doc in enumerate(corpus):\n\n ... if i < 4:\n\n ... assert set(index.query(doc)) == {0, 1, 2, 3}, str(index.query(doc))\n\n ... else:\n\n ... assert set(index.query(doc)) == {4}, str(index.query(doc))\n\n >>>\n\n \"\"\"\n\n\n\n\n\n\n\n def __init__(self, hash_size=32,\n\n jaccard_threshold=0.75,\n\n num_bands=20,\n\n band_size=5,\n\n num_hashes=None,\n\n analyzer='word',\n\n lowercase=False,\n\n ngram_range=None):\n\n if hash_size not in [8, 16, 32, 64]:\n\n raise ValueError(f\"Invalid hash_size {hash_size}. hash_size must be on of 8, 16, 32 or 64\")\n\n if jaccard_threshold < 0.0 or jaccard_threshold > 1.0:\n\n raise ValueError(f\"Jaccard threshold must be between 0 and 1\")\n\n self.analyzer = analyzer\n\n # if analyzer is callable we need to pass something to index's constructor.\n\n analyzer = 'word' if callable(self.analyzer) else analyzer\n\n if hash_size == 64:\n\n self.index = m.MinHash64StringIntIndex(jaccard_threshold, num_bands, band_size, num_hashes, analyzer, lowercase, ngram_range)\n\n elif hash_size == 32:\n\n self.index = m.MinHash32StringIntIndex(jaccard_threshold, num_bands, band_size, num_hashes, analyzer, lowercase, ngram_range)\n\n elif hash_size == 16:\n\n self.index = m.MinHash16StringIntIndex(jaccard_threshold, num_bands, band_size, num_hashes, analyzer, lowercase, ngram_range)\n\n elif hash_size == 8:\n\n self.index = m.MinHash8StringIntIndex(jaccard_threshold, num_bands, band_size, num_hashes, analyzer, lowercase, ngram_range)\n\n else:\n\n raise ValueError(f\"Invalid hash size {hash_size}\")\n\n\n\n\n\n def insert_document(self, id, doc):\n\n \"\"\"\n\n Inserts a document `doc` with id `id` into the index.\n\n\n\n Parameters\n\n ----------\n\n\n\n id: int\n\n Id of the document\n\n doc: 
str\n\n Document text\n\n \"\"\"\n\n if callable(self.analyzer):\n\n self.index.insert_tokens(id, self.analyzer(doc))\n\n else:\n\n self.index.insert_document(id, doc)\n\n\n\n def query(self, doc: str, return_similarity=False) -> Union[List[int], List[Tuple[int, float]]]:\n\n \"\"\"\n\n Searches the index for documents similar to `doc`.\n\n Returns list of similar document ids.\n\n If return_similarity is `True` method returns a list of tuples where the first element\n\n is document id and the second is jaccard similarity. The result is sorted by similarity from\n\n highest to lowest.\n\n\n\n Parameters\n\n ----------\n\n doc: str\n\n return_similarity: Bool, default=False\n\n Whether to return jaccard similarity values\n\n\n\n Returns:\n\n ----------\n\n List of ids or list of tuples\n\n \"\"\"\n\n if callable(self.analyzer):\n\n if return_similarity:\n\n return self.index.query_tokens_return_similarity(self.analyzer(doc))\n\n else:\n\n return self.index.query_tokens(self.analyzer(doc))\n\n else:\n\n if return_similarity:\n\n return self.index.query_return_similarity(doc)\n\n else:\n\n return self.index.query(doc)\n\n\n\n def par_bulk_query(self, docs: List[str], return_similarity=False):\n\n \"\"\"\n\n Searches the index for documents similar to `docs`.\n\n This method uses multiple native threads to execute `query` operation on a batch of documents\n\n Returns list of lists of similar document ids or list of lists of tuples\n\n Parameters\n\n ----------\n\n doc: list\n\n List of strings\n\n return_similarity: Bool, default=False\n\n Whether to return jaccard similarity values\n\n\n\n Returns:\n\n ----------\n\n List Lists of ids or list of lists of tuples\n\n \"\"\"\n\n\n\n if callable(self.analyzer):\n\n analyzed_docs = [self.analyzer(doc) for doc in docs]\n\n if return_similarity:\n\n return self.index.par_bulk_query_tokens_return_similarity(analyzed_docs)\n\n else:\n\n return self.index.par_bulk_query_tokens(analyzed_docs)\n\n else:\n\n if 
return_similarity:\n\n return self.index.par_bulk_query_return_similarity(docs)\n\n else:\n\n return self.index.par_bulk_query(docs)\n\n\n\n\n\n\n\n def par_bulk_insert_docs(self, ids: List[int], docs: List[str]):\n\n \"\"\"\n\n Inserts a batch of documents. This method will use multiple cores to insert a batch\n\n of documents into the index. If analyzer is callable tokenization will be single threaded.\n\n\n\n Parameters\n\n ----------\n\n ids: list\n\n List of ids\n\n\n\n docs: list\n\n List of strings\n\n \"\"\"\n\n if callable(self.analyzer):\n\n tokens = [self.analyzer(doc) for doc in docs]\n\n self.index.bulk_insert_tokens(ids, tokens)\n\n else:\n\n self.index.par_bulk_insert_docs(ids, docs)\n\n\n\n def remove(self, id: int):\n\n \"\"\"\n\n Removes id from the index.\n\n\n\n Currently, this method may not remove the minhash associated with `id` from memory if there is another minhash\n\n with the same value at any band.\n\n To fully remove minhash from memory gaoya needs to be compiled on nightly Rust channel with `--features \"unstable\"`\n\n This will work on the stable when this issue is resolved\n\n https://github.com/rust-lang/rust/issues/56167\n\n\n\n\n\n Parameters\n\n ----------\n\n id: int\n\n Id of the document\n\n \"\"\"\n\n self.index.remove(id)\n\n\n\n def size(self):\n\n \"\"\"\n\n Returns the number of documents in the index\n\n \"\"\"\n\n return self.index.size()\n\n\n\n def __str__(self):\n\n return self.index.__str__()\n\n\n\n def __repr__(self):\n", "file_path": "py-gaoya/gaoya/minhash.py", "rank": 29, "score": 78369.8590353478 }, { "content": "def test_minhash_64bit():\n\n index = MinHashStringIndex(64, jaccard_threshold=0.5, num_bands=20, band_size=5, analyzer=\"word\")\n\n index.insert_document(1, \" a b c d e\")\n\n\n\n assert index.query(\"a b c d e\") == [1]\n\n assert index.query(\"a b c d f\") == [1]\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 30, "score": 72347.3622348972 }, { "content": "def 
test_minhash_16bit():\n\n index = MinHashStringIndex(16, 0.5, 30, 5)\n\n index.insert_document(1, \"a b c d e f\")\n\n index.insert_document(2, \"1 2 3 4 5 6 8\")\n\n\n\n assert index.query(\" a b c d e f\") == [1]\n\n assert index.query(\" a b c d e g\") == [1]\n\n assert index.query(\"a b h g f\") == []\n\n\n\n assert index.query(\"1 2 3 4 5 6 8\") == [2]\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 31, "score": 72347.3622348972 }, { "content": "def test_minhash_32bit():\n\n index = MinHashStringIndex(32, 0.5, 30, 5)\n\n index.insert_document(1, \"a b c d e f\")\n\n index.insert_document(2, \"1 2 3 4 5 6 8\")\n\n\n\n assert index.query(\" a b c d e f\") == [1]\n\n assert index.query(\" a b c d e g\") == [1]\n\n assert index.query(\"a b h g f\") == []\n\n\n\n assert index.query(\"1 2 3 4 5 6 8\") == [2]\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 32, "score": 72347.3622348972 }, { "content": "def test_minhash_8bit():\n\n index = MinHashStringIndex(8, 0.5, 30, 5)\n\n index.insert_document(1, \"a b c d e f\")\n\n index.insert_document(2, \"1 2 3 4 5 6 8\")\n\n\n\n assert index.query(\" a b c d e f\") == [1]\n\n assert index.query(\" a b c d e g\") == [1]\n\n assert index.query(\"a b h g f\") == []\n\n\n\n assert index.query(\"1 2 3 4 5 6 8\") == [2]\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 33, "score": 72347.3622348972 }, { "content": "pub trait AsShingles<'a, T: ?Sized + 'a> {\n\n fn as_shingles(&'a self, from_size: usize, to_size: usize) -> MultiShingles<'a, T>;\n\n}\n\n\n\nimpl<'a, T: 'a> AsShingles<'a, [T]> for [T] {\n\n fn as_shingles(&'a self, from_size: usize, to_size: usize) -> MultiShingles<'a, [T]> {\n\n MultiShingles::new(self, from_size, to_size)\n\n }\n\n}\n\n\n\nimpl<'a> AsShingles<'a, str> for str {\n\n fn as_shingles(&'a self, from_size: usize, to_size: usize) -> MultiShingles<'a, str> {\n\n MultiShingles::new(self, from_size, to_size)\n\n }\n\n}", "file_path": "gaoya/src/text/multi_shingles.rs", "rank": 
34, "score": 71388.3478206864 }, { "content": "def test_minhash_16bit_custom_analyzer():\n\n def split_and_uppercase(doc):\n\n return [token.upper() for token in doc.split(\" \")]\n\n\n\n index = MinHashStringIndex(\n\n hash_size=16,\n\n jaccard_threshold=0.5,\n\n num_bands=40, band_size=5, num_hashes=None,\n\n analyzer=split_and_uppercase,\n\n lowercase=False)\n\n index.insert_document(1, \"a b c d e f g\")\n\n index.insert_document(2, \"foo bar baz\")\n\n\n\n assert index.query(\"A B C D E F G\") == [1]\n\n assert index.query(\"a b c d e f g\") == [1]\n\n assert index.query(\"FOO bar baz\") == [2]\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 35, "score": 70001.53787407342 }, { "content": "mod tokenizers;\n\nmod multi_shingles;\n\n\n\npub use self::tokenizers::whitespace_split;\n\npub use self::tokenizers::whitespace_split_boxed;\n\n\n\npub use self::tokenizers::shingle_text;\n\npub use self::tokenizers::shingle_text_range;\n\npub use self::tokenizers::shingle_text_boxed;\n\npub use self::tokenizers::shingle_tokens;\n\npub use self::multi_shingles::MultiShingles;\n\n\n\n\n", "file_path": "gaoya/src/text/mod.rs", "rank": 36, "score": 66182.75987346511 }, { "content": "def test_documents():\n\n index = MinHashStringIndex(32, 0.5, 42, 3, None, 'word', True, (1,1))\n\n corpus = [\n\n 'This is the first document.',\n\n 'This document is the second document.',\n\n 'And this is the third document.',\n\n 'Is this the first document?',\n\n 'This not the first nor the second nor the third, but the fourth document'\n\n\n\n ]\n\n for i, doc in enumerate(corpus):\n\n index.insert_document(i, doc)\n\n\n\n for i, doc in enumerate(corpus):\n\n if i < 4:\n\n assert set(index.query(doc)) == {0, 1, 2, 3}, str(index.query(doc))\n\n else:\n\n assert set(index.query(doc)) == {4}, str(index.query(doc))\n\n\n\n index.remove(0)\n\n index.remove(4)\n\n\n\n for i, doc in enumerate(corpus):\n\n if i < 4:\n\n assert set(index.query(doc)) == {1, 2, 3}, str(index.query(doc))\n\n 
else:\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 37, "score": 65419.60110511035 }, { "content": "from typing import Set\n\n\n\nfrom gaoya.minhash import MinHashStringIndex\n\n\n\n\n\ndef test_minhash_64bit():\n\n index = MinHashStringIndex(64, jaccard_threshold=0.5, num_bands=20, band_size=5, analyzer=\"word\")\n\n index.insert_document(1, \" a b c d e\")\n\n\n\n assert index.query(\"a b c d e\") == [1]\n\n assert index.query(\"a b c d f\") == [1]\n\n assert index.query(\"a b g h f\") == []\n\n\n\n\n\n\n\ndef test_minhash_32bit():\n\n index = MinHashStringIndex(32, 0.5, 30, 5)\n\n index.insert_document(1, \"a b c d e f\")\n\n index.insert_document(2, \"1 2 3 4 5 6 8\")\n\n\n\n assert index.query(\" a b c d e f\") == [1]\n\n assert index.query(\" a b c d e g\") == [1]\n\n assert index.query(\"a b h g f\") == []\n\n\n\n assert index.query(\"1 2 3 4 5 6 8\") == [2]\n\n assert index.query(\"1 2 3 4 5 6 9 10\") == [2]\n\n\n\ndef test_minhash_16bit():\n\n index = MinHashStringIndex(16, 0.5, 30, 5)\n\n index.insert_document(1, \"a b c d e f\")\n\n index.insert_document(2, \"1 2 3 4 5 6 8\")\n\n\n\n assert index.query(\" a b c d e f\") == [1]\n\n assert index.query(\" a b c d e g\") == [1]\n\n assert index.query(\"a b h g f\") == []\n\n\n\n assert index.query(\"1 2 3 4 5 6 8\") == [2]\n\n assert index.query(\"1 2 3 4 5 6 9 10\") == [2]\n\n\n\n\n\ndef test_minhash_8bit():\n\n index = MinHashStringIndex(8, 0.5, 30, 5)\n\n index.insert_document(1, \"a b c d e f\")\n\n index.insert_document(2, \"1 2 3 4 5 6 8\")\n\n\n\n assert index.query(\" a b c d e f\") == [1]\n\n assert index.query(\" a b c d e g\") == [1]\n\n assert index.query(\"a b h g f\") == []\n\n\n\n assert index.query(\"1 2 3 4 5 6 8\") == [2]\n\n assert index.query(\"1 2 3 4 5 6 9 10\") == [2]\n\n\n\ndef test_minhash_16bit_custom_analyzer():\n\n def split_and_uppercase(doc):\n\n return [token.upper() for token in doc.split(\" \")]\n\n\n\n index = MinHashStringIndex(\n\n hash_size=16,\n\n 
jaccard_threshold=0.5,\n\n num_bands=40, band_size=5, num_hashes=None,\n\n analyzer=split_and_uppercase,\n\n lowercase=False)\n\n index.insert_document(1, \"a b c d e f g\")\n\n index.insert_document(2, \"foo bar baz\")\n\n\n\n assert index.query(\"A B C D E F G\") == [1]\n\n assert index.query(\"a b c d e f g\") == [1]\n\n assert index.query(\"FOO bar baz\") == [2]\n\n assert index.query(\"FOO bar BAZ\") == [2]\n\n\n\n\n\ndef test_documents():\n\n index = MinHashStringIndex(32, 0.5, 42, 3, None, 'word', True, (1,1))\n\n corpus = [\n\n 'This is the first document.',\n\n 'This document is the second document.',\n\n 'And this is the third document.',\n\n 'Is this the first document?',\n\n 'This not the first nor the second nor the third, but the fourth document'\n\n\n\n ]\n\n for i, doc in enumerate(corpus):\n\n index.insert_document(i, doc)\n\n\n\n for i, doc in enumerate(corpus):\n\n if i < 4:\n\n assert set(index.query(doc)) == {0, 1, 2, 3}, str(index.query(doc))\n\n else:\n\n assert set(index.query(doc)) == {4}, str(index.query(doc))\n\n\n\n index.remove(0)\n\n index.remove(4)\n\n\n\n for i, doc in enumerate(corpus):\n\n if i < 4:\n\n assert set(index.query(doc)) == {1, 2, 3}, str(index.query(doc))\n\n else:\n\n assert set(index.query(doc)) == set(), str(index.query(doc))\n\n\n\n\n\n\n\ndef test_return_similarity():\n\n def _jaccard(s1: Set, s2: Set): return len(s1 & s2) / len(s1 | s2)\n\n index = MinHashStringIndex(32, 0.5, 45, 3, None, 'word', False, (1,1))\n\n corpus = [\n\n \"a b c d e f g h k l m n o p q\",\n\n \"a b c d e f g h k l m n o p\",\n\n \"a b c d e f g h k l m n o\",\n\n \"a b c d e f g h k l m n\",\n\n \"1 2 3 4 5 6 9 8 9 10\",\n\n \"1 2 3 4 5 6 9 8 9 10 11 12\",\n\n \"1 2 3 4 5 6 9 8 9 10 11\",\n\n \"1 2 3 4 5 6 9 8 9 10 11 12 13\"\n\n ]\n\n\n\n def check_jaccard(doc1, doc2, value):\n\n true_jaccard = _jaccard(set(doc1.split(\" \")), set(doc2.split(\" \")))\n\n assert abs(true_jaccard - value) < 0.1\n\n\n\n 
index.par_bulk_insert_docs(list(range(0, len(corpus))), corpus)\n\n assert index.size() == 8\n\n result = index.query(corpus[0], return_similarity=True)\n\n assert len(result) == 4\n\n\n\n assert result[0][1] > result[1][1]\n\n check_jaccard(corpus[0], corpus[result[0][0]], result[0][1])\n\n\n\n assert result[1][1] > result[2][1]\n\n check_jaccard(corpus[0], corpus[result[1][0]], result[1][1])\n\n\n\n assert result[2][1] > result[3][1]\n\n check_jaccard(corpus[0], corpus[result[2][0]], result[2][1])\n\n check_jaccard(corpus[0], corpus[result[3][0]], result[3][1])\n\n\n\n\n\n result = index.query(corpus[7], return_similarity=True)\n\n assert len(result) == 4\n\n\n\n assert result[0][1] > result[1][1]\n\n check_jaccard(corpus[7], corpus[result[0][0]], result[0][1])\n\n\n\n assert result[1][1] > result[2][1]\n\n check_jaccard(corpus[7], corpus[result[1][0]], result[1][1])\n\n\n\n assert result[2][1] > result[3][1]\n\n check_jaccard(corpus[7], corpus[result[2][0]], result[2][1])\n\n check_jaccard(corpus[7], corpus[result[3][0]], result[3][1])\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 38, "score": 63820.28481553991 }, { "content": "mod hashers;\n\nmod min_hasher;\n\nmod min_hasher64;\n\n\n\nmod minhash_index;\n\nmod string_index;\n\nmod super_min_hash;\n\n\n\n\n\npub use self::hashers::SipHasher24BuildHasher;\n\npub use self::hashers::Sha1Hasher;\n\npub use self::min_hasher64::MinHasher64V1;\n\npub use self::min_hasher::MinHasher8;\n\npub use self::min_hasher::MinHasher16;\n\npub use self::min_hasher::MinHasher32;\n\npub use self::minhash_index::MinHashIndex;\n\npub use self::string_index::MinHashStringIndex;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::hash::Hash;\n\nuse std::iter::FromIterator;\n\nuse fxhash::FxBuildHasher;\n\nuse num_traits::{AsPrimitive, PrimInt};\n\nuse rayon::prelude::*;\n\n\n\n/// MinHashType can be any integer.\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 39, "score": 
62408.38573189001 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use std::cmp::min;\n\n use crate::minhash::{minhash_centroid, compute_minhash_similarity};\n\n\n\n\n\n #[test]\n\n fn test_min_hash_centroid() {\n\n let min_hashes = vec![\n\n vec![1, 2, 3, 4, 5],\n\n vec![1, 2, 3, 40, 51],\n\n vec![1, 20, 3, 40, 52],\n\n vec![1, 2, 3, 50, 55],\n\n vec![1, 2, 3, 60, 55],\n\n ];\n\n\n\n let centroid = minhash_centroid(&min_hashes);\n\n assert_eq!(vec![1, 2, 3, 40, 55], centroid);\n\n\n\n // the minhash jaccard similarity from centroid to any point should be\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 40, "score": 62403.74639868934 }, { "content": " // greater than pairwise similarity of every point\n\n let pairwise_similarities: Vec<Vec<f64>> = min_hashes.iter()\n\n .map(|m1| {\n\n min_hashes.iter()\n\n .map(|m2| compute_minhash_similarity(m1, m2))\n\n .collect()\n\n\n\n }).collect();\n\n\n\n let sums_similarity_from_points: Vec<f64> = pairwise_similarities.iter()\n\n .map(|similarities| similarities.iter().sum())\n\n .collect();\n\n\n\n let sum_similarity_from_centroid: f64 = min_hashes.iter()\n\n .map(|minhash| compute_minhash_similarity(minhash, &centroid))\n\n .sum();\n\n\n\n for s in sums_similarity_from_points {\n\n assert!(sum_similarity_from_centroid > s);\n\n }\n\n }\n\n}", "file_path": "gaoya/src/minhash/mod.rs", "rank": 41, "score": 62400.276420055234 }, { "content": " fn bulk_create_signature_refs<U>(&self, batch: &Vec<&Vec<U>>) -> Vec<Vec<Self::V>>\n\n where\n\n U: Hash + Sync,\n\n Self: Sync + Send {\n\n batch\n\n .par_iter()\n\n .map(|tokens| self.create_signature(tokens.iter()))\n\n .collect()\n\n }\n\n\n\n\n\n fn compute_similarity<T, U>(&self, iter_1: T, iter_2: T) -> f64\n\n where\n\n T: Iterator<Item=U>,\n\n U: Hash {\n\n compute_minhash_similarity(\n\n &self.create_signature(iter_1),\n\n &self.create_signature(iter_2),\n\n )\n\n }\n\n}\n\n\n", "file_path": "gaoya/src/minhash/mod.rs", "rank": 42, "score": 62399.4748620795 }, { "content": 
" def _jaccard(s1: Set, s2: Set): return len(s1 & s2) / len(s1 | s2)\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 43, "score": 62263.508415908254 }, { "content": " def split_and_uppercase(doc):\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 44, "score": 60783.915477167684 }, { "content": " def check_jaccard(doc1, doc2, value):\n\n true_jaccard = _jaccard(set(doc1.split(\" \")), set(doc2.split(\" \")))\n", "file_path": "py-gaoya/tests/test_minhash.py", "rank": 45, "score": 60783.915477167684 }, { "content": " def __str__(self):\n", "file_path": "py-gaoya/gaoya/minhash.py", "rank": 54, "score": 54888.68783424712 }, { "content": "fn main() {\n\n let mut generator = ClusterGenerator::new(0.6, 200, 30, 5000, 0, 300_000, DifferenceMode::SameIndices);\n\n let generated_clusters = generator.generate();\n\n println!(\"Generated {} clusters\", generated_clusters.len());\n\n let params = (50, 5);\n\n println!(\"{:?}\", params);\n\n\n\n run_clustering(&generated_clusters, MinHasher8::new(params.0 * params.1), params.0, params.1, 0.6);\n\n run_clustering(&generated_clusters, MinHasher16::new(params.0 * params.1), params.0, params.1, 0.6);\n\n run_clustering(&generated_clusters, MinHasher32::new(params.0 * params.1), params.0, params.1, 0.6);\n\n}", "file_path": "gaoya-bench/src/main.rs", "rank": 55, "score": 54410.333863498105 }, { "content": "pub trait SimHasher: Sized {\n\n type T;\n\n fn hash<U>(&self, item: &U) -> Self::T\n\n where\n\n Self: Sized,\n\n U: Hash;\n\n}\n\n\n\npub struct SimSipHasher64 {\n\n key1: u64,\n\n key2: u64,\n\n}\n\n\n\nimpl SimSipHasher64 {\n\n pub fn new(key1: u64, key2: u64) -> Self {\n\n SimSipHasher64 {\n\n key1: key1,\n\n key2: key2,\n\n }\n\n }\n", "file_path": "gaoya/src/simhash/sim_hasher.rs", "rank": 56, "score": 47129.759879247824 }, { "content": "#[pymodule]\n\nfn gaoya(py: Python, module: &PyModule) -> PyResult<()> {\n\n let minhash_module = PyModule::new(py, \"minhash\")?;\n\n 
init_minhash_module(minhash_module)?;\n\n module.add_submodule(minhash_module)?;\n\n\n\n let simhash_module = PyModule::new(py, \"simhash\")?;\n\n init_simhash_module(simhash_module)?;\n\n module.add_submodule(simhash_module)?;\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum TokenizerSpecification {\n\n CharShingle((usize, Option<usize>)),\n\n WhiteSpace(),\n\n WhiteSpaceShingle((usize, Option<usize>)),\n\n}\n\n\n\nimpl TokenizerSpecification {\n\n pub fn new(name: &str, range: Option<(usize, usize)>) -> Self {\n", "file_path": "py-gaoya/src/lib.rs", "rank": 57, "score": 40551.36217484217 }, { "content": "fn run_clustering<M: MinHasher>(generated_clusters: &Vec<GeneratedCluster>,\n\n minhash: M,\n\n num_bands: usize, band_width: usize, jaccard_threshold: f64)\n\n where M::V: Clone, M: Sync + Send {\n\n println!(\"Creating index {}\", std::any::type_name::<M>());\n\n let mut lsh = MinHashIndex::new(num_bands, band_width, jaccard_threshold);\n\n let mut ids = Vec::new();\n\n let mut vals = Vec::new();\n\n for cluster in generated_clusters {\n\n for pair in cluster.points.iter() {\n\n ids.push(pair.0.clone());\n\n vals.push(pair.1);\n\n }\n\n }\n\n let hashes = minhash.bulk_create_signature_refs(&vals);\n\n let ids = ids.par_iter()\n\n .map(|id| ClusterPoint::new(ClusterPointInner::new(id.clone()))).collect();\n\n lsh.par_bulk_insert(ids, hashes);\n\n println!(\"Starting clustering {}\", lsh);\n\n let clusterer = Clusterer::<u32>::new(50, 10);\n", "file_path": "gaoya-bench/src/main.rs", "rank": 58, "score": 39195.251045703124 }, { "content": "\n\n fn query(&self, id: &Self::Id) -> HashSet<&Self::Id, FxBuildHasher> {\n\n match self.id_signatures.get(id) {\n\n Some(signature) => {\n\n self::MinHashIndex::query(self, signature)\n\n }\n\n None => HashSet::default()\n\n }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::minhash::min_hasher64::MinHasher64V1;\n\n use crate::minhash::{calculate_b_and_r, calculate_minhash_params, MinHasher, 
MinHashIndex};\n\n use rand::distributions::{Distribution, Uniform};\n\n use rand::prelude::ThreadRng;\n\n use rand::{thread_rng, Rng};\n\n use std::borrow::Borrow;\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 59, "score": 35937.76314232664 }, { "content": " T: MinHashType,\n\n Id: Hash + Eq + Clone,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"MinHashIndex<{}> {{ threshold = {}, num_hashes = {}, bands = {}, rows_per_band = {}, size = {} }}\",\n\n type_name::<T>(),\n\n self.threshold, self.b * self.r, self.b, self.r, self.size)\n\n }\n\n}\n\n\n\nimpl<T, Id> MinHashIndex<T, Id>\n\nwhere\n\n T: MinHashType,\n\n Id: Hash + Eq + Clone,\n\n{\n\n /// Create a new MinHashIndex\n\n pub fn new(num_bands: usize, band_width: usize, jaccard_threshold: f64) -> Self {\n\n let mut bands = Vec::new();\n\n for i in 0..num_bands {\n\n let (start, end) = (i * band_width, (i + 1) * band_width);\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 60, "score": 35937.017640302016 }, { "content": " }\n\n\n\n pub fn par_bulk_query_return_similarity(&self, signatures: &Vec<Vec<T>>) -> Vec<Vec<(Id, f64)>>\n\n where\n\n Id: Hash + Eq + Clone + Send + Sync,\n\n T: Send + Sync\n\n {\n\n signatures.par_iter()\n\n .map(|signature| self.query_owned_return_similarity(signature))\n\n .collect()\n\n }\n\n\n\n\n\n pub fn query_owned_return_similarity(&self, query_signature: &Vec<T>) -> Vec<(Id, f64)>\n\n where\n\n Id: Hash + Eq + Clone,\n\n {\n\n let mut match_ids = HashSet::with_capacity_and_hasher(10, FxBuildHasher::default());\n\n for band in &self.bands {\n\n band.query_to_owned(query_signature, &mut match_ids);\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 61, "score": 35936.999729814925 }, { "content": " .collect();\n\n\n\n for id in fully_removed_ids {\n\n self.removed_ids.remove(&id);\n\n self.id_signatures.remove(&id);\n\n }\n\n }\n\n\n\n pub fn size(&self) -> usize {\n\n self.size\n\n }\n\n\n\n pub fn 
num_perms(&self) -> usize {\n\n self.b * self.r\n\n }\n\n\n\n /// This method filters candidates by measuring similarity between query_minhash and\n\n /// each candidate using full minhash similarity measure.\n\n fn filter_by_minhash_similarity<'a>(\n\n &self,\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 62, "score": 35936.5190075777 }, { "content": " }\n\n if self.removed_ids.len() > 0 {\n\n match_ids.retain(|item| !self.removed_ids.contains(item));\n\n }\n\n let mut result = Vec::new();\n\n for id in match_ids.into_iter() {\n\n let signature = &self.id_signatures[&id];\n\n let similarity = compute_minhash_similarity(signature, query_signature);\n\n if similarity >= self.threshold {\n\n result.push((id, similarity))\n\n }\n\n }\n\n result.sort_unstable_by(|a, b| b.1.partial_cmp(&a.1).unwrap());\n\n\n\n result\n\n }\n\n\n\n\n\n pub fn query_top_k(&self, query_signature: &Vec<T>, k: usize) -> Vec<(Id, f64)> {\n\n let mut match_ids = HashSet::with_capacity_and_hasher(10, FxBuildHasher::default());\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 63, "score": 35935.24013668063 }, { "content": " /// Used by centroid calculation to choose the most optimal\n\n /// band portion of the hash\n\n fn find_signature_with_highest_recall<'a>(\n\n &'a self,\n\n signatures: &Vec<&[T]>,\n\n all_ids: &mut HashSet<&'a Id>,\n\n ) -> Option<usize> {\n\n let mut max_count: usize = 0;\n\n let mut best_index: isize = -1;\n\n for minhash in signatures.iter().enumerate() {\n\n let band_key = BandKey {\n\n v: minhash.1.as_ptr(),\n\n len: self.len,\n\n };\n\n match self.hash_table.get(&band_key) {\n\n Some(ids) => {\n\n let new_count = ids.iter()\n\n .map(|id| !all_ids.contains(&id) as usize)\n\n .count();\n\n if new_count > max_count {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 64, "score": 35934.480634106934 }, { "content": " assert_eq!(c.len(), v.len());\n\n for i in indices.iter().zip(changes.iter()) {\n\n c[*i.0] = i.1.clone();\n\n 
}\n\n c\n\n })\n\n .collect()\n\n }\n\n\n\n #[test]\n\n pub fn test_lsh_index_batch_construction2() {\n\n let (b, r) = calculate_minhash_params(0.5, 128);\n\n let min_hash = MinHasher64V1::new(128);\n\n let mut lsh_index: MinHashIndex<u64, u64> = MinHashIndex::new(b, r, 0.5);\n\n\n\n let mut vecs = Vec::new();\n\n let rand_range = Uniform::from(1..100000);\n\n let mut rng = thread_rng();\n\n let v1: Vec<u64> = (0..1000).map(|_| rand_range.sample(&mut rng)).collect();\n\n let v2: Vec<u64> = (0..1000).map(|_| rand_range.sample(&mut rng)).collect();\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 65, "score": 35934.247550913096 }, { "content": " ///\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use gaoya::minhash::{MinHasher,MinHasher16, MinHashIndex};\n\n /// use gaoya::text::whitespace_split;\n\n ///\n\n /// let mut index = MinHashIndex::new(33, 3, 0.6);\n\n /// let minhasher = MinHasher16::new(33 * 3);\n\n /// let signature1 = minhasher.create_signature(whitespace_split(\"This is the first minhashed document\"));\n\n /// let signature2 = minhasher.create_signature(whitespace_split(\"This is the second minhashed document\"));\n\n /// let query = signature1.clone();\n\n /// index.insert(1u32, signature1);\n\n /// index.insert(2u32, signature2);\n\n /// assert_eq!(index.query_owned(&query).into_iter().collect::<Vec<_>>(), vec![1,2]);\n\n /// assert_eq!(index.remove(&1), true);\n\n /// assert_eq!(index.remove(&1), false);\n\n /// ```\n\n pub fn remove(&mut self, id: &Id) -> bool {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 66, "score": 35933.77067607591 }, { "content": "/// ```\n\n#[derive()]\n\npub struct MinHashIndex<T, Id>\n\n where\n\n T: MinHashType,\n\n Id: Hash + Eq + Clone,\n\n{\n\n bands: Vec<MinHashBand<T, Id>>,\n\n removed_ids: HashSet<Id>,\n\n id_signatures: FxHashMap<Id, Vec<T>>,\n\n threshold: f64,\n\n r: usize,\n\n b: usize,\n\n size: usize,\n\n}\n\n\n\nstatic REMOVED_KEYS_COUNT_CLEAN_TRIGGER: usize = 
1000;\n\n\n\nimpl<T, Id> fmt::Display for MinHashIndex<T, Id>\n\nwhere\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 67, "score": 35933.487373206684 }, { "content": " for band in &self.bands {\n\n band.query_to_owned(query_signature, &mut match_ids);\n\n }\n\n if self.removed_ids.len() > 0 {\n\n match_ids.retain(|item| !self.removed_ids.contains(item));\n\n }\n\n let mut ids_distances: Vec<(Id, f64)> = match_ids\n\n .into_iter()\n\n .map(|id| {\n\n let signature = &self.id_signatures[&id];\n\n let distance = compute_minhash_distance(query_signature, signature);\n\n (id, distance)\n\n })\n\n .collect();\n\n ids_distances.sort_unstable_by(|a, b| a.1.partial_cmp(&b.1).unwrap());\n\n ids_distances[0..std::cmp::min(ids_distances.len(), k)].to_vec()\n\n }\n\n\n\n /// Removes a key from the index, returning true if the key\n\n /// was previously in the index.\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 68, "score": 35933.20579025733 }, { "content": " #[test]\n\n pub fn test_lsh_index_batch_construction() {\n\n let (b, r) = calculate_minhash_params(0.5, 200);\n\n let min_hash = MinHasher64V1::new(b * r);\n\n let mut lsh_index: MinHashIndex<u64, u64> = MinHashIndex::new(b, r, 0.5);\n\n let mut signatures: Vec<(u64, Vec<u64>)> = Vec::new();\n\n signatures.push((1, min_hash.create_signature(S1.split_whitespace())));\n\n signatures.push((2, min_hash.create_signature(S2.split_whitespace())));\n\n signatures.push((3, min_hash.create_signature(S3.split_whitespace())));\n\n signatures.push((4, min_hash.create_signature(S4.split_whitespace())));\n\n signatures.push((5, min_hash.create_signature(S5.split_whitespace())));\n\n signatures.push((6, min_hash.create_signature(S6.split_whitespace())));\n\n\n\n lsh_index.par_bulk_insert_pairs(signatures);\n\n assert_eq!(lsh_index.size, 6);\n\n\n\n let ret = lsh_index.query(&min_hash.create_signature(S2.split_whitespace()));\n\n\n\n let ret_str: String = ret.iter().map(|x| x.to_string()).collect();\n\n 
assert_eq!(ret.len(), 3, \"{}\", ret_str);\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 69, "score": 35932.74251112146 }, { "content": " band.query_to_owned(query_signature, &mut match_ids);\n\n }\n\n if self.removed_ids.len() > 0 {\n\n match_ids.retain(|item| !self.removed_ids.contains(item));\n\n }\n\n match_ids.retain(|id| {\n\n let signature = &self.id_signatures[id];\n\n compute_minhash_similarity(signature, query_signature) >= self.threshold\n\n });\n\n match_ids\n\n }\n\n\n\n pub fn par_bulk_query(&self, signatures: &Vec<Vec<T>>) -> Vec<HashSet<Id, FxBuildHasher>>\n\n where\n\n Id: Hash + Eq + Clone + Send + Sync,\n\n T: Send + Sync\n\n {\n\n signatures.par_iter()\n\n .map(|signature| self.query_owned(signature))\n\n .collect()\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 70, "score": 35932.464098718265 }, { "content": " bands.push(MinHashBand::<T, Id>::new(start as isize, end as isize));\n\n }\n\n let mut hash_table = FxHashMap::default();\n\n hash_table.reserve(1000);\n\n MinHashIndex {\n\n bands: bands,\n\n removed_ids: HashSet::new(),\n\n threshold: jaccard_threshold,\n\n id_signatures: hash_table,\n\n b: num_bands,\n\n r: band_width,\n\n size: 0,\n\n }\n\n }\n\n\n\n pub fn new_with_capacity(num_bands: usize, band_width: usize,\n\n jaccard_threshold: f64, initial_capacity: usize) -> Self {\n\n let mut bands = Vec::new();\n\n for i in 0..num_bands {\n\n let (start, end) = (i * band_width, (i + 1) * band_width);\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 71, "score": 35931.81138077608 }, { "content": " use fxhash::FxHashSet;\n\n use crate::minhash::min_hasher::MinHasher32;\n\n use crate::text::whitespace_split;\n\n\n\n static S1: &'static str = \"local sensitive hashing is cool\";\n\n static S2: &'static str = \"local sensitive hashing is great\";\n\n static S3: &'static str = \"local sensitive hashing is awesome\";\n\n static S4: &'static str = \"we all scream for ice cream\";\n\n static S5: &'static 
str = \"we all scream for ice cream sandwich\";\n\n static S6: &'static str = \"i like ice cream sandwich\";\n\n\n\n #[test]\n\n pub fn test_lsh_index() {\n\n let (b, r) = calculate_minhash_params(0.5, 200);\n\n let min_hash = MinHasher64V1::new(b * r);\n\n let mut lsh_index = MinHashIndex::new(b, r, 0.5);\n\n lsh_index.insert(1, min_hash.create_signature(S1.split_whitespace()));\n\n lsh_index.insert(2, min_hash.create_signature(S2.split_whitespace()));\n\n lsh_index.insert(3, min_hash.create_signature(S3.split_whitespace()));\n\n lsh_index.insert(4, min_hash.create_signature(S4.split_whitespace()));\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 72, "score": 35931.71106767172 }, { "content": " let band_data = unsafe { signature.as_ptr().offset(self.band_start) };\n\n let band_key = BandKey {\n\n v: band_data,\n\n len: (self.band_end - self.band_start) as usize,\n\n };\n\n match self.hash_table.get(&band_key) {\n\n Some(ids) => ids.len() > 0,\n\n None => false,\n\n }\n\n }\n\n\n\n pub fn shrink_to_fit(&mut self) {\n\n for item in self.hash_table.iter_mut() {\n\n item.1.shrink_to_fit();\n\n }\n\n self.hash_table.shrink_to_fit();\n\n }\n\n}\n\n\n\n/// Data Structure to index minhashes into bands.\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 73, "score": 35931.61458169403 }, { "content": "use crate::minhash::{calculate_b_and_r, compute_minhash_distance, compute_minhash_similarity, minhash_band_centroid_from_refs, minhash_centroid, MinHashType};\n\nuse fxhash::FxBuildHasher;\n\nuse fxhash::FxHashMap;\n\nuse fxhash::FxHashSet;\n\nuse rayon::prelude::*;\n\nuse std::any::type_name;\n\nuse std::collections::{BinaryHeap, HashMap, HashSet};\n\nuse std::hash::{BuildHasher, Hash, Hasher};\n\nuse std::{fmt, slice};\n\n#[cfg(all(feature = \"unstable\"))]\n\nuse std::collections::hash_map::RawEntryMut;\n\n\n\nuse std::fmt::{Display, Formatter};\n\nuse std::ops::Range;\n\nuse itertools::Itertools;\n\nuse 
crate::clustering::QueryIndex;\n\n\n\n\n\n\n\n/*\n\nMinHashIndex stores all minhashes as Vec<T> in a hashmap, and uses unsafe pointer arithmetic\n\nto access the band portion of the minhash directly in the vector.\n\n\n\nHaving full simhashes is useful for computing a centroid of a cluster. The unsafe data structure\n\ngives free access to whole minhashes, and bands without sacrificing neither performance nor\n\nmemory utilization.\n\n */\n\n\n\n\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 74, "score": 35931.48980205491 }, { "content": " }\n\n\n\n pub fn shrink_to_fit(&mut self)\n\n where Id: Send + Sync,\n\n T: Send + Sync\n\n {\n\n self.bands.par_iter_mut()\n\n .for_each(|band| band.shrink_to_fit());\n\n self.id_signatures.shrink_to_fit();\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.bands.iter_mut().for_each(|band| band.clear());\n\n self.id_signatures.clear();\n\n }\n\n\n\n pub fn query_one(&self, query_signature: &Vec<T>) -> Option<&Id> {\n\n let mut match_ids = HashSet::with_capacity_and_hasher(10, FxBuildHasher::default());\n\n for band in &self.bands {\n\n band.query(query_signature, &mut match_ids);\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 75, "score": 35930.97397270692 }, { "content": " where\n\n Id: Hash + Eq + Clone,\n\n {\n\n let mut match_ids = HashSet::with_capacity_and_hasher(10, FxBuildHasher::default());\n\n for band in &self.bands {\n\n band.query(query_signature, &mut match_ids);\n\n }\n\n\n\n if self.removed_ids.len() > 0 {\n\n match_ids.retain(|item| !self.removed_ids.contains(item));\n\n }\n\n\n\n match_ids.retain(|id| {\n\n let signature = &self.id_signatures[id];\n\n compute_minhash_similarity(signature, query_signature) >= self.threshold\n\n });\n\n\n\n match_ids\n\n }\n\n\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 76, "score": 35930.75498293709 }, { "content": " .filter(|option| option.is_some())\n\n .map(|option| option.unwrap())\n\n .collect();\n\n 
minhash_band_centroid_from_refs(&signatures, self.b, self.r)\n\n }\n\n\n\n pub fn calculate_centroid_experimental<I>(&self, ids: I) -> Vec<T>\n\n where\n\n I: Iterator<Item = Id> {\n\n let mut bands: Vec<HashSet<&[T]>> = Vec::new();\n\n for i in 0..self.b {\n\n bands.push(HashSet::new());\n\n }\n\n let mut first_signature = None;\n\n for id in ids {\n\n let mut signature = self.id_signatures.get(&id).unwrap();\n\n for i in 0..self.b {\n\n let band: &[T] = &signature[self.band_range(i)];\n\n bands[i].insert(band);\n\n }\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 77, "score": 35930.48841199038 }, { "content": " }\n\n\n\n pub fn get_keys_refs(&self) -> Vec<&Id> {\n\n self.bands[0].hash_table.values()\n\n .into_iter().flat_map(|s| s.iter())\n\n .collect()\n\n }\n\n\n\n\n\n\n\n pub fn insert(&mut self, id: Id, signature: Vec<T>) {\n\n for band in &mut self.bands {\n\n band.insert(id.clone(), &signature);\n\n }\n\n self.id_signatures.insert(id, signature);\n\n self.size += 1;\n\n }\n\n\n\n pub fn par_bulk_insert(&mut self, ids: Vec<Id>, signatures: Vec<Vec<T>>)\n\n where\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 78, "score": 35930.32434750564 }, { "content": " None\n\n }\n\n }\n\n\n\n /// Removes id from the band\n\n /// Returns true if the band portion of the signature is not in the hashtable\n\n #[cfg(not(feature = \"unstable\"))]\n\n fn remove(&mut self, id: &Id, signature: &Vec<T>) -> bool {\n\n let band_data = unsafe { signature.as_ptr().offset(self.band_start) };\n\n let band_key = BandKey {\n\n v: band_data,\n\n len: (self.band_end - self.band_start) as usize,\n\n };\n\n\n\n match self.hash_table.get_mut(&band_key) {\n\n Some(ids) => {\n\n ids.remove(id);\n\n if ids.is_empty() {\n\n self.hash_table.remove(&band_key);\n\n true\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 79, "score": 35930.225254782556 }, { "content": " assert!(ret.contains(&1));\n\n assert!(ret.contains(&2));\n\n 
assert!(ret.contains(&3));\n\n }\n\n\n\n fn random_change_values(\n\n v: &Vec<u64>,\n\n num_changes: usize,\n\n num_vecs: usize,\n\n rng: &mut ThreadRng,\n\n ) -> Vec<Vec<u64>> {\n\n let rand_range = Uniform::from(1..100000);\n\n let index_rand_range = Uniform::from(0..1000);\n\n (0..num_vecs)\n\n .map(|_| {\n\n let indices: Vec<usize> = (0..num_changes)\n\n .map(|_| index_rand_range.sample(rng))\n\n .collect();\n\n let changes: Vec<u64> = (0..num_changes).map(|_| rand_range.sample(rng)).collect();\n\n let mut c = v.clone();\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 80, "score": 35929.650130575064 }, { "content": " }\n\n\n\n if self.removed_ids.len() > 0 {\n\n match_ids.retain(|item| !self.removed_ids.contains(item));\n\n }\n\n\n\n let best_match = match_ids.into_iter()\n\n .map(|id| {\n\n let signature = &self.id_signatures[id];\n\n (id, compute_minhash_similarity(signature, query_signature))\n\n })\n\n .filter(|pair| pair.1 > self.threshold)\n\n .max_by(|x, y| x.1.partial_cmp(&y.1).unwrap());\n\n match best_match {\n\n Some(pair) => Some(pair.0),\n\n None => None\n\n }\n\n }\n\n\n\n pub fn query(&self, query_signature: &Vec<T>) -> HashSet<&Id, FxBuildHasher>\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 81, "score": 35929.58359514962 }, { "content": " let removed = match self.id_signatures.get(id) {\n\n Some(hashes) => {\n\n if self.removed_ids.contains(id) {\n\n return false;\n\n }\n\n let fully_removed = self\n\n .bands\n\n .iter_mut()\n\n .map(|band| band.remove(id, hashes) as usize)\n\n .sum::<usize>()\n\n == self.b;\n\n if fully_removed {\n\n self.id_signatures.remove(id);\n\n } else {\n\n self.removed_ids.insert(id.clone());\n\n }\n\n self.size -= 1;\n\n true\n\n }\n\n None => false,\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 82, "score": 35929.53823971635 }, { "content": " hash_table: hash_table,\n\n band_start: band_start,\n\n band_end: band_end,\n\n len: (band_end - band_start) as usize,\n\n }\n\n 
}\n\n\n\n pub fn new_with_capacity(band_start: isize, band_end: isize, capacity: usize) -> Self {\n\n let mut hash_table = FxHashMap::default();\n\n hash_table.reserve(capacity);\n\n MinHashBand {\n\n hash_table: hash_table,\n\n band_start: band_start,\n\n band_end: band_end,\n\n len: (band_end - band_start) as usize,\n\n }\n\n }\n\n\n\n\n\n fn insert(&mut self, id: Id, signature: &Vec<T>) {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 83, "score": 35929.45931775277 }, { "content": " pub fn query_by_id(&self, id: &Id) -> HashSet<&Id, FxBuildHasher> {\n\n match self.id_signatures.get(id) {\n\n Some(signature) => self.query(signature),\n\n None => HashSet::default()\n\n }\n\n }\n\n\n\n pub fn query_by_id_owned(&self, id: &Id) -> HashSet<Id, FxBuildHasher> {\n\n match self.id_signatures.get(id) {\n\n Some(signature) => self.query_owned(signature),\n\n None => HashSet::default()\n\n }\n\n }\n\n\n\n pub fn query_owned(&self, query_signature: &Vec<T>) -> HashSet<Id, FxBuildHasher>\n\n where\n\n Id: Hash + Eq + Clone,\n\n {\n\n let mut match_ids = HashSet::with_capacity_and_hasher(10, FxBuildHasher::default());\n\n for band in &self.bands {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 84, "score": 35929.41037355423 }, { "content": " max_count = new_count;\n\n best_index = minhash.0 as isize;\n\n }\n\n }\n\n None => (),\n\n }\n\n }\n\n let band_key = BandKey {\n\n v: signatures[best_index as usize].as_ptr(),\n\n len: self.len,\n\n };\n\n match self.hash_table.get(&band_key) {\n\n Some(ids) => {\n\n all_ids.extend(ids.iter())\n\n }\n\n None => (),\n\n }\n\n if best_index >= 0 {\n\n Some(best_index as usize)\n\n } else {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 85, "score": 35929.23586241738 }, { "content": " None => (),\n\n }\n\n }\n\n\n\n fn query_to_owned<S: BuildHasher>(&self, signature: &Vec<T>, match_ids: &mut HashSet<Id, S>) {\n\n let band_data = unsafe { signature.as_ptr().offset(self.band_start) };\n\n let 
band_key = BandKey {\n\n v: band_data,\n\n len: self.len,\n\n };\n\n match self.hash_table.get(&band_key) {\n\n Some(ids) => {\n\n match_ids.extend(ids.iter().cloned());\n\n }\n\n None => (),\n\n }\n\n }\n\n\n\n /// Returns the index of signature that gives highest recall\n\n /// of this band on points that are not in all_ids.\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 86, "score": 35928.98078414236 }, { "content": "\n\n match first_signature {\n\n None => {\n\n first_signature.insert(signature);\n\n }\n\n Some(_) => {}\n\n };\n\n }\n\n let first_signature = first_signature.unwrap();\n\n let mut all_ids = HashSet::new();\n\n let mut centroid_signature = Vec::new();\n\n for i in 0..self.b {\n\n let band: &MinHashBand<T, Id> = &self.bands[i];\n\n let band_signatures: Vec<&[T]> = bands[i].iter().map(|k| *k).collect();\n\n let index = band.find_signature_with_highest_recall(&band_signatures, &mut all_ids);\n\n match index {\n\n Some(index) => {\n\n centroid_signature.extend_from_slice(&band_signatures[index]);\n\n }\n\n None => {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 87, "score": 35928.769278373205 }, { "content": "\n\n lsh_index.clean_removed();\n\n assert_eq!(lsh_index.removed_ids.len(), 1);\n\n assert_eq!(lsh_index.removed_ids, vec![1].into_iter().collect());\n\n\n\n lsh_index.remove(&3);\n\n lsh_index.remove(&4);\n\n lsh_index.clean_removed();\n\n assert_eq!(lsh_index.removed_ids.len(), 0);\n\n assert_eq!(lsh_index.size(), 0);\n\n }\n\n\n\n #[cfg(not(feature = \"unstable\"))]\n\n #[test]\n\n pub fn test_remove() {\n\n let mut lsh_index = MinHashIndex::new(4, 2, 0.5);\n\n lsh_index.insert(1, vec![1, 1, 1, 1, 1, 1, 1, 1]);\n\n lsh_index.insert(2, vec![1, 1, 1, 1, 1, 1, 1, 1]);\n\n\n\n lsh_index.insert(3, vec![1, 1, 1, 1, 1, 1, 2, 2]);\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 88, "score": 35928.75728199191 }, { "content": " let v3: Vec<u64> = (0..1000).map(|_| rand_range.sample(&mut 
rng)).collect();\n\n assert_eq!(v1.len(), 1000);\n\n vecs.push(v1.clone());\n\n vecs.extend_from_slice(random_change_values(&v1, 100, 99, &mut rng).as_slice());\n\n vecs.push(v2.clone());\n\n vecs.extend_from_slice(random_change_values(&v2, 50, 99, &mut rng).as_slice());\n\n vecs.push(v3.clone());\n\n vecs.extend_from_slice(random_change_values(&v3, 10, 99, &mut rng).as_slice());\n\n\n\n let mut ids: Vec<u64> = (0..300).collect();\n\n let signatures = vecs\n\n .iter()\n\n .map(|v| min_hash.create_signature(v.iter()))\n\n .collect();\n\n\n\n assert_eq!(vecs.len(), ids.len());\n\n lsh_index.par_bulk_insert(ids, signatures);\n\n assert_eq!(lsh_index.size, 300);\n\n\n\n let ret = lsh_index.query(&min_hash.create_signature(v1.iter()));\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 89, "score": 35928.64516385889 }, { "content": "\n\n #[test]\n\n pub fn test_example() {\n\n let corpus = [\n\n \"This is the first document.\",\n\n \"This document is the second document.\",\n\n \"And this is the third document.\",\n\n \"Is this the first document?\",\n\n \"This not the first nor the second nor the third, but the fourth document\"];\n\n let minhasher = MinHasher32::new(42 * 3);\n\n let mut index = MinHashIndex::new(42, 3, 0.5);\n\n for (i, doc) in corpus.iter().enumerate() {\n\n index.insert(i, minhasher.create_signature(whitespace_split(&doc.to_lowercase())));\n\n }\n\n for (i, doc) in corpus.iter().enumerate() {\n\n if i < 4 {\n\n let mut expected = FxHashSet::default();\n\n expected.extend(vec![0, 1, 2, 3].into_iter());\n\n assert_eq!(index.query_owned(&minhasher.create_signature(whitespace_split(&doc.to_lowercase()))), expected);\n\n } else {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 90, "score": 35928.03419402403 }, { "content": " query_signature: &Vec<T>,\n\n candidates: HashSet<&'a Id>,\n\n ) -> HashSet<&'a Id> {\n\n let mut result = HashSet::new();\n\n for candidate in candidates {\n\n let candidate_signature = 
&self.id_signatures[candidate];\n\n let similarity = compute_minhash_distance(query_signature, candidate_signature);\n\n if similarity >= self.threshold {\n\n result.insert(candidate);\n\n }\n\n }\n\n result\n\n }\n\n\n\n /// Calculates minhash centroid that optimizes recall for this `MinHashIndex` configuration\n\n pub fn calculate_centroid(&self, ids: &[Id]) -> Vec<T> where\n\n {\n\n\n\n let signatures = ids.iter()\n\n .map(|id| self.id_signatures.get(&id))\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 91, "score": 35928.03010360752 }, { "content": " } else {\n\n false\n\n }\n\n }\n\n None => false,\n\n }\n\n }\n\n\n\n /// Removes id from the band\n\n /// Returns true if the band portion of the signature is not in the hashtable\n\n /// This method currently only compiles on nightly channel because it relies on\n\n /// HashMap::raw_entry_mut() to compare the pointers of bands\n\n #[cfg(all(feature = \"unstable\"))]\n\n fn remove(&mut self, id: &Id, signature: &Vec<T>) -> bool {\n\n let band_data = unsafe { signature.as_ptr().offset(self.band_start) };\n\n let band_key = BandKey {\n\n v: band_data,\n\n len: (self.band_end - self.band_start) as usize,\n\n };\n\n\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 92, "score": 35927.58091908392 }, { "content": " match self.hash_table.raw_entry_mut().from_key(&band_key) {\n\n RawEntryMut::Occupied(mut entry) => {\n\n let mut ids = entry.get_mut();\n\n ids.remove(id);\n\n if ids.is_empty() {\n\n entry.remove();\n\n true\n\n } else {\n\n !std::ptr::eq(entry.key().v, band_key.v)\n\n }\n\n }\n\n RawEntryMut::Vacant(entry) => false\n\n }\n\n }\n\n\n\n fn clear(&mut self) {\n\n self.hash_table.clear();\n\n }\n\n\n\n fn has_ids(&self, signature: &Vec<T>) -> bool {\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 93, "score": 35927.412271476285 }, { "content": " let mut expected = FxHashSet::default();\n\n expected.insert(4);\n\n 
assert_eq!(index.query_owned(&minhasher.create_signature(whitespace_split(&doc.to_lowercase()))), expected);\n\n }\n\n }\n\n\n\n }\n\n\n\n #[cfg(all(feature = \"unstable\"))]\n\n #[test]\n\n pub fn test_remove() {\n\n let mut lsh_index = MinHashIndex::new(4, 2, 0.5);\n\n lsh_index.insert(1, vec![1, 1, 1, 1, 1, 1, 1, 1]);\n\n lsh_index.insert(2, vec![1, 1, 1, 1, 1, 1, 1, 1]);\n\n\n\n lsh_index.insert(3, vec![1, 1, 1, 1, 1, 1, 2, 2]);\n\n lsh_index.insert(4, vec![1, 1, 1, 1, 1, 1, 2, 3]);\n\n\n\n lsh_index.insert(5, vec![2, 2, 2, 3, 3, 3, 4, 4]);\n\n\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 94, "score": 35927.39375391658 }, { "content": " centroid_signature.extend_from_slice(&first_signature[self.band_range(i)]);\n\n }\n\n }\n\n\n\n }\n\n\n\n centroid_signature\n\n }\n\n\n\n fn band_range(&self, band_index: usize) -> Range<usize> {\n\n band_index * self.r..(band_index + 1) * self.r\n\n }\n\n\n\n}\n\n\n\nimpl<T, Id> QueryIndex for MinHashIndex<T, Id>\n\n where\n\n T: MinHashType ,\n\n Id: Hash + Eq + Clone {\n\n type Id = Id;\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 95, "score": 35926.928427047635 }, { "content": " bands.push(MinHashBand::<T, Id>::new_with_capacity(start as isize, end as isize, initial_capacity));\n\n }\n\n let mut hash_table = FxHashMap::default();\n\n hash_table.reserve(initial_capacity);\n\n MinHashIndex {\n\n bands: bands,\n\n removed_ids: HashSet::new(),\n\n threshold: jaccard_threshold,\n\n id_signatures: hash_table,\n\n b: num_bands,\n\n r: band_width,\n\n size: 0,\n\n }\n\n }\n\n\n\n pub fn get_keys(&self) -> Vec<Id> {\n\n self.bands[0].hash_table.values()\n\n .into_iter().flat_map(|s| s.iter())\n\n .map(|id| id.clone())\n\n .collect()\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 96, "score": 35926.65112013702 }, { "content": " pub fn par_bulk_insert_pairs(&mut self, id_signature_pairs: Vec<(Id, Vec<T>)>)\n\n where\n\n Id: Hash + Eq + Clone + Send + Sync,\n\n T: Send + Sync,\n\n 
{\n\n unsafe {\n\n self.bands.par_iter_mut().for_each(|band| {\n\n for item in id_signature_pairs.iter() {\n\n let i: &(Id, Vec<T>) = item;\n\n let (a, b) = i;\n\n let k: Id = a.clone();\n\n band.insert(k, &b);\n\n }\n\n });\n\n }\n\n for id_hash in id_signature_pairs {\n\n self.id_signatures.insert(id_hash.0, id_hash.1);\n\n self.size += 1;\n\n }\n\n self.id_signatures.shrink_to_fit();\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 97, "score": 35926.30166563143 }, { "content": " let band_data = unsafe { signature.as_ptr().offset(self.band_start) };\n\n let band_key = BandKey {\n\n v: band_data,\n\n len: self.len,\n\n };\n\n self.hash_table\n\n .entry(band_key)\n\n .or_insert(FxHashSet::default())\n\n .insert(id.clone());\n\n ()\n\n }\n\n\n\n fn query<'a, S: BuildHasher>(&'a self, signature: &Vec<T>, match_ids: &mut HashSet<&'a Id, S>) {\n\n let band_data = unsafe { signature.as_ptr().offset(self.band_start) };\n\n let band_key = BandKey {\n\n v: band_data,\n\n len: self.len,\n\n };\n\n match self.hash_table.get(&band_key) {\n\n Some(ids) => match_ids.extend(ids.iter()),\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 98, "score": 35926.147023465695 }, { "content": " Id: Hash + Eq + Clone + Send + Sync,\n\n T: Send + Sync,\n\n {\n\n unsafe {\n\n self.bands.par_iter_mut().for_each(|band| {\n\n for item in signatures.iter().zip(ids.iter()) {\n\n let hashes = item.0;\n\n let id = item.1.clone();\n\n band.insert(id, hashes);\n\n }\n\n });\n\n }\n\n for id_hash in ids.into_iter().zip(signatures.into_iter()) {\n\n match self.id_signatures.insert(id_hash.0, id_hash.1) {\n\n None => self.size += 1,\n\n Some(_) => ()\n\n }\n\n }\n\n }\n\n\n", "file_path": "gaoya/src/minhash/minhash_index.rs", "rank": 99, "score": 35926.02566748859 } ]
Rust
fsb5/src/lib.rs
raftario/assetbundle
58ce22c0e0774c8c88471f3cdcad65569097a4b7
use byteorder::{LittleEndian, ReadBytesExt}; use std::{ collections::HashMap, convert::{TryFrom, TryInto}, io::{BufRead, Read, Seek, SeekFrom}, }; mod error; pub use error::Error; #[cfg(feature = "pcm")] mod pcm; #[derive(Debug, Copy, Clone)] pub enum SoundFormat { None, PCM8, PCM16, PCM24, PCM32, PCMFloat, GCADPCM, IMAADPCM, VAG, HEVAG, XMA, MPEG, CELT, AT9, XWMA, Vorbis, } impl SoundFormat { pub fn file_extension(self) -> &'static str { match self { SoundFormat::MPEG => "mp3", SoundFormat::Vorbis => "ogg", SoundFormat::PCM8 | SoundFormat::PCM16 | SoundFormat::PCM32 => "wav", _ => "bin", } } } impl TryFrom<u32> for SoundFormat { type Error = Error; fn try_from(value: u32) -> Result<Self, Self::Error> { match value { 0 => Ok(SoundFormat::None), 1 => Ok(SoundFormat::PCM8), 2 => Ok(SoundFormat::PCM16), 3 => Ok(SoundFormat::PCM24), 4 => Ok(SoundFormat::PCM32), 5 => Ok(SoundFormat::PCMFloat), 6 => Ok(SoundFormat::GCADPCM), 7 => Ok(SoundFormat::IMAADPCM), 8 => Ok(SoundFormat::VAG), 9 => Ok(SoundFormat::HEVAG), 10 => Ok(SoundFormat::XMA), 11 => Ok(SoundFormat::MPEG), 12 => Ok(SoundFormat::CELT), 13 => Ok(SoundFormat::AT9), 14 => Ok(SoundFormat::XWMA), 15 => Ok(SoundFormat::Vorbis), _ => Err(Error::SoundFormat(value)), } } } #[derive(Debug, Copy, Clone)] pub struct FSB5Header { pub id: [u8; 4], pub version: u32, pub num_samples: usize, pub sample_headers_size: usize, pub name_table_size: usize, pub data_size: usize, pub mode: SoundFormat, pub zero: [u8; 8], pub hash: [u8; 16], pub dummy: [u8; 8], pub unknown: u32, pub size: usize, } impl FSB5Header { fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, Error> { let mut id: [u8; 4] = [0; 4]; reader.read_exact(&mut id)?; let version = reader.read_u32::<LittleEndian>()?; let num_samples = reader.read_u32::<LittleEndian>()? as usize; let sample_headers_size = reader.read_u32::<LittleEndian>()? as usize; let name_table_size = reader.read_u32::<LittleEndian>()? as usize; let data_size = reader.read_u32::<LittleEndian>()? 
as usize; let mode = reader.read_u32::<LittleEndian>()?; let mut zero = [0; 8]; reader.read_exact(&mut zero)?; let mut hash = [0; 16]; reader.read_exact(&mut hash)?; let mut dummy = [0; 8]; reader.read_exact(&mut dummy)?; let unknown = match version { 0 => reader.read_u32::<LittleEndian>()?, _ => 0, }; let mode = mode.try_into()?; let size = reader.seek(SeekFrom::Current(0))? as usize; Ok(Self { id, version, num_samples, sample_headers_size, name_table_size, data_size, mode, zero, hash, dummy, unknown, size, }) } } #[derive(Debug, Clone)] pub struct Sample { pub name: String, pub frequency: u32, pub channels: u64, pub data_offset: usize, pub samples: usize, pub metadata: HashMap<u64, MetadataChunk>, pub data: Vec<u8>, } #[derive(Debug, Clone)] pub enum MetadataChunk { Channels(u8), Frequency(u32), Loop(u32, u32), XMASeek(Vec<u8>), DSPCOEFF(Vec<u8>), XWMAData(Vec<u8>), VorbisData { crc32: u32, unknown: Vec<u8> }, } impl MetadataChunk { fn read<R: Read>(reader: &mut R, chunk_size: usize, chunk_type: u64) -> Result<Self, Error> { match chunk_type { 1 => { let channels = reader.read_u8()?; Ok(MetadataChunk::Channels(channels)) } 2 => { let frequency = reader.read_u32::<LittleEndian>()?; Ok(MetadataChunk::Frequency(frequency)) } 3 => { let loop_tuple = ( reader.read_u32::<LittleEndian>()?, reader.read_u32::<LittleEndian>()?, ); Ok(MetadataChunk::Loop(loop_tuple.0, loop_tuple.1)) } 6 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XMASeek(data.to_vec())) } 7 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::DSPCOEFF(data.to_vec())) } 10 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XWMAData(data.to_vec())) } 11 => { let crc32 = reader.read_u32::<LittleEndian>()?; let mut unknown = vec![0; chunk_size]; reader.read_exact(&mut unknown)?; Ok(MetadataChunk::VorbisData { crc32, unknown: unknown.to_vec(), }) } _ => 
Err(Error::MetadataChunkType(chunk_type)), } } } fn bits(val: u64, start: u64, len: u64) -> u64 { let stop = start + len; let r = val & ((1 << stop) - 1); r >> start } #[derive(Debug, Clone)] pub struct FSB5 { pub header: FSB5Header, pub raw_size: usize, pub samples: Vec<Sample>, } impl FSB5 { pub fn read<R: BufRead + Seek>(mut reader: R) -> Result<Self, Error> { let mut magic = [0; 4]; reader.read_exact(&mut magic)?; if magic != *b"FSB5" { return Err(Error::MagicHeader(magic)); } reader.seek(SeekFrom::Start(0))?; let header = FSB5Header::read(&mut reader)?; let raw_size = header.size + header.sample_headers_size + header.name_table_size + header.data_size; let mut samples = Vec::with_capacity(header.num_samples); for i in 0..header.num_samples { let mut raw = reader.read_u64::<LittleEndian>()?; let mut next_chunk = bits(raw, 0, 1); let mut frequency = bits(raw, 1, 4) as u32; let channels = bits(raw, 1 + 4, 1) + 1; let data_offset = (bits(raw, 1 + 4 + 1, 28) * 16) as usize; let self_samples = bits(raw, 1 + 4 + 1 + 28, 30) as usize; let mut chunks = HashMap::new(); while next_chunk != 0 { raw = reader.read_u32::<LittleEndian>()? 
as u64; next_chunk = bits(raw, 0, 1); let chunk_size = bits(raw, 1, 24) as usize; let chunk_type = bits(raw, 1 + 24, 7); let chunk_data = match MetadataChunk::read(&mut reader, chunk_size, chunk_type) { Ok(cd) => cd, Err(e) => match e { Error::MetadataChunkType(_) => { eprintln!("{}", e); continue; } _ => return Err(e), }, }; chunks.insert(chunk_type, chunk_data); } if let Some(MetadataChunk::Frequency(f)) = chunks.get(&2) { frequency = *f; } else { frequency = match frequency { 1 => 8000, 2 => 11000, 3 => 11025, 4 => 16000, 5 => 22050, 6 => 24000, 7 => 32000, 8 => 44100, 9 => 48000, _ => { return Err(Error::Frequency(frequency)); } } } samples.push(Sample { name: format!("{}", i), frequency, channels, data_offset, samples: self_samples, metadata: chunks, data: Vec::new(), }); } if header.name_table_size > 0 { let nametable_start = reader.seek(SeekFrom::Current(0))? as usize; let mut samplename_offsets = vec![0; header.num_samples]; for i in samplename_offsets.iter_mut() { *i = reader.read_u32::<LittleEndian>()? 
as usize; } for (i, sample) in samples.iter_mut().enumerate() { reader.seek(SeekFrom::Start( (nametable_start + samplename_offsets[i]) as u64, ))?; let mut name = Vec::new(); reader.read_until(0, &mut name)?; sample.name = String::from_utf8(name).map_err(|_| Error::NameTable(i))?; } } reader.seek(SeekFrom::Start( (header.size + header.sample_headers_size + header.name_table_size) as u64, ))?; for i in 0..header.num_samples { let data_start = samples.get(i).unwrap().data_offset; let data_end = if i < header.num_samples - 1 { samples.get(i + 1).unwrap().data_offset } else { data_start + header.data_size }; let mut data = Vec::with_capacity(data_end - data_start); reader.read_exact(&mut data)?; samples.get_mut(i).unwrap().data = data; } Ok(Self { header, raw_size, samples, }) } pub fn rebuild(&self, sample: Sample) -> Result<Vec<u8>, Error> { match self.header.mode { SoundFormat::MPEG => Ok(sample.data.unwrap()), #[cfg(feature = "pcm")] SoundFormat::PCM8 => pcm::rebuild(sample, 1), #[cfg(feature = "pcm")] SoundFormat::PCM16 => pcm::rebuild(sample, 2), #[cfg(feature = "pcm")] SoundFormat::PCM32 => pcm::rebuild(sample, 4), _ => Err(Error::RebuildFormat(self.header.mode)), } } }
use byteorder::{LittleEndian, ReadBytesExt}; use std::{ collections::HashMap, convert::{TryFrom, TryInto}, io::{BufRead, Read, Seek, SeekFrom}, }; mod error; pub use error::Error; #[cfg(feature = "pcm")] mod pcm; #[derive(Debug, Copy, Clone)] pub enum SoundFormat { None, PCM8, PCM16, PCM24, PCM32, PCMFloat, GCADPCM, IMAADPCM, VAG, HEVAG, XMA, MPEG, CELT, AT9, XWMA, Vorbis, } impl SoundFormat {
} impl TryFrom<u32> for SoundFormat { type Error = Error; fn try_from(value: u32) -> Result<Self, Self::Error> { match value { 0 => Ok(SoundFormat::None), 1 => Ok(SoundFormat::PCM8), 2 => Ok(SoundFormat::PCM16), 3 => Ok(SoundFormat::PCM24), 4 => Ok(SoundFormat::PCM32), 5 => Ok(SoundFormat::PCMFloat), 6 => Ok(SoundFormat::GCADPCM), 7 => Ok(SoundFormat::IMAADPCM), 8 => Ok(SoundFormat::VAG), 9 => Ok(SoundFormat::HEVAG), 10 => Ok(SoundFormat::XMA), 11 => Ok(SoundFormat::MPEG), 12 => Ok(SoundFormat::CELT), 13 => Ok(SoundFormat::AT9), 14 => Ok(SoundFormat::XWMA), 15 => Ok(SoundFormat::Vorbis), _ => Err(Error::SoundFormat(value)), } } } #[derive(Debug, Copy, Clone)] pub struct FSB5Header { pub id: [u8; 4], pub version: u32, pub num_samples: usize, pub sample_headers_size: usize, pub name_table_size: usize, pub data_size: usize, pub mode: SoundFormat, pub zero: [u8; 8], pub hash: [u8; 16], pub dummy: [u8; 8], pub unknown: u32, pub size: usize, } impl FSB5Header { fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, Error> { let mut id: [u8; 4] = [0; 4]; reader.read_exact(&mut id)?; let version = reader.read_u32::<LittleEndian>()?; let num_samples = reader.read_u32::<LittleEndian>()? as usize; let sample_headers_size = reader.read_u32::<LittleEndian>()? as usize; let name_table_size = reader.read_u32::<LittleEndian>()? as usize; let data_size = reader.read_u32::<LittleEndian>()? as usize; let mode = reader.read_u32::<LittleEndian>()?; let mut zero = [0; 8]; reader.read_exact(&mut zero)?; let mut hash = [0; 16]; reader.read_exact(&mut hash)?; let mut dummy = [0; 8]; reader.read_exact(&mut dummy)?; let unknown = match version { 0 => reader.read_u32::<LittleEndian>()?, _ => 0, }; let mode = mode.try_into()?; let size = reader.seek(SeekFrom::Current(0))? 
as usize; Ok(Self { id, version, num_samples, sample_headers_size, name_table_size, data_size, mode, zero, hash, dummy, unknown, size, }) } } #[derive(Debug, Clone)] pub struct Sample { pub name: String, pub frequency: u32, pub channels: u64, pub data_offset: usize, pub samples: usize, pub metadata: HashMap<u64, MetadataChunk>, pub data: Vec<u8>, } #[derive(Debug, Clone)] pub enum MetadataChunk { Channels(u8), Frequency(u32), Loop(u32, u32), XMASeek(Vec<u8>), DSPCOEFF(Vec<u8>), XWMAData(Vec<u8>), VorbisData { crc32: u32, unknown: Vec<u8> }, } impl MetadataChunk { fn read<R: Read>(reader: &mut R, chunk_size: usize, chunk_type: u64) -> Result<Self, Error> { match chunk_type { 1 => { let channels = reader.read_u8()?; Ok(MetadataChunk::Channels(channels)) } 2 => { let frequency = reader.read_u32::<LittleEndian>()?; Ok(MetadataChunk::Frequency(frequency)) } 3 => { let loop_tuple = ( reader.read_u32::<LittleEndian>()?, reader.read_u32::<LittleEndian>()?, ); Ok(MetadataChunk::Loop(loop_tuple.0, loop_tuple.1)) } 6 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XMASeek(data.to_vec())) } 7 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::DSPCOEFF(data.to_vec())) } 10 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XWMAData(data.to_vec())) } 11 => { let crc32 = reader.read_u32::<LittleEndian>()?; let mut unknown = vec![0; chunk_size]; reader.read_exact(&mut unknown)?; Ok(MetadataChunk::VorbisData { crc32, unknown: unknown.to_vec(), }) } _ => Err(Error::MetadataChunkType(chunk_type)), } } } fn bits(val: u64, start: u64, len: u64) -> u64 { let stop = start + len; let r = val & ((1 << stop) - 1); r >> start } #[derive(Debug, Clone)] pub struct FSB5 { pub header: FSB5Header, pub raw_size: usize, pub samples: Vec<Sample>, } impl FSB5 { pub fn read<R: BufRead + Seek>(mut reader: R) -> Result<Self, Error> { let mut magic = [0; 4]; reader.read_exact(&mut 
magic)?; if magic != *b"FSB5" { return Err(Error::MagicHeader(magic)); } reader.seek(SeekFrom::Start(0))?; let header = FSB5Header::read(&mut reader)?; let raw_size = header.size + header.sample_headers_size + header.name_table_size + header.data_size; let mut samples = Vec::with_capacity(header.num_samples); for i in 0..header.num_samples { let mut raw = reader.read_u64::<LittleEndian>()?; let mut next_chunk = bits(raw, 0, 1); let mut frequency = bits(raw, 1, 4) as u32; let channels = bits(raw, 1 + 4, 1) + 1; let data_offset = (bits(raw, 1 + 4 + 1, 28) * 16) as usize; let self_samples = bits(raw, 1 + 4 + 1 + 28, 30) as usize; let mut chunks = HashMap::new(); while next_chunk != 0 { raw = reader.read_u32::<LittleEndian>()? as u64; next_chunk = bits(raw, 0, 1); let chunk_size = bits(raw, 1, 24) as usize; let chunk_type = bits(raw, 1 + 24, 7); let chunk_data = match MetadataChunk::read(&mut reader, chunk_size, chunk_type) { Ok(cd) => cd, Err(e) => match e { Error::MetadataChunkType(_) => { eprintln!("{}", e); continue; } _ => return Err(e), }, }; chunks.insert(chunk_type, chunk_data); } if let Some(MetadataChunk::Frequency(f)) = chunks.get(&2) { frequency = *f; } else { frequency = match frequency { 1 => 8000, 2 => 11000, 3 => 11025, 4 => 16000, 5 => 22050, 6 => 24000, 7 => 32000, 8 => 44100, 9 => 48000, _ => { return Err(Error::Frequency(frequency)); } } } samples.push(Sample { name: format!("{}", i), frequency, channels, data_offset, samples: self_samples, metadata: chunks, data: Vec::new(), }); } if header.name_table_size > 0 { let nametable_start = reader.seek(SeekFrom::Current(0))? as usize; let mut samplename_offsets = vec![0; header.num_samples]; for i in samplename_offsets.iter_mut() { *i = reader.read_u32::<LittleEndian>()? 
as usize; } for (i, sample) in samples.iter_mut().enumerate() { reader.seek(SeekFrom::Start( (nametable_start + samplename_offsets[i]) as u64, ))?; let mut name = Vec::new(); reader.read_until(0, &mut name)?; sample.name = String::from_utf8(name).map_err(|_| Error::NameTable(i))?; } } reader.seek(SeekFrom::Start( (header.size + header.sample_headers_size + header.name_table_size) as u64, ))?; for i in 0..header.num_samples { let data_start = samples.get(i).unwrap().data_offset; let data_end = if i < header.num_samples - 1 { samples.get(i + 1).unwrap().data_offset } else { data_start + header.data_size }; let mut data = Vec::with_capacity(data_end - data_start); reader.read_exact(&mut data)?; samples.get_mut(i).unwrap().data = data; } Ok(Self { header, raw_size, samples, }) } pub fn rebuild(&self, sample: Sample) -> Result<Vec<u8>, Error> { match self.header.mode { SoundFormat::MPEG => Ok(sample.data.unwrap()), #[cfg(feature = "pcm")] SoundFormat::PCM8 => pcm::rebuild(sample, 1), #[cfg(feature = "pcm")] SoundFormat::PCM16 => pcm::rebuild(sample, 2), #[cfg(feature = "pcm")] SoundFormat::PCM32 => pcm::rebuild(sample, 4), _ => Err(Error::RebuildFormat(self.header.mode)), } } }
pub fn file_extension(self) -> &'static str { match self { SoundFormat::MPEG => "mp3", SoundFormat::Vorbis => "ogg", SoundFormat::PCM8 | SoundFormat::PCM16 | SoundFormat::PCM32 => "wav", _ => "bin", } }
function_block-full_function
[ { "content": "pub fn rebuild(sample: Sample, width: u16) -> Result<Vec<u8>, Error> {\n\n let data = &sample.data.unwrap()[..(sample.samples * width as usize)];\n\n let mut writer = BufWriter::new(Cursor::new(Vec::new()));\n\n\n\n let spec = WavSpec {\n\n channels: sample.channels as u16,\n\n sample_rate: sample.frequency,\n\n bits_per_sample: width,\n\n sample_format: SampleFormat::Int,\n\n };\n\n {\n\n let mut chunk_writer = ChunksWriter::new(&mut writer)?;\n\n chunk_writer.write_fmt(spec)?;\n\n {\n\n let mut embedded_writer = chunk_writer.start_chunk(*b\"data\")?;\n\n embedded_writer.write_all(data)?;\n\n }\n\n chunk_writer.finalize()?;\n\n }\n\n\n\n Ok(writer.into_inner().unwrap().into_inner())\n\n}\n", "file_path": "fsb5/src/pcm.rs", "rank": 0, "score": 42873.93132757013 }, { "content": "enum CompressionType {\n\n None,\n\n LZMA,\n\n LZ4,\n\n LZ4HC,\n\n LZ4AM,\n\n}\n\n\n\nimpl TryFrom<u32> for CompressionType {\n\n type Error = Error;\n\n\n\n fn try_from(value: u32) -> Result<Self, Self::Error> {\n\n match value {\n\n 0 => Ok(CompressionType::None),\n\n 1 => Ok(CompressionType::LZMA),\n\n 2 => Ok(CompressionType::LZ4),\n\n 3 => Ok(CompressionType::LZ4HC),\n\n 4 => Ok(CompressionType::LZ4AM),\n\n _ => Err(Error::CompressionType(value)),\n\n }\n\n }\n\n}\n", "file_path": "src/enums.rs", "rank": 1, "score": 33458.67593260722 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum Signature {\n\n Raw,\n\n Web,\n\n FS,\n\n}\n\n\n\nimpl TryFrom<Vec<u8>> for Signature {\n\n type Error = Error;\n\n\n\n fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {\n\n match value.as_slice() {\n\n b\"UnityRaw\" => Ok(Signature::Raw),\n\n b\"UnityWeb\" => Ok(Signature::Web),\n\n b\"UnityFS\" => Ok(Signature::FS),\n\n _ => Err(Error::Signature(value)),\n\n }\n\n }\n\n}\n\n\n\nimpl AssetBundle {\n", "file_path": "src/assetbundle.rs", "rank": 2, "score": 28600.625497854162 }, { "content": "#[derive(Debug, Clone)]\n\nenum AssetBundleHeader {\n\n FS {\n\n signature: 
Signature,\n\n format_version: i32,\n\n unity_version: String,\n\n generator_version: String,\n\n file_size: usize,\n\n ciblock_size: usize,\n\n uiblock_size: usize,\n\n },\n\n Raw {\n\n signature: Signature,\n\n format_version: i32,\n\n unity_version: String,\n\n generator_version: String,\n\n file_size: usize,\n\n header_size: usize,\n\n file_count: usize,\n\n bundle_count: usize,\n\n bundle_size: Option<usize>,\n\n uncompressed_bundle_size: Option<usize>,\n\n compressed_file_size: Option<usize>,\n\n asset_header_size: Option<usize>,\n\n name: String,\n\n },\n\n}\n\n\n", "file_path": "src/assetbundle.rs", "rank": 3, "score": 26440.822768830592 }, { "content": "use crate::Error;\n\nuse std::convert::TryFrom;\n\n\n", "file_path": "src/enums.rs", "rank": 4, "score": 19449.658034547305 }, { "content": "use std::{io, string::FromUtf8Error};\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n #[error(\"File does not start with b\\\"Unity\\\": `{0:?}`\")]\n\n Magic([u8; 5]),\n\n\n\n #[error(\"Unrecognized file signature {0:?}\")]\n\n Signature(Vec<u8>),\n\n\n\n #[error(\"Expected compression type in the [1, 5[ range but got `{0}`\")]\n\n CompressionType(u32),\n\n\n\n #[error(\"IO error\")]\n\n IO(#[from] io::Error),\n\n\n\n #[error(\"Invalid UTF-8\")]\n\n UTF8(#[from] FromUtf8Error),\n\n}\n", "file_path": "src/error.rs", "rank": 5, "score": 19449.39591570969 }, { "content": "use crate::{Error, Sample};\n\nuse hound::{ChunksWriter, SampleFormat, WavSpec};\n\nuse std::io::{BufWriter, Cursor, Write};\n\n\n", "file_path": "fsb5/src/pcm.rs", "rank": 6, "score": 18430.06707206707 }, { "content": "use crate::SoundFormat;\n\nuse std::io;\n\nuse thiserror::Error;\n\n\n\n#[cfg(feature = \"pcm\")]\n\nuse hound;\n\n\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n #[error(\"Expected audio mode in the [0, 16[ range but got `{0}`\")]\n\n SoundFormat(u32),\n\n\n\n #[error(\"Expected metadata chunk type in the [1, 8[ or [10, 12[ range but got `{0}`\")]\n\n 
MetadataChunkType(u64),\n\n\n\n #[error(\"Expected magic header `FBS5` but got `{0:?}`\")]\n\n MagicHeader([u8; 4]),\n\n\n\n #[error(\"Frequency value `{0}` is not valid and no frequency metadata chunk was provided\")]\n\n Frequency(u32),\n", "file_path": "fsb5/src/error.rs", "rank": 7, "score": 18137.292296745523 }, { "content": "\n\n #[error(\"Non UTF-8 content in name table for sample `{0}`\")]\n\n NameTable(usize),\n\n\n\n #[error(\"Sample to decode did not originate from the FSB archive decoding it\")]\n\n Mismatched,\n\n\n\n #[error(\"Decoding samples of type `{0:?}` is not supported\")]\n\n RebuildFormat(SoundFormat),\n\n\n\n #[error(\"IO error\")]\n\n IO(#[from] io::Error),\n\n\n\n #[cfg(feature = \"pcm\")]\n\n #[error(\"PCM error\")]\n\n PCM(#[from] hound::Error),\n\n}\n", "file_path": "fsb5/src/error.rs", "rank": 8, "score": 18134.404140754574 }, { "content": "use crate::Error;\n\nuse byteorder::{BigEndian, ReadBytesExt};\n\nuse std::convert::TryInto;\n\nuse std::{\n\n convert::TryFrom,\n\n io::{BufRead, Read, Seek, SeekFrom},\n\n};\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/assetbundle.rs", "rank": 12, "score": 11.472381201402936 }, { "content": "mod assetbundle;\n\nmod enums;\n\n\n\nmod error;\n\npub use error::Error;\n", "file_path": "src/lib.rs", "rank": 15, "score": 9.38182054532453 }, { "content": "# fsb5\n\n\n\nRust library and tool to extract FSB5 (FMOD Sample Bank) files\n\n\n\nCode almost identical to [HearthSim/python-fsb5](https://github.com/HearthSim/python-fsb5)\n\n\n\n## Supported formats\n\n\n\n* MPEG\n\n* WAVE (PCM8, PCM16, PCM32)\n\n\n\nVorbis (OGG) planned\n\n\n\n## License\n\n\n\n[MIT](../LICENSE)\n", "file_path": "fsb5/README.md", "rank": 17, "score": 6.750291202734633 }, { "content": " let uiblock_size = reader.read_u32::<BigEndian>()? 
as usize;\n\n let flags = reader.read_u32::<BigEndian>()?;\n\n let compression = (flags & 0x3F).try_into()?;\n\n let eof_metadata = flags & 0x80;\n\n let mut orig_pos = None;\n\n if eof_metadata != 0 {\n\n orig_pos = Some(reader.seek(SeekFrom::Current(0))?) as u64;\n\n reader.seek(SeekFrom::End(-ciblock_size as i64))?;\n\n }\n\n // TODO\n\n if eof_metadata != 0 {\n\n reader.seek(SeekFrom::Start(orig_pos.unwrap()))?;\n\n }\n\n\n\n Ok(Self)\n\n }\n\n}\n", "file_path": "src/assetbundle.rs", "rank": 20, "score": 4.822147098334334 }, { "content": " fn load<R: BufRead + Seek>(mut reader: R) -> Result<Self, Error> {\n\n let mut magic = [0; 5];\n\n reader.read_exact(&mut magic)?;\n\n if magic != *b\"Unity\" {\n\n return Err(Error::Magic(magic));\n\n }\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n\n let mut signature = Vec::new();\n\n reader.read_until(0, &mut signature);\n\n let signature = signature.try_into()?;\n\n let format_version = reader.read_i32::<BigEndian>()?;\n\n let mut unity_version = Vec::new();\n\n reader.read_until(0, &mut unity_version)?;\n\n let unity_version = String::from_utf8(unity_version)?;\n\n let mut generator_version = Vec::new();\n\n reader.read_until(0, &mut generator_version)?;\n\n let generator_version = String::from_utf8(generator_version)?;\n\n let header = PartialAssetBundleHeader {\n\n signature,\n", "file_path": "src/assetbundle.rs", "rank": 21, "score": 4.547669532046682 }, { "content": "\n\n let mut bundle_size = None;\n\n let mut uncompressed_bundle_size = None;\n\n if header.format_version >= 2 {\n\n bundle_size = Some(reader.read_u32::<BigEndian>()? as usize);\n\n\n\n if header.format_version >= 3 {\n\n uncompressed_bundle_size = Some(reader.read_u32::<BigEndian>()? as usize);\n\n }\n\n }\n\n\n\n let mut compressed_file_size = None;\n\n let mut asset_header_size = None;\n\n if header_size >= 60 {\n\n compressed_file_size = Some(reader.read_u32::<BigEndian>()? as usize);\n\n asset_header_size = Some(reader.read_u32::<BigEndian>()? 
as usize);\n\n }\n\n\n\n reader.read_i32::<BigEndian>()?;\n\n reader.read_u8()?;\n", "file_path": "src/assetbundle.rs", "rank": 22, "score": 4.121450186490848 }, { "content": " uncompressed_bundle_size,\n\n compressed_file_size,\n\n asset_header_size,\n\n name,\n\n };\n\n let mut assets = Vec::with_capacity(num_assets);\n\n for i in 0..num_assets {\n\n let asset = Asset::read(&header, &mut reader)?;\n\n assets.push(asset);\n\n }\n\n\n\n Ok(Self { header, assets })\n\n }\n\n\n\n fn load_unityfs<R: Read + Seek>(\n\n mut reader: R,\n\n header: AssetBundleHeader,\n\n ) -> Result<Self, Error> {\n\n let file_size = reader.read_i64::<BigEndian>()? as usize;\n\n let ciblock_size = reader.read_u32::<BigEndian>()? as usize;\n", "file_path": "src/assetbundle.rs", "rank": 25, "score": 3.570306986801807 }, { "content": " format_version,\n\n unity_version,\n\n generator_version,\n\n };\n\n\n\n match signature {\n\n Signature::FS => Self::load_raw(reader, header),\n\n Signature::Raw | Signature::Web => Self::load_unityfs(reader, header),\n\n }\n\n }\n\n\n\n fn load_raw<R: BufRead + Seek>(\n\n mut reader: R,\n\n header: PartialAssetBundleHeader,\n\n ) -> Result<Self, Error> {\n\n let file_size = reader.read_u32::<BigEndian>()? as usize;\n\n let header_size = reader.read_i32::<BigEndian>()? as usize;\n\n\n\n let file_count = reader.read_i32::<BigEndian>()? as usize;\n\n let bundle_count = reader.read_i32::<BigEndian>()? 
as usize;\n", "file_path": "src/assetbundle.rs", "rank": 27, "score": 3.453665536026512 }, { "content": " let mut name = Vec::new();\n\n reader.read_until(0, &mut name);\n\n let name = String::from_utf8(name)?;\n\n\n\n reader.seek(SeekFrom::Start(header_size as u64))?;\n\n let num_assets = match header.signature {\n\n Signature::Raw => reader.read_i32::<BigEndian>() as usize,\n\n Signature::Web => 1,\n\n _ => unreachable!(),\n\n };\n\n let header = AssetBundleHeader::Raw {\n\n signature: header.signature,\n\n format_version: header.format_version,\n\n unity_version: header.unity_version,\n\n generator_version: header.generator_version,\n\n file_size,\n\n header_size,\n\n file_count,\n\n bundle_count,\n\n bundle_size,\n", "file_path": "src/assetbundle.rs", "rank": 31, "score": 2.6264368198469192 }, { "content": "#[derive(Debug, Clone)]\n\nstruct AssetBundle {\n\n header: AssetBundleHeader,\n\n assets: Vec<Asset>,\n\n}\n\n\n", "file_path": "src/assetbundle.rs", "rank": 32, "score": 2.3532156456788083 }, { "content": "#[derive(Debug, Clone)]\n\nstruct PartialAssetBundleHeader {\n\n signature: Signature,\n\n format_version: i32,\n\n unity_version: String,\n\n generator_version: String,\n\n}\n\n\n", "file_path": "src/assetbundle.rs", "rank": 33, "score": 2.3532156456788083 } ]
Rust
day18/src/main.rs
theonejb/advent-of-code-20
d0cbefd5ef88cae566df1260750c1c7d7a98a9ec
use std::path::Path; use std::fs::File; use std::io::{BufReader, BufRead}; use std::cmp::Ordering; mod tests; #[derive(Debug, PartialEq, Eq)] enum Operator { Add, Sub, Mul, Div, } impl Ord for Operator { fn cmp(&self, other: &Self) -> Ordering { match self { Operator::Add => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Sub => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Mul => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, Operator::Div => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, } } } impl PartialOrd for Operator { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Operator { pub fn apply(&self, op1: i64, op2: i64) -> i64 { match self { Operator::Add => op1 + op2, Operator::Sub => op1 - op2, Operator::Mul => op1 * op2, Operator::Div => op1 / op2 } } } #[derive(Debug, PartialEq)] enum Token { Operator(Operator), Operand(i64), ParenthesisGroup(Vec<Token>), ParenthesisedToken(String), } /* Returns the next token and the left over string */ fn next_token(input: &str) -> (Token, &str) { let input = input.trim(); if input.starts_with("(") { let input = &input[1..]; let mut parenthesised_group = String::new(); let mut num_parens = 1; for c in input.chars() { match c { ')' => { num_parens -= 1; if num_parens == 0 { break; } } '(' => { num_parens += 1; } _ => {} } parenthesised_group.push(c); } let left_over = input.strip_prefix(&parenthesised_group).unwrap(); let left_over = &left_over[1..]; let left_over = left_over.trim(); return (Token::ParenthesisedToken(parenthesised_group), 
left_over); } let split: Vec<&str> = input.splitn(2, " ").collect(); let token_str = split[0]; let left_over = if split.len() == 2 { split[1] } else { "" }; let token = match token_str { "+" => Token::Operator(Operator::Add), "-" => Token::Operator(Operator::Sub), "*" => Token::Operator(Operator::Mul), "/" => Token::Operator(Operator::Div), other => Token::Operand( other.parse::<i64>().unwrap() ) }; (token, left_over) } fn parse(input: &str) -> Vec<Token> { let mut input = input; let mut output = vec![]; loop { let (token, left_over) = next_token(input); match token { Token::ParenthesisedToken(new_input) => { output.push( Token::ParenthesisGroup(parse(&new_input)) ); } t => { output.push(t); } } if left_over.is_empty() { break; } input = left_over; } output } fn value_of(token: &Token, calculate: fn(&Vec<Token>) -> i64) -> i64 { match token { Token::Operand(v) => *v, Token::ParenthesisGroup(v) => calculate(v), _ => { panic!("This shouldn't happen."); } } } fn calculate(input: &Vec<Token>) -> i64 { let mut input_iter = input.iter(); let mut first_operand = value_of(input_iter.next().unwrap(), calculate); let mut operator = input_iter.next().unwrap(); let mut second_operand = value_of(input_iter.next().unwrap(), calculate); if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, second_operand); } for token in input_iter { match token { Token::Operator(_) => { operator = token; } _ => { if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, value_of(token, calculate)); } } } } first_operand } fn calculate2(input: &Vec<Token>) -> i64 { let mut output_stack: Vec<&Token> = vec![]; let mut operator_stack: Vec<&Token> = vec![]; for token in input.iter() { match token { Token::Operator(op) => { while !operator_stack.is_empty() { let other_op_token = *operator_stack.last().unwrap(); if let Token::Operator(other_op) = other_op_token { if other_op > op { output_stack.push( operator_stack.pop().unwrap() ); } else { break; } } } 
operator_stack.push(token); } _ => { output_stack.push(token); } } } while !operator_stack.is_empty() { output_stack.push( operator_stack.pop().unwrap() ); } let mut operand_stack: Vec<i64> = vec![]; for token in output_stack { match token { Token::Operator(op) => { let op1 = operand_stack.pop().unwrap(); let op2 = operand_stack.pop().unwrap(); operand_stack.push( op.apply(op1, op2) ); }, _ => { operand_stack.push( value_of(token, calculate2) ); } } } operand_stack.pop().unwrap() } fn get_input(filename: &str) -> Vec<String> { let p = Path::new(filename); let f = File::open(p).unwrap(); let lines = BufReader::new(f).lines(); let mut input = vec![]; for l in lines { input.push(l.unwrap()); } input } fn main() { let input = get_input("input.txt"); let mut sum = 0; for expression in input.iter() { let parsed_expression = parse(expression.as_str()); sum += calculate(&parsed_expression); } println!("Part 1: {}", sum); let mut sum = 0; for expression in input.iter() { let parsed_expression = parse(expression.as_str()); sum += calculate2(&parsed_expression); } println!("Part 2: {}", sum); }
use std::path::Path; use std::fs::File; use std::io::{BufReader, BufRead}; use std::cmp::Ordering; mod tests; #[derive(Debug, PartialEq, Eq)] enum Operator { Add, Sub, Mul, Div, } impl Ord for Operator { fn cmp(&self, other: &Self) -> Ordering { match self { Operator::Add => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Sub => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Mul => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, Operator::Div => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, } } } impl PartialOrd for Operator { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Operator { pub fn apply(&self, op1: i64, op2: i64) -> i64 { match self { Operator::Add => op1 + op2, Operator::Sub => op1 - op2, Operator::Mul => op1 * op2, Operator::Div => op1 / op2 } } } #[derive(Debug, PartialEq)] enum Token { Operator(Operator), Operand(i64), ParenthesisGroup(Vec<Token>), ParenthesisedToken(String), } /* Returns the next token and the left over string */ fn next_token(input: &str) -> (Token, &str) { let input = input.trim(); if input.starts_with("(") { let input = &input[1..]; let mut parenthesised_group = String::new(); let mut num_parens = 1; for c in input.chars() { match c { ')' => { num_parens -= 1; if num_parens == 0 { break; }
r() { let parsed_expression = parse(expression.as_str()); sum += calculate2(&parsed_expression); } println!("Part 2: {}", sum); }
} '(' => { num_parens += 1; } _ => {} } parenthesised_group.push(c); } let left_over = input.strip_prefix(&parenthesised_group).unwrap(); let left_over = &left_over[1..]; let left_over = left_over.trim(); return (Token::ParenthesisedToken(parenthesised_group), left_over); } let split: Vec<&str> = input.splitn(2, " ").collect(); let token_str = split[0]; let left_over = if split.len() == 2 { split[1] } else { "" }; let token = match token_str { "+" => Token::Operator(Operator::Add), "-" => Token::Operator(Operator::Sub), "*" => Token::Operator(Operator::Mul), "/" => Token::Operator(Operator::Div), other => Token::Operand( other.parse::<i64>().unwrap() ) }; (token, left_over) } fn parse(input: &str) -> Vec<Token> { let mut input = input; let mut output = vec![]; loop { let (token, left_over) = next_token(input); match token { Token::ParenthesisedToken(new_input) => { output.push( Token::ParenthesisGroup(parse(&new_input)) ); } t => { output.push(t); } } if left_over.is_empty() { break; } input = left_over; } output } fn value_of(token: &Token, calculate: fn(&Vec<Token>) -> i64) -> i64 { match token { Token::Operand(v) => *v, Token::ParenthesisGroup(v) => calculate(v), _ => { panic!("This shouldn't happen."); } } } fn calculate(input: &Vec<Token>) -> i64 { let mut input_iter = input.iter(); let mut first_operand = value_of(input_iter.next().unwrap(), calculate); let mut operator = input_iter.next().unwrap(); let mut second_operand = value_of(input_iter.next().unwrap(), calculate); if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, second_operand); } for token in input_iter { match token { Token::Operator(_) => { operator = token; } _ => { if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, value_of(token, calculate)); } } } } first_operand } fn calculate2(input: &Vec<Token>) -> i64 { let mut output_stack: Vec<&Token> = vec![]; let mut operator_stack: Vec<&Token> = vec![]; for token in input.iter() { match token 
{ Token::Operator(op) => { while !operator_stack.is_empty() { let other_op_token = *operator_stack.last().unwrap(); if let Token::Operator(other_op) = other_op_token { if other_op > op { output_stack.push( operator_stack.pop().unwrap() ); } else { break; } } } operator_stack.push(token); } _ => { output_stack.push(token); } } } while !operator_stack.is_empty() { output_stack.push( operator_stack.pop().unwrap() ); } let mut operand_stack: Vec<i64> = vec![]; for token in output_stack { match token { Token::Operator(op) => { let op1 = operand_stack.pop().unwrap(); let op2 = operand_stack.pop().unwrap(); operand_stack.push( op.apply(op1, op2) ); }, _ => { operand_stack.push( value_of(token, calculate2) ); } } } operand_stack.pop().unwrap() } fn get_input(filename: &str) -> Vec<String> { let p = Path::new(filename); let f = File::open(p).unwrap(); let lines = BufReader::new(f).lines(); let mut input = vec![]; for l in lines { input.push(l.unwrap()); } input } fn main() { let input = get_input("input.txt"); let mut sum = 0; for expression in input.iter() { let parsed_expression = parse(expression.as_str()); sum += calculate(&parsed_expression); } println!("Part 1: {}", sum); let mut sum = 0; for expression in input.ite
random
[ { "content": "fn get_input(rules_filename: &str, data_filename: &str) -> (Vec<String>, Vec<String>) {\n\n let f = File::open(Path::new(rules_filename)).unwrap();\n\n let mut rules = vec![];\n\n for line in BufReader::new(f).lines() {\n\n rules.push(line.unwrap());\n\n }\n\n\n\n let f = File::open(Path::new(data_filename)).unwrap();\n\n let mut data = vec![];\n\n for line in BufReader::new(f).lines() {\n\n data.push(line.unwrap());\n\n }\n\n\n\n (rules, data)\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 5, "score": 192564.00294348435 }, { "content": "fn get_input(filename: &str) -> Vec<i64> {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut input_numbers = vec![];\n\n for line in lines {\n\n let line = line.unwrap();\n\n let number = line.parse::<i64>().unwrap();\n\n input_numbers.push(number);\n\n }\n\n\n\n input_numbers\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 6, "score": 187426.12034655595 }, { "content": "fn get_input(filename: &str) -> Vec<String> {\n\n let f = File::open(Path::new(filename)).unwrap();\n\n let lines = BufReader::new(f).lines();\n\n\n\n let mut input = vec![];\n\n\n\n for l in lines {\n\n input.push(l.unwrap());\n\n }\n\n\n\n input\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 7, "score": 187077.36607521013 }, { "content": "fn get_input(filename: &str) -> Vec<String> {\n\n let p = Path::new(filename);\n\n let f = File::open(p).unwrap();\n\n let lines = BufReader::new(f).lines();\n\n\n\n let mut input = vec![];\n\n for line in lines {\n\n input.push(line.unwrap());\n\n }\n\n\n\n input\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 8, "score": 187077.36607521013 }, { "content": "fn get_input(filename: &str) -> Vec<String> {\n\n let file_path = Path::new(filename);\n\n let file = File::open(file_path);\n\n let file = file.unwrap();\n\n\n\n let reader = BufReader::new(file);\n\n let lines = reader.lines();\n\n\n\n let 
mut instructions: Vec<String> = vec![];\n\n for line in lines {\n\n let line = line.unwrap();\n\n if line.len() > 0 {\n\n instructions.push(String::from(line));\n\n }\n\n }\n\n\n\n return instructions;\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 10, "score": 187077.36607521013 }, { "content": "fn get_input(filename: &str) -> Vec<String> {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut input = vec![];\n\n for line in lines {\n\n input.push(line.unwrap());\n\n }\n\n\n\n input\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 11, "score": 187077.36607521013 }, { "content": "fn get_input(filename: &str) -> Vec<String> {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut input = vec![];\n\n for line in lines {\n\n input.push(line.unwrap());\n\n }\n\n\n\n input\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 12, "score": 187077.36607521013 }, { "content": "fn read_input_lines(filename: &str) -> Vec<String> {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n\n\n let mut lines = vec![];\n\n for line in BufReader::new(file).lines() {\n\n let line = line.unwrap();\n\n lines.push(line);\n\n }\n\n\n\n lines\n\n}\n\n\n", "file_path": "day7/src/main.rs", "rank": 13, "score": 183401.557788189 }, { "content": "fn get_input_groups(filename: &str) -> Vec<Vec<String>> {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut groups = vec![];\n\n let mut current_group = vec![];\n\n\n\n for line in lines {\n\n let line = line.unwrap();\n\n if line.trim().len() == 0 {\n\n groups.push(current_group);\n\n current_group = vec![];\n\n } else {\n\n current_group.push(line);\n\n }\n\n }\n\n\n\n if current_group.len() > 0 {\n\n groups.push(current_group);\n\n }\n\n\n\n 
groups\n\n}\n\n\n", "file_path": "day6/src/main.rs", "rank": 14, "score": 175641.58380588613 }, { "content": "fn get_input(filename: &str) -> Input {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n let mut lines = BufReader::new(file).lines();\n\n\n\n let first_line = lines.next().unwrap().unwrap();\n\n let second_line = lines.next().unwrap().unwrap();\n\n\n\n let earliest_arrival_timestamp = first_line.parse::<u32>().unwrap();\n\n let busses = notes_to_busses(&second_line[..]);\n\n\n\n Input { earliest_arrival_timestamp, busses }\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 15, "score": 174329.95118514603 }, { "content": "fn get_test_input() -> Vec<i64> {\n\n [\n\n 35,\n\n 20,\n\n 15,\n\n 25,\n\n 47,\n\n 40,\n\n 62,\n\n 55,\n\n 65,\n\n 95,\n\n 102,\n\n 117,\n\n 150,\n\n 182,\n\n 127,\n\n 219,\n\n 299,\n\n 277,\n\n 309,\n\n 576\n\n ].to_vec()\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 16, "score": 172580.12056286214 }, { "content": "fn get_input_lines() -> Vec<String> {\n\n vec![\n\n String::from(\"mxmxvkd kfcds sqjhc nhms (contains dairy, fish)\"),\n\n String::from(\"trh fvjkl sbzzf mxmxvkd (contains dairy)\"),\n\n String::from(\"sqjhc fvjkl (contains soy)\"),\n\n String::from(\"sqjhc mxmxvkd sbzzf (contains fish)\"),\n\n ]\n\n}\n\n\n", "file_path": "day21/src/tests.rs", "rank": 17, "score": 172190.9276283505 }, { "content": "#[test]\n\nfn test_next_token() {\n\n assert_eq!(\n\n next_token(\"3 + 4\"), (Token::Operand(3), \"+ 4\")\n\n );\n\n assert_eq!(\n\n next_token(\"+ 4\"), (Token::Operator(Operator::Add), \"4\")\n\n );\n\n assert_eq!(\n\n next_token(\"4\"), (Token::Operand(4), \"\")\n\n );\n\n\n\n let mut input = \"1 + 2 * 3 + 4 * 5 + 6\";\n\n let mut tokens = vec![];\n\n loop {\n\n let (token, left_over) = next_token(input);\n\n tokens.push(token);\n\n\n\n input = left_over;\n\n if input.len() == 0 {\n\n break;\n", "file_path": "day18/src/tests.rs", "rank": 18, "score": 166595.3693022293 }, { "content": 
"fn tile_direction_input_to_coords(input: &str) -> HexCoord {\n\n let instructions = parse_tile_directions(input);\n\n let mut current_tile_coordinates = HexCoord::new(0, 0);\n\n\n\n for instruction in instructions.iter() {\n\n current_tile_coordinates = get_neighbour_in_direction(&current_tile_coordinates, instruction);\n\n }\n\n\n\n current_tile_coordinates\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 19, "score": 166396.94049370423 }, { "content": "fn find_weakness(input: &Vec<i64>, invalid_number: i64) -> Vec<i64> {\n\n // We go to input.len - 2 because we know for a fact that no 2 numbers in the input add up\n\n // to the invalid number, so we need at least 3 contiguous numbers\n\n for i in 0..input.len() - 2 {\n\n let mut number_of_inputs_to_consider = 3usize;\n\n loop {\n\n let sum: i64 = input[i..i+number_of_inputs_to_consider].iter().sum();\n\n if sum == invalid_number {\n\n return input[i..i+number_of_inputs_to_consider].to_vec();\n\n } else if sum > invalid_number {\n\n break\n\n } else {\n\n number_of_inputs_to_consider += 1;\n\n }\n\n }\n\n }\n\n\n\n return vec![];\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 20, "score": 165876.22127013112 }, { "content": "fn get_inputs() -> (String, String) {\n\n (\n\n String::from(\"42 29 12 40 47 26 11 39 41 13 8 50 44 33 5 27 10 25 17 1 28 22 6 32 35\"),\n\n String::from(\"19 34 38 21 43 14 23 46 16 3 36 31 37 45 30 15 49 48 24 9 2 18 4 7 20\")\n\n )\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 21, "score": 163782.92725067356 }, { "content": "fn solve1(input: &Vec<String>) -> usize {\n\n let mut flipped_tiles: HashSet<HexCoord> = HashSet::new();\n\n\n\n for instruction in input.iter() {\n\n let tile_coords = tile_direction_input_to_coords(&instruction);\n\n\n\n if flipped_tiles.contains(&tile_coords) {\n\n flipped_tiles.remove(&tile_coords);\n\n } else {\n\n flipped_tiles.insert(tile_coords);\n\n }\n\n }\n\n\n\n flipped_tiles.len()\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 
22, "score": 159487.48275310997 }, { "content": "fn find_invalid_number(input: &Vec<i64>, preamble_length: usize) -> i64 {\n\n let mut buffer: VecDeque<i64> = VecDeque::new();\n\n\n\n for input_element in input.iter() {\n\n if buffer.len() < preamble_length {\n\n buffer.push_back(*input_element);\n\n } else {\n\n if !is_valid_number(&mut buffer, *input_element) {\n\n return *input_element;\n\n } else {\n\n buffer.pop_front();\n\n buffer.push_back(*input_element);\n\n }\n\n }\n\n }\n\n\n\n 0\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 23, "score": 158483.59031796758 }, { "content": "fn get_program(filename: &str) -> Vec<String> {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut program = vec![];\n\n for line in lines {\n\n program.push(line.unwrap());\n\n }\n\n\n\n program\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 24, "score": 156582.6457776041 }, { "content": "fn is_valid_number(component_numbers: &mut VecDeque<i64>, number: i64) -> bool {\n\n let component_numbers = component_numbers.make_contiguous();\n\n for (i, n) in component_numbers[..component_numbers.len() - 1].iter().enumerate() {\n\n for m in component_numbers[i..].iter() {\n\n if m + n == number {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 25, "score": 153361.20011661877 }, { "content": "fn get_input() -> Vec<String> {\n\n vec![\n\n String::from(\"swsenenwneswnewseswwseswnwsweeswnw\"),\n\n String::from(\"esweeeeneeeneeeweeeenenenee\"),\n\n String::from(\"ewewsewswnewnwnewwwwsew\"),\n\n String::from(\"nwnwnwnwnenwwnwsenwnwnwnwnwnwnw\"),\n\n String::from(\"nwswseswneswseswswneswswseneseswswsenwswse\"),\n\n String::from(\"swswswneswswswswwneseswwswsw\"),\n\n String::from(\"newnwwnwnenenenenwsweenenwnenwnenese\"),\n\n String::from(\"senwsewseneneseneenenwwneneeswnewsw\"),\n\n String::from(\"eeeeneweeeseeeeeenewneswe\"),\n\n 
String::from(\"wnwswwewwwsewnwnwwnwnwwwnwnwnww\"),\n\n String::from(\"nenwnwnwnesenwwnwnwwneswnwnenesweneenwnw\"),\n\n String::from(\"nwwnwwwwenwwswnwwwwwewsesew\"),\n\n String::from(\"swnwswwswswewswseswnwne\"),\n\n String::from(\"wswnewswnwwwwenewsewwwwneswe\"),\n\n String::from(\"nwnwnenenewneneneneneenenesenwneswneswse\"),\n\n String::from(\"nwewsenwnwneseswwwwwwswwswwwwe\"),\n\n String::from(\"nesenwseneswseswnwneseseseseseswnwse\"),\n\n String::from(\"wnesweseswenwnenwnweseeseseeswnwse\"),\n", "file_path": "day24/src/main.rs", "rank": 26, "score": 145107.9420355527 }, { "content": "#[test]\n\nfn test_rule_match_do_match() {\n\n let rule_book = RuleBook::new();\n\n\n\n let (rule_id, rule) = Rule::from_input(\"1: \\\"a\\\"\");\n\n let mut printer = NestedPrinter::new();\n\n\n\n assert!(rule.do_match(&rule_book, \"a\", &mut printer).is_some());\n\n assert!(rule.do_match(&rule_book, \"b\", &mut printer).is_none());\n\n}\n\n\n", "file_path": "day19/src/tests.rs", "rank": 28, "score": 139334.09366867784 }, { "content": "fn get_input(filename: &str) -> Map {\n\n let input_path = Path::new(filename);\n\n let f = File::open(input_path).expect(\"Unable to open file\");\n\n let lines = BufReader::new(f).lines();\n\n\n\n let mut grid: Grid = vec![];\n\n for line in lines {\n\n let line = line.unwrap();\n\n let mut row = vec![];\n\n\n\n for terrain in line.chars() {\n\n row.push(terrain);\n\n }\n\n\n\n grid.push(row);\n\n }\n\n\n\n Map {\n\n pattern_width: grid[0].len(),\n\n height: grid.len(),\n\n grid,\n\n }\n\n}\n\n\n", "file_path": "day3/src/main.rs", "rank": 29, "score": 138268.94609658114 }, { "content": "#[test]\n\nfn test_rule_do_match() {\n\n let test_input = vec![\n\n String::from(\"0: 1 2\"),\n\n String::from(\"1: \\\"a\\\"\"),\n\n String::from(\"2: 1 3 | 3 1\"),\n\n String::from(\"3: \\\"b\\\"\"),\n\n ];\n\n\n\n let mut rule_book = RuleBook::new();\n\n for rule_pattern in test_input.iter() {\n\n let (rule_id, rule) = Rule::from_input(&rule_pattern);\n\n 
rule_book.insert(rule_id, rule);\n\n }\n\n\n\n let rule = rule_book.get(&0).unwrap();\n\n let mut printer = NestedPrinter::new();\n\n\n\n assert!(rule.do_match(&rule_book, \"aab\", &mut printer).is_some());\n\n assert!(rule.do_match(&rule_book, \"aba\", &mut printer).is_some());\n\n assert!(rule.do_match(&rule_book, \"abb\", &mut printer).is_none());\n\n}\n\n\n", "file_path": "day19/src/tests.rs", "rank": 30, "score": 132512.73370245183 }, { "content": "#[test]\n\nfn test_next_number() {\n\n let mut game = MemoryGame::new(&[0,3,6]);\n\n\n\n for expected_number in [0, 3, 6, 0, 3, 3, 1, 0, 4, 0].iter() {\n\n let next_number = game.next_number();\n\n // println!(\"Turn: {}; Number: {}\", turn+1, next_number);\n\n assert_eq!(next_number, *expected_number);\n\n }\n\n}\n\n\n", "file_path": "day15/src/tests.rs", "rank": 31, "score": 132488.5165816702 }, { "content": "fn read_input(filename: &str) -> Vec<Tile> {\n\n let mut tiles = vec![];\n\n\n\n let f = File::open(Path::new(filename)).unwrap();\n\n let mut current_tile_input = vec![];\n\n let mut current_tile_id = 0;\n\n\n\n for line in BufReader::new(f).lines() {\n\n let line = line.unwrap();\n\n\n\n if line.starts_with(\"Tile \") {\n\n let line_parts: Vec<&str> = line.split(\" \").collect();\n\n let id_part = line_parts[1];\n\n let id = &id_part[..id_part.len() - 1];\n\n let id = id.parse::<u32>().unwrap();\n\n current_tile_id = id;\n\n } else if line.is_empty() {\n\n tiles.push(\n\n Tile::from_input(current_tile_id, &current_tile_input)\n\n );\n", "file_path": "day20/src/main.rs", "rank": 32, "score": 132204.57182290882 }, { "content": "fn get_input(filename: &str) -> Vec<i32> {\n\n let path = Path::new(filename);\n\n let file = File::open(path).unwrap();\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut input: Vec<i32> = vec![];\n\n for line in lines {\n\n let line = line.unwrap();\n\n let input_element = line.parse().unwrap();\n\n input.push(input_element);\n\n }\n\n\n\n input\n\n}\n\n\n", "file_path": 
"day10/src/main.rs", "rank": 33, "score": 132204.57182290882 }, { "content": "#[test]\n\nfn test_deck_from_input() {\n\n let mut deck = Deck::from_input(\"9 2 6 3 1\");\n\n assert_eq!(deck.cards.pop_back().unwrap(), 1);\n\n assert_eq!(deck.cards.pop_back().unwrap(), 3);\n\n assert_eq!(deck.cards.pop_back().unwrap(), 6);\n\n assert_eq!(deck.cards.pop_back().unwrap(), 2);\n\n assert_eq!(deck.cards.pop_back().unwrap(), 9);\n\n assert!(deck.cards.is_empty());\n\n}\n\n\n", "file_path": "day22/src/tests.rs", "rank": 34, "score": 132063.19150306872 }, { "content": "#[test]\n\nfn test_rule_from_input() {\n\n let (rule_id, rule) = Rule::from_input(\"0: \\\"z\\\"\");\n\n assert_eq!(rule_id, 0);\n\n assert_eq!(rule, Rule::Match('z'));\n\n\n\n let (rule_id, rule) = Rule::from_input(\"10: 1 2\");\n\n assert_eq!(rule_id, 10);\n\n assert_eq!(\n\n rule,\n\n Rule::Chain(vec![1, 2])\n\n );\n\n\n\n let (rule_id, rule) = Rule::from_input(\"50: 10\");\n\n assert_eq!(rule_id, 50);\n\n assert_eq!(\n\n rule,\n\n Rule::Chain(vec![10])\n\n );\n\n\n\n let (rule_id, rule) = Rule::from_input(\"1: 2 3 | 3 2\");\n", "file_path": "day19/src/tests.rs", "rank": 35, "score": 132063.19150306872 }, { "content": "#[test]\n\nfn test_game_from_inputs() {\n\n let game = get_sample_game();\n\n\n\n assert_equal(game.current_arrangement.iter(), [8, 9, 1, 2, 5, 4, 6, 7, 3].iter());\n\n\n\n assert_eq!(game.min_cup, 1);\n\n assert_eq!(game.max_cup, 9);\n\n}\n\n\n", "file_path": "day23/src/tests.rs", "rank": 36, "score": 132063.19150306872 }, { "content": "#[test]\n\nfn test_tile_from_input() {\n\n let tile = get_tile_2311();\n\n\n\n assert_eq!(tile.get_pixel_value(&Point::new(0, 0)), Pixel::Black);\n\n assert_eq!(tile.get_pixel_value(&Point::new(0, 1)), Pixel::White);\n\n assert_eq!(tile.get_pixel_value(&Point::new(9, 9)), Pixel::White);\n\n assert_eq!(tile.get_pixel_value(&Point::new(9, 8)), Pixel::Black);\n\n assert_eq!(tile.width, 10);\n\n assert_eq!(tile.height, 10);\n\n}\n\n\n", "file_path": 
"day20/src/tests.rs", "rank": 37, "score": 132063.19150306872 }, { "content": "#[test]\n\nfn test_food_uses_ingredient() {\n\n let food = Food::from_description(&get_input_lines()[0]);\n\n assert!(food.uses_ingredient(\"kfcds\"));\n\n}\n\n\n", "file_path": "day21/src/tests.rs", "rank": 38, "score": 129462.59883833415 }, { "content": "#[test]\n\nfn test_deck_add_to_bottom() {\n\n let mut deck = Deck::from_input(\"9\");\n\n assert_eq!(deck.next_card(), 9);\n\n\n\n deck.add_to_bottom(10);\n\n deck.add_to_bottom(50);\n\n\n\n\n\n assert_eq!(deck.next_card(), 10);\n\n assert_eq!(deck.next_card(), 50);\n\n\n\n assert!(deck.cards.is_empty());\n\n}\n\n\n", "file_path": "day22/src/tests.rs", "rank": 39, "score": 129449.61480345287 }, { "content": "#[test]\n\nfn test_rule_does_match_completely() {\n\n let test_input = vec![\n\n String::from(\"0: 4 1 5\"),\n\n String::from(\"1: 2 3 | 3 2\"),\n\n String::from(\"2: 4 4 | 5 5\"),\n\n String::from(\"3: 4 5 | 5 4\"),\n\n String::from(\"4: \\\"a\\\"\"),\n\n String::from(\"5: \\\"b\\\"\"),\n\n ];\n\n\n\n let mut rule_book = RuleBook::new();\n\n for rule_pattern in test_input.iter() {\n\n let (rule_id, rule) = Rule::from_input(&rule_pattern);\n\n rule_book.insert(rule_id, rule);\n\n }\n\n\n\n let rule = rule_book.get(&0).unwrap();\n\n\n\n assert!(rule.does_match_completely(&rule_book, \"ababbb\"));\n\n assert!(rule.does_match_completely(&rule_book, \"abbbab\"));\n\n assert!(!rule.does_match_completely(&rule_book, \"bababa\"));\n\n assert!(!rule.does_match_completely(&rule_book, \"aaabbb\"));\n\n assert!(!rule.does_match_completely(&rule_book, \"aaaabbb\"));\n\n}", "file_path": "day19/src/tests.rs", "rank": 40, "score": 129402.11215190635 }, { "content": "#[test]\n\nfn test_rule_chain_do_match() {\n\n let mut rule_book = RuleBook::new();\n\n\n\n let (_, rule_1) = Rule::from_input(\"1: 2 3\");\n\n rule_book.insert(1, rule_1);\n\n\n\n let (_, rule_2) = Rule::from_input(\"2: \\\"a\\\"\");\n\n rule_book.insert(2, rule_2);\n\n\n\n let (_, 
rule_3) = Rule::from_input(\"3: \\\"b\\\"\");\n\n rule_book.insert(3, rule_3);\n\n\n\n let rule_1 = rule_book.get(&1).unwrap();\n\n let mut printer = NestedPrinter::new();\n\n\n\n assert!(rule_1.do_match(&rule_book, \"ab\", &mut printer).is_some());\n\n assert!(rule_1.do_match(&rule_book, \"ba\", &mut printer).is_none());\n\n}\n\n\n", "file_path": "day19/src/tests.rs", "rank": 41, "score": 129402.11215190635 }, { "content": "#[test]\n\nfn test_rule_options_do_match() {\n\n let test_input = vec![\n\n String::from(\"0: 1 2\"),\n\n String::from(\"1: \\\"a\\\"\"),\n\n String::from(\"2: 1 3 | 3 1\"),\n\n String::from(\"3: \\\"b\\\"\"),\n\n ];\n\n\n\n let mut rule_book = RuleBook::new();\n\n for rule_pattern in test_input.iter() {\n\n let (rule_id, rule) = Rule::from_input(&rule_pattern);\n\n rule_book.insert(rule_id, rule);\n\n }\n\n\n\n let options_rule = rule_book.get(&2).unwrap();\n\n let mut printer = NestedPrinter::new();\n\n\n\n assert!(options_rule.do_match(&rule_book, \"ab\", &mut printer).is_some());\n\n assert!(options_rule.do_match(&rule_book, \"ba\", &mut printer).is_some());\n\n assert!(options_rule.do_match(&rule_book, \"aa\", &mut printer).is_none());\n\n}\n\n\n", "file_path": "day19/src/tests.rs", "rank": 42, "score": 129402.11215190635 }, { "content": "#[test]\n\nfn test_get_next_state() {\n\n let waiting_area = waiting_area_with_sample_input();\n\n let (next_state, _) = waiting_area.get_next_state(0, 0);\n\n\n\n assert_eq!(OccupiedSeat, next_state);\n\n}\n\n\n", "file_path": "day11/src/tests.rs", "rank": 43, "score": 129378.70723290154 }, { "content": "#[test]\n\nfn test_deck_next_card() {\n\n let mut deck = Deck::from_input(\"9 2 6 3 1\");\n\n assert_eq!(deck.next_card(), 9);\n\n assert_eq!(deck.next_card(), 2);\n\n assert_eq!(deck.next_card(), 6);\n\n assert_eq!(deck.next_card(), 3);\n\n assert_eq!(deck.next_card(), 1);\n\n assert!(deck.cards.is_empty());\n\n}\n\n\n", "file_path": "day22/src/tests.rs", "rank": 44, "score": 129378.70723290154 }, { 
"content": "#[test]\n\nfn test_possible_ticket_from_input() {\n\n let possible_ticket = PossibleTicket::from_input(\"7,1,14\");\n\n assert_eq!(possible_ticket.field_values, vec![7, 1, 14]);\n\n}\n\n\n", "file_path": "day16/src/tests.rs", "rank": 45, "score": 128967.64684693907 }, { "content": "#[test]\n\nfn test_grid_new_from_input() {\n\n let input = vec![\n\n String::from(\".#.\"),\n\n String::from(\"..#\"),\n\n String::from(\"###\"),\n\n ];\n\n let mut grid = Grid::new_from_input(&input);\n\n\n\n for _ in 1..=6 {\n\n grid.tick();\n\n }\n\n\n\n assert_eq!(grid.number_of_active_cubes(), 848);\n\n}", "file_path": "day17/src/tests.rs", "rank": 46, "score": 128967.64684693908 }, { "content": "#[test]\n\nfn test_field_config_from_input() {\n\n let input = \"arrival location: 30-542 or 556-960\";\n\n let field_config = FieldConfig::from_input(input);\n\n assert_eq!(field_config.name, \"arrival location\");\n\n}\n\n\n", "file_path": "day16/src/tests.rs", "rank": 47, "score": 128967.64684693907 }, { "content": "#[test]\n\nfn test_ticket_format_from_input() {\n\n let input = vec![\n\n String::from(\"class: 1-3 or 5-7\"),\n\n String::from(\"row: 6-11 or 33-44\"),\n\n String::from(\"seat: 13-40 or 45-50\"),\n\n ];\n\n let ticket_format = TicketFormat::from_input(&input);\n\n\n\n assert_eq!(ticket_format.fields[0].name, \"class\");\n\n assert_eq!(ticket_format.fields[0].valid_ranges[0].0, 1);\n\n assert_eq!(ticket_format.fields[0].valid_ranges[0].1, 3);\n\n assert_eq!(ticket_format.fields[0].valid_ranges[1].0, 5);\n\n assert_eq!(ticket_format.fields[0].valid_ranges[1].1, 7);\n\n\n\n assert_eq!(ticket_format.fields[1].name, \"row\");\n\n assert_eq!(ticket_format.fields[1].valid_ranges[0].0, 6);\n\n assert_eq!(ticket_format.fields[1].valid_ranges[0].1, 11);\n\n assert_eq!(ticket_format.fields[1].valid_ranges[1].0, 33);\n\n assert_eq!(ticket_format.fields[1].valid_ranges[1].1, 44);\n\n\n\n assert_eq!(ticket_format.fields[2].name, \"seat\");\n\n 
assert_eq!(ticket_format.fields[2].valid_ranges[0].0, 13);\n\n assert_eq!(ticket_format.fields[2].valid_ranges[0].1, 40);\n\n assert_eq!(ticket_format.fields[2].valid_ranges[1].0, 45);\n\n assert_eq!(ticket_format.fields[2].valid_ranges[1].1, 50);\n\n}\n\n\n", "file_path": "day16/src/tests.rs", "rank": 48, "score": 128967.64684693908 }, { "content": "#[test]\n\nfn test_deck_add_winning_cards() {\n\n let mut deck = Deck::from_input(\"9\");\n\n deck.add_winning_cards(10, 50);\n\n\n\n assert_eq!(deck.next_card(), 9);\n\n assert_eq!(deck.next_card(), 10);\n\n assert_eq!(deck.next_card(), 50);\n\n}\n\n\n", "file_path": "day22/src/tests.rs", "rank": 49, "score": 126519.22893531094 }, { "content": "#[test]\n\nfn test_grid_next_state_for_point() {\n\n let mut grid = Grid::new();\n\n grid.update_cube_at_point(&Point(1, 0, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(2, 1, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(0, 2, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(1, 2, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(2, 2, 0, 0), CubeStatus::Active);\n\n\n\n assert_eq!(\n\n grid.next_state_for_point(&Point(0, 1, 0, 0)),\n\n CubeStatus::Active\n\n );\n\n}\n\n\n", "file_path": "day17/src/tests.rs", "rank": 50, "score": 126450.62231615988 }, { "content": "#[test]\n\nfn test_time_till_next_departure() {\n\n let bus = Bus{id: 7, offset: 0};\n\n assert_eq!(bus.time_till_next_departure(939), 6);\n\n\n\n let bus = Bus{id: 13, offset: 0};\n\n assert_eq!(bus.time_till_next_departure(939), 10);\n\n\n\n let bus = Bus{id: 59, offset: 0};\n\n assert_eq!(bus.time_till_next_departure(939), 5);\n\n\n\n let bus = Bus{id: 31, offset: 0};\n\n assert_eq!(bus.time_till_next_departure(939), 22);\n\n\n\n let bus = Bus{id: 19, offset: 0};\n\n assert_eq!(bus.time_till_next_departure(939), 11);\n\n}\n\n\n", "file_path": "day13/src/test.rs", "rank": 51, "score": 126450.62231615989 }, { "content": "#[test]\n\nfn 
test_game_select_next_current_cup() {\n\n let mut game = get_sample_game();\n\n game.select_next_current_cup();\n\n assert_eq!(game.get_current_cup(), 8);\n\n}", "file_path": "day23/src/tests.rs", "rank": 52, "score": 123688.3482210654 }, { "content": "fn get_input_iterator(filename: &str) -> Lines<BufReader<File>> {\n\n let file_path = Path::new(filename);\n\n let file = File::open(file_path).expect(\"Unable to open file.\");\n\n\n\n BufReader::new(file).lines()\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 53, "score": 121720.38306891339 }, { "content": "#[test]\n\nfn test_recursive_combat_play_till_winner_with_sub_games() {\n\n let mut game = RecursiveCombatGame::from_inputs(\n\n \"sub-games test\",\n\n \"9 2 6 3 1\",\n\n \"5 8 4 7 10\"\n\n );\n\n assert_eq!(game.play_till_winner(), Player::Two);\n\n}", "file_path": "day22/src/tests.rs", "rank": 54, "score": 118680.49706881105 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq)]\n\nenum Side { Left, Top, Right, Bottom }\n\n\n", "file_path": "day20/src/main.rs", "rank": 55, "score": 112548.98080872874 }, { "content": "fn get_input() -> Vec<TestCase> {\n\n let input_path = Path::new(\"input.txt\");\n\n let f = match File::open(input_path) {\n\n Err(why) => panic!(\"could not open {}: {}\", input_path.display(), why),\n\n Ok(file) => file\n\n };\n\n\n\n let mut test_cases = vec![];\n\n\n\n let lines = BufReader::new(f).lines();\n\n for line_r in lines {\n\n let line = line_r.unwrap();\n\n let input_parts: Vec<&str> = line.split_whitespace().collect();\n\n\n\n let length_specification = input_parts[0];\n\n let character_specification = input_parts[1];\n\n let password = input_parts[2];\n\n\n\n let length_specification_parts: Vec<&str> = length_specification.split('-').collect();\n\n let min_length_string = length_specification_parts[0];\n", "file_path": "day2/src/main.rs", "rank": 56, "score": 112445.53177812052 }, { "content": "fn waiting_area_with_sample_input() -> WaitingArea {\n\n let input = 
vec![\n\n String::from(\"L.LL.LL.LL\"),\n\n String::from(\"LLLLLLL.LL\"),\n\n String::from(\"L.L.L..L..\"),\n\n String::from(\"LLLL.LL.LL\"),\n\n String::from(\"L.LL.LL.LL\"),\n\n String::from(\"L.LLLLL.LL\"),\n\n String::from(\"..L.L.....\"),\n\n String::from(\"LLLLLLLLLL\"),\n\n String::from(\"L.LLLLLL.L\"),\n\n String::from(\"L.LLLLL.LL\"),\n\n ];\n\n WaitingArea::new(&input)\n\n}\n\n\n", "file_path": "day11/src/tests.rs", "rank": 57, "score": 111429.27181104023 }, { "content": "fn read_all_entries(input_iterator: &mut Lines<BufReader<File>>) -> Vec<PassportEntry> {\n\n let mut entires = vec![];\n\n\n\n loop {\n\n let next_entry = read_entry(input_iterator);\n\n match next_entry {\n\n None => { break; }\n\n Some(passport_entry) => { entires.push(passport_entry) }\n\n }\n\n }\n\n\n\n entires\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 58, "score": 111088.87214093505 }, { "content": "fn play_game_of_combat(player1_deck: &mut Deck, player2_deck: &mut Deck) {\n\n let mut round = 0;\n\n\n\n while !player1_deck.is_empty() && !player2_deck.is_empty() {\n\n round += 1;\n\n\n\n // println!(\"-- Round {} --\", round);\n\n // println!(\"Player 1's deck: {}\", player1_deck);\n\n // println!(\"Player 2's deck: {}\", player2_deck);\n\n\n\n let c1 = player1_deck.next_card();\n\n let c2 = player2_deck.next_card();\n\n\n\n // println!(\"Player 1 plays: {}\", c1);\n\n // println!(\"Player 2 plays: {}\", c2);\n\n\n\n if c1 > c2 {\n\n // println!(\"Player 1 wins this round.\");\n\n player1_deck.add_winning_cards(c1, c2);\n\n } else {\n", "file_path": "day22/src/main.rs", "rank": 59, "score": 108755.53590116499 }, { "content": "fn try_to_fix_program(program: Vec<String>) {\n\n for (i, instruction) in program.iter().enumerate() {\n\n if instruction.starts_with(\"jmp\") {\n\n println!(\"Changing jmp at {} to nop\", i);\n\n let mut program = program.clone();\n\n program[i] = String::from(\"nop 0\");\n\n\n\n if run_program_until_first_loop_or_exit(program) {\n\n break;\n\n 
}\n\n } else if instruction.starts_with(\"nop\") {\n\n println!(\"Changing nop at {} to jmp\", i);\n\n let mut program = program.clone();\n\n program[i] = String::from(\"nop \").add(&instruction[4..]);\n\n\n\n if run_program_until_first_loop_or_exit(program) {\n\n break;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 61, "score": 104925.99523610988 }, { "content": "fn does_program_loop(cpu: &mut CPU) -> bool {\n\n let mut visited_program_memory_locations = HashSet::new();\n\n\n\n while !cpu.program_finished() {\n\n if visited_program_memory_locations.contains(&cpu.get_instruction_pointer()) {\n\n return true;\n\n }\n\n\n\n visited_program_memory_locations.insert(cpu.get_instruction_pointer());\n\n cpu.next_cycle();\n\n }\n\n\n\n return false;\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 62, "score": 102216.85394116494 }, { "content": "#[test]\n\nfn test_calculate2() {\n\n assert_eq!(\n\n calculate2(&parse(\"3 + 4\")),\n\n 7\n\n );\n\n\n\n assert_eq!(\n\n calculate2(&parse(\"1 + 2 * 3 + 4 * 5 + 6\")),\n\n 231\n\n );\n\n\n\n assert_eq!(\n\n calculate2(&parse(\"1 + (2 * 3) + (4 * (5 + 6))\")),\n\n 51\n\n );\n\n\n\n assert_eq!(\n\n calculate2(&parse(\"2 * 3 + (4 * 5)\")),\n\n 46\n\n );\n", "file_path": "day18/src/tests.rs", "rank": 63, "score": 100477.68297456123 }, { "content": "#[test]\n\nfn test_calculate() {\n\n assert_eq!(\n\n calculate(&parse(\"3 + 4\")),\n\n 7\n\n );\n\n\n\n assert_eq!(\n\n calculate(&parse(\"1 + 2 * 3 + 4 * 5 + 6\")),\n\n 71\n\n );\n\n\n\n assert_eq!(\n\n calculate(&parse(\"1 + (2 * 3) + (4 * (5 + 6))\")),\n\n 51\n\n );\n\n\n\n assert_eq!(\n\n calculate(&parse(\"2 * 3 + (4 * 5)\")),\n\n 26\n\n );\n", "file_path": "day18/src/tests.rs", "rank": 64, "score": 100477.68297456123 }, { "content": "#[test]\n\nfn test_parse() {\n\n itertools::assert_equal(parse(\"3 + 4\"), vec![\n\n Token::Operand(3), Token::Operator(Operator::Add), Token::Operand(4)\n\n ]);\n\n\n\n itertools::assert_equal(parse(\"1 + 2 * 3 + 4 * 5 + 
6\"), vec![\n\n Token::Operand(1),\n\n Token::Operator(Operator::Add),\n\n Token::Operand(2),\n\n Token::Operator(Operator::Mul),\n\n Token::Operand(3),\n\n Token::Operator(Operator::Add),\n\n Token::Operand(4),\n\n Token::Operator(Operator::Mul),\n\n Token::Operand(5),\n\n Token::Operator(Operator::Add),\n\n Token::Operand(6)\n\n ]);\n\n\n\n itertools::assert_equal(parse(\"1 + (2 * 3) + (4 * (5 + 6))\"), vec![\n", "file_path": "day18/src/tests.rs", "rank": 65, "score": 100477.68297456123 }, { "content": "#[test]\n\nfn test_tick2() {\n\n let mut waiting_area = waiting_area_with_sample_input();\n\n while waiting_area.tick2(){}\n\n assert_eq!(waiting_area.get_number_of_occupied_seats(), 26);\n\n}", "file_path": "day11/src/tests.rs", "rank": 66, "score": 100477.68297456123 }, { "content": "#[test]\n\nfn test_tick() {\n\n let mut waiting_area = waiting_area_with_sample_input();\n\n assert_eq!(waiting_area.tick(), true);\n\n assert_eq!(waiting_area.tick(), true);\n\n\n\n assert_eq!(waiting_area.map[0][2], EmptySeat);\n\n\n\n assert_eq!(waiting_area.tick(), true);\n\n assert_eq!(waiting_area.map[0][2], OccupiedSeat);\n\n\n\n assert_eq!(waiting_area.tick(), true);\n\n assert_eq!(waiting_area.tick(), true);\n\n assert_eq!(waiting_area.tick(), false);\n\n\n\n assert_eq!(waiting_area.get_number_of_occupied_seats(), 37);\n\n}\n\n\n", "file_path": "day11/src/tests.rs", "rank": 67, "score": 100477.68297456123 }, { "content": "#[test]\n\nfn test_solve1() {\n\n let inputs = vec![\n\n String::from(\"sesenwnenenewseeswwswswwnenewsewsw\"),\n\n String::from(\"neeenesenwnwwswnenewnwwsewnenwseswesw\"),\n\n String::from(\"seswneswswsenwwnwse\"),\n\n String::from(\"nwnwneseeswswnenewneswwnewseswneseene\"),\n\n String::from(\"swweswneswnenwsewnwneneseenw\"),\n\n String::from(\"eesenwseswswnenwswnwnwsewwnwsene\"),\n\n String::from(\"sewnenenenesenwsewnenwwwse\"),\n\n String::from(\"wenwwweseeeweswwwnwwe\"),\n\n String::from(\"wsweesenenewnwwnwsenewsenwwsesesenwne\"),\n\n 
String::from(\"neeswseenwwswnwswswnw\"),\n\n String::from(\"nenwswwsewswnenenewsenwsenwnesesenew\"),\n\n String::from(\"enewnwewneswsewnwswenweswnenwsenwsw\"),\n\n String::from(\"sweneswneswneneenwnewenewwneswswnese\"),\n\n String::from(\"swwesenesewenwneswnwwneseswwne\"),\n\n String::from(\"enesenwswwswneneswsenwnewswseenwsese\"),\n\n String::from(\"wnwnesenesenenwwnenwsewesewsesesew\"),\n\n String::from(\"nenewswnwewswnenesenwnesewesw\"),\n\n String::from(\"eneswnwswnwsenenwnwnwwseeswneewsenese\"),\n\n String::from(\"neswnwewnwnwseenwseesewsenwsweewe\"),\n\n String::from(\"wseweeenwnesenwwwswnew\"),\n\n ];\n\n assert_eq!(solve1(&inputs), 10);\n\n}", "file_path": "day24/src/tests.rs", "rank": 68, "score": 100477.68297456123 }, { "content": "#[test]\n\nfn test_rotation() {\n\n assert_eq!(Ship::rotate(&North, Left, 90), West, \"North to Left 90\");\n\n assert_eq!(Ship::rotate(&North, Left, 180), South, \"North to Left 180\");\n\n assert_eq!(Ship::rotate(&North, Left, 270), East, \"North to Left 270\");\n\n\n\n assert_eq!(Ship::rotate(&North, Right, 90), East, \"North to Right 90\");\n\n assert_eq!(Ship::rotate(&North, Right, 180), South, \"North to Right 180\");\n\n assert_eq!(Ship::rotate(&North, Right, 270), West, \"North to Right 270\");\n\n\n\n assert_eq!(Ship::rotate(&East, Left, 90), North, \"East to Left 90\");\n\n assert_eq!(Ship::rotate(&East, Left, 180), West, \"East to Left 180\");\n\n assert_eq!(Ship::rotate(&East, Left, 270), South, \"East to Left 270\");\n\n\n\n assert_eq!(Ship::rotate(&East, Right, 90), South, \"East to Right 90\");\n\n assert_eq!(Ship::rotate(&East, Right, 180), West, \"East to Right 180\");\n\n assert_eq!(Ship::rotate(&East, Right, 270), North, \"East to Right 270\");\n\n\n\n assert_eq!(Ship::rotate(&South, Left, 90), East, \"South to Left 90\");\n\n assert_eq!(Ship::rotate(&South, Left, 180), North, \"South to Left 180\");\n\n assert_eq!(Ship::rotate(&South, Left, 270), West, \"South to Left 270\");\n", "file_path": 
"day12/src/tests.rs", "rank": 69, "score": 100477.68297456123 }, { "content": "#[test]\n\nfn test_nth() {\n\n let mut game = MemoryGame::new(&[0, 3, 6]);\n\n assert_eq!(game.nth(2020), 436);\n\n\n\n let mut game = MemoryGame::new(&[1, 3, 2]);\n\n assert_eq!(game.nth(2020), 1);\n\n\n\n let mut game = MemoryGame::new(&[2, 1, 3]);\n\n assert_eq!(game.nth(2020), 10);\n\n\n\n let mut game = MemoryGame::new(&[1, 2, 3]);\n\n assert_eq!(game.nth(2020), 27);\n\n\n\n let mut game = MemoryGame::new(&[2, 3, 1]);\n\n assert_eq!(game.nth(2020), 78);\n\n\n\n let mut game = MemoryGame::new(&[3, 2, 1]);\n\n assert_eq!(game.nth(2020), 438);\n\n\n\n let mut game = MemoryGame::new(&[3, 1, 2]);\n\n assert_eq!(game.nth(2020), 1836);\n\n}\n\n\n", "file_path": "day15/src/tests.rs", "rank": 70, "score": 100477.68297456123 }, { "content": "fn parse_tile_directions(directions: &str) -> Vec<Direction> {\n\n let mut parsed_directions = vec![];\n\n\n\n let mut iter = directions.chars();\n\n loop {\n\n let c = iter.next();\n\n if c.is_none() {\n\n break;\n\n }\n\n\n\n let c = c.unwrap();\n\n\n\n parsed_directions.push(\n\n match c {\n\n 'e' => Direction::East,\n\n 'w' => Direction::West,\n\n 's' => {\n\n match iter.next().unwrap() {\n\n 'e' => Direction::SouthEast,\n\n _ => Direction::SouthWest\n", "file_path": "day24/src/main.rs", "rank": 71, "score": 99900.56718789891 }, { "content": "#[test]\n\nfn test_nop() {\n\n let mut cpu = CPU::new(vec![]);\n\n cpu.do_nop();\n\n\n\n assert_eq!(cpu.instruction_pointer, 1);\n\n assert_eq!(cpu.accumulator, 0);\n\n}\n\n\n", "file_path": "day8/src/cpu/tests.rs", "rank": 72, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_play_game() {\n\n let mut player1_deck = Deck::from_input(\"9 2 6 3 1\");\n\n let mut player2_deck = Deck::from_input(\"5 8 4 7 10\");\n\n\n\n play_game_of_combat(&mut player1_deck, &mut player2_deck);\n\n}\n\n\n", "file_path": "day22/src/tests.rs", "rank": 73, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn 
test_follow_instruction() {\n\n let input = [\n\n \"F10\",\n\n \"N3\",\n\n \"F7\",\n\n \"R90\",\n\n \"F11\",\n\n ];\n\n\n\n let mut ship = Ship::new();\n\n\n\n for instruction in input.iter() {\n\n ship.follow_instruction(&MovementInstruction::compile(*instruction));\n\n }\n\n\n\n assert_eq!(ship.east, 214.0);\n\n assert_eq!(ship.north, -72.0);\n\n\n\n assert_eq!(ship.waypoint.east, 4.0);\n\n assert_eq!(ship.waypoint.north, -10.0);\n\n\n\n assert_eq!(ship.get_manhattan_distance(), 286.0);\n\n}", "file_path": "day12/src/tests.rs", "rank": 74, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_compile_instruction() {\n\n let set_mask = DecoderChip::compile_instruction(\"mask = 00110X11X0000110X0000001000111010X00\");\n\n assert_eq!(set_mask, Instruction::SetMask(String::from(\"00110X11X0000110X0000001000111010X00\")));\n\n\n\n let set_memory = DecoderChip::compile_instruction(\"mem[61385] = 13441\");\n\n assert_eq!(set_memory, Instruction::SetMemory(61385, 13441));\n\n}\n\n\n", "file_path": "day14/src/tests.rs", "rank": 75, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_food_from_description() {\n\n let food = Food::from_description(&get_input_lines()[0]);\n\n assert_eq!(food.ingredients, vec![\n\n String::from(\"mxmxvkd\"),\n\n String::from(\"kfcds\"),\n\n String::from(\"sqjhc\"),\n\n String::from(\"nhms\"),\n\n ]);\n\n assert_eq!(food.allergens, vec![String::from(\"dairy\"), String::from(\"fish\")]);\n\n\n\n let food = Food::from_description(&get_input_lines()[2]);\n\n assert_eq!(food.allergens, vec![String::from(\"soy\")]);\n\n}\n\n\n", "file_path": "day21/src/tests.rs", "rank": 76, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_move_waypoint() {\n\n let waypoint = Waypoint { east:10.0, north: 1.0};\n\n let waypoint = Ship::move_waypoint(&waypoint, North, 3);\n\n assert_eq!(waypoint.east, 10.0);\n\n assert_eq!(waypoint.north, 4.0);\n\n}\n\n\n", "file_path": "day12/src/tests.rs", "rank": 77, "score": 98393.77113452434 }, 
{ "content": "#[test]\n\nfn test_part_2_nth() {\n\n let mut game = MemoryGame::new(&[0, 3, 6]);\n\n assert_eq!(game.nth(30_000_000), 175594);\n\n\n\n let mut game = MemoryGame::new(&[1, 3, 2]);\n\n assert_eq!(game.nth(30000000), 2578);\n\n\n\n let mut game = MemoryGame::new(&[2, 1, 3]);\n\n assert_eq!(game.nth(30000000), 3544142);\n\n\n\n let mut game = MemoryGame::new(&[1, 2, 3]);\n\n assert_eq!(game.nth(30000000), 261214);\n\n\n\n let mut game = MemoryGame::new(&[2, 3, 1]);\n\n assert_eq!(game.nth(30000000), 6895259);\n\n\n\n let mut game = MemoryGame::new(&[3, 2, 1]);\n\n assert_eq!(game.nth(30000000), 18);\n\n\n\n let mut game = MemoryGame::new(&[3, 1, 2]);\n\n assert_eq!(game.nth(30000000), 362);\n\n}", "file_path": "day15/src/tests.rs", "rank": 78, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_move_ship() {\n\n let mut ship = Ship::new();\n\n\n\n ship.move_ship(&MovementInstruction::compile(\"F10\"));\n\n assert_eq!(ship.east, 10.0);\n\n assert_eq!(ship.north, 0.0);\n\n\n\n ship.move_ship(&MovementInstruction::compile(\"N3\"));\n\n assert_eq!(ship.east, 10.0);\n\n assert_eq!(ship.north, 3.0);\n\n\n\n ship.move_ship(&MovementInstruction::compile(\"F7\"));\n\n assert_eq!(ship.east, 17.0);\n\n assert_eq!(ship.north, 3.0);\n\n\n\n ship.move_ship(&MovementInstruction::compile(\"R90\"));\n\n assert_eq!(ship.direction, South);\n\n assert_eq!(ship.east, 17.0);\n\n assert_eq!(ship.north, 3.0);\n\n\n\n ship.move_ship(&MovementInstruction::compile(\"F11\"));\n\n assert_eq!(ship.east, 17.0);\n\n assert_eq!(ship.north, -8.0);\n\n}\n\n\n", "file_path": "day12/src/tests.rs", "rank": 79, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_point_neighbours() {\n\n let p = Point(0, 0, 0, 0);\n\n let neighbours = p.neighbours();\n\n assert_eq!(neighbours.len(), 80);\n\n}\n\n\n", "file_path": "day17/src/tests.rs", "rank": 80, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_grid_tick() {\n\n let mut grid = Grid::new();\n\n 
grid.update_cube_at_point(&Point(1, 0, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(2, 1, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(0, 2, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(1, 2, 0, 0), CubeStatus::Active);\n\n grid.update_cube_at_point(&Point(2, 2, 0, 0), CubeStatus::Active);\n\n\n\n for _ in 1..=6 {\n\n grid.tick();\n\n }\n\n\n\n assert_eq!(grid.number_of_active_cubes(), 848);\n\n}\n\n\n", "file_path": "day17/src/tests.rs", "rank": 81, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_compile_mask() {\n\n let mask = \"XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\";\n\n let compiled_mask = DecoderChip::compile_mask(mask);\n\n\n\n assert_eq!(compiled_mask.or_value, 0b1000000, \"Or mask\");\n\n assert_eq!(compiled_mask.and_value,\n\n 0b1111111111111111111111111111111111111111111111111111111111111101,\n\n \"And mask\");\n\n\n\n let compiled_mask = DecoderChip::compile_mask(\"000000000000000000000000000000X1001X\");\n\n assert_eq!(compiled_mask.floating_indices, vec![0, 5]);\n\n}\n\n\n", "file_path": "day14/src/tests.rs", "rank": 82, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_jmp() {\n\n let mut cpu = CPU::new(vec![]);\n\n\n\n cpu.do_jmp(10);\n\n assert_eq!(cpu.instruction_pointer, 10);\n\n\n\n cpu.do_jmp(-5);\n\n assert_eq!(cpu.instruction_pointer, 5);\n\n}", "file_path": "day8/src/cpu/tests.rs", "rank": 83, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_acc() {\n\n let mut cpu = CPU::new(vec![]);\n\n\n\n cpu.do_acc(10);\n\n assert_eq!(cpu.instruction_pointer, 1);\n\n assert_eq!(cpu.accumulator, 10);\n\n\n\n cpu.do_acc(-20);\n\n assert_eq!(cpu.instruction_pointer, 2);\n\n assert_eq!(cpu.accumulator, -10);\n\n}\n\n\n", "file_path": "day8/src/cpu/tests.rs", "rank": 84, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_note_to_busses() {\n\n let busses = notes_to_busses(\"7,13,x,x,59,x,31,19\");\n\n assert_eq!(busses[0].as_ref().unwrap().id, 
7);\n\n assert_eq!(busses[1].as_ref().unwrap().id, 13);\n\n assert!(busses[2].is_none());\n\n assert!(busses[3].is_none());\n\n assert_eq!(busses[4].as_ref().unwrap().id, 59);\n\n}\n\n\n", "file_path": "day13/src/test.rs", "rank": 85, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_apply_mask() {\n\n let mask = \"XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\";\n\n let mask = DecoderChip::compile_mask(mask);\n\n\n\n assert_eq!(mask.apply_mask(11), 73);\n\n assert_eq!(mask.apply_mask(101), 101);\n\n assert_eq!(mask.apply_mask(0), 64);\n\n}\n\n\n", "file_path": "day14/src/tests.rs", "rank": 86, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_rotate_waypoint() {\n\n let waypoint = Ship::rotate_waypoint(\n\n &Waypoint { east: 10.0, north: 4.0 },\n\n Right,\n\n 90,\n\n );\n\n\n\n assert_eq!(waypoint.east.round(), 4.0);\n\n assert_eq!(waypoint.north.round(), -10.0);\n\n\n\n let waypoint = Ship::rotate_waypoint(\n\n &waypoint,\n\n Left,\n\n 90\n\n );\n\n assert_eq!(waypoint.east.round(), 10.0);\n\n assert_eq!(waypoint.north.round(), 4.0);\n\n}\n\n\n", "file_path": "day12/src/tests.rs", "rank": 87, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_decoder_chip() {\n\n let program = vec![\n\n String::from(\"mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\"),\n\n String::from(\"mem[8] = 11\"),\n\n String::from(\"mem[7] = 101\"),\n\n String::from(\"mem[8] = 0\"),\n\n ];\n\n let mut chip = DecoderChip::new();\n\n chip.run_program(&program);\n\n assert_eq!(chip.sum_all_memory_values(), 165);\n\n}", "file_path": "day14/src/tests.rs", "rank": 88, "score": 98393.77113452434 }, { "content": "#[test]\n\nfn test_arrangement_fill_yourself() {\n\n let mut tiles = BunchOfTiles::new();\n\n let input_tiles = read_input(\"test_input.txt\");\n\n for t in input_tiles {\n\n tiles.insert(t);\n\n }\n\n\n\n if let Some(arrangement) = Arrangement::find(3, &tiles) {\n\n println!(\"{}\", arrangement);\n\n }\n\n}", "file_path": "day20/src/tests.rs", "rank": 89, "score": 
98393.77113452434 }, { "content": "#[test]\n\nfn test_get_neighbours() {\n\n let waiting_area = waiting_area_with_sample_input();\n\n let neighbours = waiting_area.get_neighbours(0, 0);\n\n let mut neighbours = neighbours.iter();\n\n let expected_output = [Floor, EmptySeat, EmptySeat];\n\n\n\n for to_expect in expected_output.iter() {\n\n assert_eq!(neighbours.next().unwrap(), to_expect);\n\n }\n\n\n\n assert_eq!(neighbours.next(), None);\n\n}\n\n\n", "file_path": "day11/src/tests.rs", "rank": 90, "score": 98393.77113452434 }, { "content": "fn notes_to_busses(note: &str) -> Vec<Option<Bus>> {\n\n let input_busses = note.split(',');\n\n\n\n let mut busses = vec![];\n\n for (offset, bus_id) in input_busses.enumerate() {\n\n if bus_id == \"x\" {\n\n busses.push(None);\n\n } else {\n\n let bus_id = bus_id.parse::<u32>().unwrap();\n\n busses.push(Some(Bus { id: bus_id, offset: offset as u32 }));\n\n }\n\n }\n\n\n\n busses\n\n}\n", "file_path": "day13/src/main.rs", "rank": 91, "score": 97160.03433259173 }, { "content": "fn get_test_input() -> (TicketFormat, PossibleTicket, Vec<PossibleTicket>) {\n\n let format_inputs = vec![\n\n String::from(\"class: 0-1 or 4-19\"),\n\n String::from(\"row: 0-5 or 8-19\"),\n\n String::from(\"seat: 0-13 or 16-19\"),\n\n ];\n\n let ticket_format = TicketFormat::from_input(&format_inputs);\n\n\n\n let your_ticket = PossibleTicket::from_input(\"11,12,13\");\n\n\n\n let nearby_tickets_input = vec![\n\n \"3,9,18\",\n\n \"15,1,5\",\n\n \"5,14,9\",\n\n ];\n\n\n\n let mut nearby_tickets = vec![];\n\n for ticket_input in nearby_tickets_input {\n\n nearby_tickets.push(\n\n PossibleTicket::from_input(ticket_input)\n\n );\n\n }\n\n\n\n (ticket_format, your_ticket, nearby_tickets)\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 92, "score": 96772.95653430448 }, { "content": "fn run_program_until_first_loop_or_exit(program: Vec<String>) -> bool {\n\n let mut cpu = CPU::new(program);\n\n\n\n let does_loop = does_program_loop(&mut cpu);\n\n if 
does_loop {\n\n println!(\"Value of accumulator right before loop: {}\", cpu.get_accumulator());\n\n } else {\n\n println!(\"Value of accumulator right after exit: {}\", cpu.get_accumulator());\n\n }\n\n\n\n !does_loop\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 93, "score": 96656.73714169963 }, { "content": "#[test]\n\nfn test_compile_and_apply_instruction() {\n\n let mut chip = DecoderChip::new();\n\n\n\n chip.compile_and_apply_instruction(\"mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\");\n\n\n\n chip.compile_and_apply_instruction(\"mem[8] = 11\");\n\n assert_eq!(*chip.set_memory_locations.get(&8).unwrap(), 73);\n\n\n\n chip.compile_and_apply_instruction(\"mem[7] = 101\");\n\n assert_eq!(*chip.set_memory_locations.get(&7).unwrap(), 101);\n\n\n\n chip.compile_and_apply_instruction(\"mem[8] = 0\");\n\n assert_eq!(*chip.set_memory_locations.get(&8).unwrap(), 64);\n\n}\n\n\n", "file_path": "day14/src/tests.rs", "rank": 94, "score": 96427.44259344385 }, { "content": "#[test]\n\nfn test_decode_instruction() {\n\n assert_eq!(CPU::decode_instruction(\"nop\"), Instruction::Nop);\n\n\n\n assert_eq!(CPU::decode_instruction(\"acc +0\"), Instruction::Acc(0));\n\n assert_eq!(CPU::decode_instruction(\"acc -99\"), Instruction::Acc(-99));\n\n\n\n assert_eq!(CPU::decode_instruction(\"jmp -99\"), Instruction::Jmp(-99));\n\n}\n\n\n", "file_path": "day8/src/cpu/tests.rs", "rank": 95, "score": 96427.44259344385 }, { "content": "#[test]\n\nfn test_get_manhattan_distance() {\n\n let input = [\n\n \"F10\",\n\n \"N3\",\n\n \"F7\",\n\n \"R90\",\n\n \"F11\",\n\n ];\n\n\n\n let mut ship = Ship::new();\n\n\n\n for instruction in input.iter() {\n\n ship.move_ship(&MovementInstruction::compile(*instruction));\n\n }\n\n\n\n assert_eq!(25.0, ship.get_manhattan_distance());\n\n}\n\n\n", "file_path": "day12/src/tests.rs", "rank": 96, "score": 96427.44259344385 }, { "content": "#[test]\n\nfn test_scanning_error_rate() {\n\n let input = vec![\n\n String::from(\"class: 1-3 or 5-7\"),\n\n 
String::from(\"row: 6-11 or 33-44\"),\n\n String::from(\"seat: 13-40 or 45-50\"),\n\n ];\n\n let ticket_format = TicketFormat::from_input(&input);\n\n let tickets = vec![\n\n PossibleTicket::from_input(\"7,3,47\"),\n\n PossibleTicket::from_input(\"40,4,50\"),\n\n PossibleTicket::from_input(\"55,2,20\"),\n\n PossibleTicket::from_input(\"38,6,12\"),\n\n ];\n\n\n\n assert_eq!(scanning_error_rate(&ticket_format, &tickets), 71);\n\n}\n\n\n", "file_path": "day16/src/tests.rs", "rank": 97, "score": 96427.44259344385 }, { "content": "#[test]\n\nfn test_apply_mask_v2() {\n\n let mask = DecoderChip::compile_mask(\"000000000000000000000000000000X1001X\");\n\n let possible_values = mask.apply_mask_v2(42);\n\n let expected_values = vec![26, 27, 58, 59];\n\n\n\n let mut values_set = HashSet::new();\n\n for v in possible_values {\n\n values_set.insert(v);\n\n }\n\n\n\n for v in expected_values.iter() {\n\n assert!(values_set.contains(v));\n\n }\n\n\n\n let mask = DecoderChip::compile_mask(\"00000000000000000000000000000000X0XX\");\n\n let possible_values = mask.apply_mask_v2(26);\n\n let expected_values = vec![16, 17, 18, 19, 24, 25, 26, 27];\n\n\n\n let mut values_set = HashSet::new();\n\n for v in possible_values {\n\n values_set.insert(v);\n\n }\n\n\n\n for v in expected_values.iter() {\n\n assert!(values_set.contains(v));\n\n }\n\n}\n\n\n", "file_path": "day14/src/tests.rs", "rank": 98, "score": 96427.44259344385 }, { "content": "#[test]\n\nfn test_waiting_area_creation() {\n\n let waiting_area = waiting_area_with_sample_input();\n\n assert_eq!(waiting_area.width, 10);\n\n assert_eq!(waiting_area.height, 10);\n\n}\n\n\n", "file_path": "day11/src/tests.rs", "rank": 99, "score": 96427.44259344385 } ]
Rust
src/link/classify.rs
CollinValley/hivemind
89c6e4214fb88d27404f7a48ac5453be807cb21e
use crate::link::utils::task_park::*; use crate::Classifier; use crate::{link::QueueStream, HStream, Link}; use crossbeam::atomic::AtomicCell; use crossbeam::crossbeam_channel; use crossbeam::crossbeam_channel::{Receiver, Sender}; use futures::prelude::*; use futures::ready; use futures::task::{Context, Poll}; use std::marker::PhantomData; use std::pin::Pin; use std::sync::Arc; use tokio::stream::Stream; pub(crate) struct DoClassify<C: Classifier + Send + 'static> { phantom: PhantomData<C>, } impl<C: Classifier + Send + 'static> DoClassify<C> { pub(crate) fn do_classify( input: HStream<C::Packet>, mut classifier: C, cap: Option<usize>, ) -> Link<C::Packet> { let mut senders: Vec<Sender<Option<C::Packet>>> = Vec::new(); let mut receivers: Vec<Receiver<Option<C::Packet>>> = Vec::new(); let mut streams: Vec<HStream<C::Packet>> = Vec::new(); let mut task_parks: Vec<Arc<AtomicCell<TaskParkState>>> = Vec::new(); for _ in 0..classifier.num_ports() { let (sender, receiver) = match cap { None => crossbeam_channel::unbounded::<Option<C::Packet>>(), Some(capacity) => crossbeam_channel::bounded::<Option<C::Packet>>(capacity), }; let task_park = Arc::new(AtomicCell::new(TaskParkState::Empty)); let stream = QueueStream::new(receiver.clone(), Arc::clone(&task_park)); senders.push(sender); streams.push(Box::new(stream)); receivers.push(receiver); task_parks.push(task_park); } let runnable = ClassifyRunnable::new(input, senders, classifier, task_parks); Link::new(vec![Box::new(runnable)], streams) } } pub(crate) struct ClassifyRunnable<C: Classifier> { input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, } impl<C: Classifier> Unpin for ClassifyRunnable<C> {} impl<C: Classifier> ClassifyRunnable<C> { fn new( input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, ) -> Self { ClassifyRunnable { input_stream, 
to_egressors, classifier, task_parks, } } } impl<C: Classifier> Future for ClassifyRunnable<C> { type Output = (); fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { let ingressor = Pin::into_inner(self); loop { for (port, to_egressor) in ingressor.to_egressors.iter().enumerate() { if to_egressor.is_full() { park_and_wake(&ingressor.task_parks[port], cx.waker().clone()); return Poll::Pending; } } let packet_option: Option<C::Packet> = ready!(Pin::new(&mut ingressor.input_stream).poll_next(cx)); match packet_option { None => { for to_egressor in ingressor.to_egressors.iter() { to_egressor .try_send(None) .expect("ClassifyIngressor::Drop: try_send to_egressor shouldn't fail"); } for task_park in ingressor.task_parks.iter() { die_and_wake(&task_park); } return Poll::Ready(()); } Some(packet) => { if let Some(port) = ingressor.classifier.classify(&packet) { if port >= ingressor.to_egressors.len() { panic!("Classifier used port outside of its listed range: Port {}, NumOutputs{}", port, ingressor.classifier.num_ports(), ); } if let Err(err) = ingressor.to_egressors[port].try_send(Some(packet)) { panic!( "Error in to_egressors[{}] sender, have nowhere to put packet: {:?}", port, err ); } unpark_and_wake(&ingressor.task_parks[port]); } } } } } } #[cfg(test)] mod tests { use crate::utils::test::classifier::{even_link, fizz_buzz_link}; use crate::utils::test::harness::{initialize_runtime, test_link}; use crate::utils::test::packet_generators::{immediate_stream, PacketIntervalGenerator}; use crate::Link; use core::time; #[test] fn even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_wait_between_packets() { let packets = vec![0, 1, 2, 420, 1337, 3, 4, 5, 
6, 7, 8, 9]; let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = PacketIntervalGenerator::new(time::Duration::from_millis(10), packets.into_iter()); test_link(even_link(Box::new(packet_generator)), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn only_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![1, 1337, 3, 5, 7, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], []); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_long_stream() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..2000); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0].len(), 1000); assert_eq!(results[1].len(), 1000); } #[test] fn fizz_buzz() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); test_link(fizz_buzz_link(packet_generator), None).await }); let expected_fizz_buzz = vec![0, 15, 30]; assert_eq!(results[0], expected_fizz_buzz); let expected_fizz = vec![3, 6, 9, 12, 18, 21, 24, 27]; assert_eq!(results[1], expected_fizz); let expected_buzz = vec![5, 10, 20, 25]; assert_eq!(results[2], expected_buzz); let expected_other = vec![1, 2, 4, 7, 8, 11, 13, 14, 16, 17, 19, 22, 23, 26, 28, 29]; assert_eq!(results[3], expected_other); } #[test] fn fizz_buzz_to_even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); let (mut fb_runnables, mut fb_egressors) = fizz_buzz_link(packet_generator).take(); let (mut eo_runnables, eo_egressors) = even_link(fb_egressors.pop().unwrap()).take(); fb_runnables.append(&mut eo_runnables); test_link(Link::new(fb_runnables, eo_egressors), 
None).await }); assert_eq!(results[0], vec![2, 4, 8, 14, 16, 22, 26, 28]); assert_eq!(results[1], vec![1, 7, 11, 13, 17, 19, 23, 29]); } }
use crate::link::utils::task_park::*; use crate::Classifier; use crate::{link::QueueStream, HStream, Link}; use crossbeam::atomic::AtomicCell; use crossbeam::crossbeam_channel; use crossbeam::crossbeam_channel::{Receiver, Sender}; use futures::prelude::*; use futures::ready; use futures::task::{Context, Poll}; use std::marker::PhantomData; use std::pin::Pin; use std::sync::Arc; use tokio::stream::Stream; pub(crate) struct DoClassify<C: Classifier + Send + 'static> { phantom: PhantomData<C>, } impl<C: Classifier + Send + 'static> DoClassify<C> { pub(crate) fn do_classify( input: HStream<C::Packet>, mut classifier: C, cap: Option<usize>, ) -> Link<C::Packet> { let mut senders: Vec<Sender<Option<C::Packet>>> = Vec::new(); let mut receivers: Vec<Receiver<Option<C::Packet>>> = Vec::new(); let mut streams: Vec<HStream<C::Packet>> = Vec::new(); let mut task_parks: Vec<Arc<AtomicCell<TaskParkState>>> = Vec::new(); for _ in 0..classifier.num_ports() { let (sender, receiver) = match cap { None => crossbeam_channel::unbounded::<Option<C::Packet>>(), Some(capacity) => crossbeam_channel::bounded::<Option<C::Packet>>(capacity), }; let task_park = Arc::new(AtomicCell::new(TaskParkState::Empty)); let stream = QueueStream::new(receiver.clone(), Arc::clone(&task_park)); senders.push(sender); streams.push(Box::new(stream)); receivers.push(receiver); task_parks.push(task_park); } let runnable = ClassifyRunnable::new(input, senders, classifier, task_parks); Link::new(vec![Box::new(runnable)], streams) } } pub(crate) struct ClassifyRunnable<C: Classifier> { input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, } impl<C: Classifier> Unpin for ClassifyRunnable<C> {} impl<C: Classifier> ClassifyRunnable<C> { fn new( input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, ) -> Self { ClassifyRunnable { input_stream, 
to_egressors, classifier, task_parks, } } } impl<C: Classifier> Future for ClassifyRunnable<C> { type Output = (); fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { let ingressor = Pin::into_inner(self); loop { for (port, to_egressor) in ingressor.to_egressors.iter().enumerate() { if to_egressor.is_full() { park_and_wake(&ingressor.task_parks[port], cx.waker().clone()); return Poll::Pending; } } let packet_option: Option<C::Packet> = ready!(Pin::new(&mut ingressor.input_stream).poll_next(cx)); match packet_option { None => { for to_egressor in ingressor.to_egressors.iter() { to_egressor .try_send(None) .expect("ClassifyIngressor::Drop: try_send to_egressor shouldn't fail"); } for task_park in ingressor.task_parks.iter() { die_and_wake(&task_park); } return Poll::Ready(()); } Some(packet) => { if let Some(port) = ingressor.classifier.classify(&packet) { if port >= ingressor.to_egressors.len() { panic!("Classifier used port outside of its listed range: Port {}, NumOutputs{}", port, ingressor.classifier.num_ports(), ); } if let Err(err) = ingressor.to_egressors[port].try_send(Some(packet)) { panic!( "Error in to_egressors[{}] sender, have nowhere to put packet: {:?}", port, err ); } unpark_and_wake(&ingressor.task_parks[port]); } } } } } } #[cfg(test)] mod tests { use crate::utils::test::classifier::{even_link, fizz_buzz_link}; use crate::utils::test::harness::{initialize_runtime, test_link}; use crate::utils::test::packet_generators::{immediate_stream, PacketIntervalGenerator}; use crate::Link; use core::time; #[test] fn even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_wait_between_packets() { let packets = vec![0, 1, 2, 420, 1337, 3, 4, 5, 
6, 7, 8, 9]; let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = PacketIntervalGenerator::new(time::Duration::from_millis(10), packets.into_iter());
} #[test] fn fizz_buzz_to_even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); let (mut fb_runnables, mut fb_egressors) = fizz_buzz_link(packet_generator).take(); let (mut eo_runnables, eo_egressors) = even_link(fb_egressors.pop().unwrap()).take(); fb_runnables.append(&mut eo_runnables); test_link(Link::new(fb_runnables, eo_egressors), None).await }); assert_eq!(results[0], vec![2, 4, 8, 14, 16, 22, 26, 28]); assert_eq!(results[1], vec![1, 7, 11, 13, 17, 19, 23, 29]); } }
test_link(even_link(Box::new(packet_generator)), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn only_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![1, 1337, 3, 5, 7, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], []); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_long_stream() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..2000); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0].len(), 1000); assert_eq!(results[1].len(), 1000); } #[test] fn fizz_buzz() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); test_link(fizz_buzz_link(packet_generator), None).await }); let expected_fizz_buzz = vec![0, 15, 30]; assert_eq!(results[0], expected_fizz_buzz); let expected_fizz = vec![3, 6, 9, 12, 18, 21, 24, 27]; assert_eq!(results[1], expected_fizz); let expected_buzz = vec![5, 10, 20, 25]; assert_eq!(results[2], expected_buzz); let expected_other = vec![1, 2, 4, 7, 8, 11, 13, 14, 16, 17, 19, 22, 23, 26, 28, 29]; assert_eq!(results[3], expected_other);
random
[ { "content": "pub fn fizz_buzz_link(stream: HStream<i32>) -> Link<i32> {\n\n DoClassify::do_classify(stream, FizzBuzz::new(), None)\n\n}\n", "file_path": "src/utils/test/classifier/fizz_buzz.rs", "rank": 0, "score": 165602.4877498499 }, { "content": "pub trait ClassifyFn<C: Classifier + Clone + Send + 'static> {\n\n fn classify(self, classifier: C, cap: Option<usize>) -> Vec<Link<C::Packet>>;\n\n}\n\n\n\nimpl<C: Classifier + Clone + Send + 'static> ClassifyFn<C> for Link<C::Packet> {\n\n fn classify(mut self, classifier: C, cap: Option<usize>) -> Vec<Link<C::Packet>> {\n\n let mut links = vec![];\n\n //Take and classify each input stream\n\n for stream in self.streams {\n\n let (mut runnables, streams) =\n\n DoClassify::do_classify(stream, classifier.clone(), cap).take();\n\n runnables.append(&mut self.runnables);\n\n links.push(Link::new(runnables, streams));\n\n }\n\n links\n\n }\n\n}\n\n\n\nuse std::marker::PhantomData;\n", "file_path": "src/lib.rs", "rank": 1, "score": 158562.44896711485 }, { "content": "pub fn even_link(stream: HStream<i32>) -> Link<i32> {\n\n DoClassify::do_classify(stream, Even::new(), None)\n\n}\n", "file_path": "src/utils/test/classifier/even.rs", "rank": 2, "score": 157901.08604713812 }, { "content": "//TODO: Under construction\n\n/// Runner is a user facing helper function for running the constructed router.\n\n///\n\n/// Runner takes only one argument, router, which is a Trait Object that implements LinkBuilder. Runner will construct\n\n/// the router inside of the Runtime context by calling build_link(). As such, the router object that is passed in should\n\n/// do all of the required initialization before it is passed into runner. 
(IE calling ingressors(), etc)\n\n///\n\n/// In general, the `Link` returned by the router should contain only TokioRunnables and no PacketStreams,\n\n/// since production routers are self contained with all their output going to links that push the packets\n\n/// out the routers physical ports.\n\n///\n\n/// However, if the link your `link_builder()` fn provides does return egressors, this function will automatically\n\n/// hook them into `PacketCollector` links, and whatever packets come out of those egressors will be returned to you once\n\n/// the router completes operation and joins. In a production router, the router likely never stops running so\n\n/// nothing will ever get returned. Use this functionality only for testing. \n\npub fn runner<Packet: Debug + Send + Clone + 'static, Router: LinkBuilder<Packet, Packet>>(\n\n router: Router,\n\n) -> Vec<Vec<Packet>> {\n\n let mut runtime = runtime::Builder::new()\n\n .threaded_scheduler()\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n\n\n runtime.block_on(async {\n\n let (mut runnables, egressors) = router.build_link();\n\n\n\n let (mut consumers, receivers): (\n\n Vec<TokioRunnable>,\n\n Vec<crossbeam_channel::Receiver<Packet>>,\n\n ) = egressors\n\n .into_iter()\n\n .map(|egressor| {\n\n let (s, r) = crossbeam_channel::unbounded::<Packet>();\n\n // TODO: Do we care about consumer IDs? Are they helpful to debug test examples?\n", "file_path": "src/utils/runner.rs", "rank": 3, "score": 153655.25040002263 }, { "content": "/// Immediately yields a collection of packets to be poll'd.\n\n/// Thin wrapper around iter_ok.\n\npub fn immediate_stream<I>(collection: I) -> HStream<I::Item>\n\nwhere\n\n I: IntoIterator,\n\n I::IntoIter: Send + 'static,\n\n{\n\n Box::new(stream::iter(collection))\n\n}\n\n\n\n/*\n\n LinearIntervalGenerator\n\n\n\n Generates a series of monotonically increasing integers, starting at 0.\n\n `iterations` \"packets\" are generated in the stream. 
One is yielded every\n\n `duration`.\n\n*/\n\n\n\npub struct LinearIntervalGenerator {\n\n interval: Interval,\n\n iterations: usize,\n\n seq_num: i32,\n", "file_path": "src/utils/test/packet_generators.rs", "rank": 4, "score": 112193.6743781225 }, { "content": "pub trait ProcessFn<P: Processor + Clone + Send + 'static> {\n\n fn process(self, processor: P) -> Link<P::Output>;\n\n}\n\n\n\nimpl<P: Processor + Clone + Send + 'static> ProcessFn<P> for Link<P::Input> {\n\n // Append process to each stream in link\n\n fn process(self, p: P) -> Link<P::Output> {\n\n let mut streams: Vec<HStream<P::Output>> = vec![];\n\n for stream in self.streams {\n\n // Little weird, still, process doesn't create new runnables, so just\n\n // manipulate the stream directly.\n\n let p_stream = ProcessStream::new(stream, p.clone());\n\n streams.push(Box::new(p_stream));\n\n }\n\n Link::new(self.runnables, streams)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 92501.67285503032 }, { "content": "pub fn initialize_runtime() -> runtime::Runtime {\n\n runtime::Builder::new()\n\n .threaded_scheduler()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n}\n\n\n\n/// test_link\n\n///\n\n/// A testing harness to be used to run a link you wish to test. It takes a link to run\n\n/// and a max_test_duration option. If you have a link that should complete, ie it tears\n\n/// down, then put a None for Duration. 
If you have a link that is not expected to end\n\n///\n\npub async fn test_link<OutputPacket: Debug + Send + Clone + 'static>(\n\n link: Link<OutputPacket>,\n\n max_test_duration: Option<Duration>,\n\n) -> Vec<Vec<OutputPacket>> {\n\n // generate consumers for each egressors\n\n let (mut runnables, streams) = link.take();\n", "file_path": "src/utils/test/harness.rs", "rank": 6, "score": 92061.49242852574 }, { "content": "mod even;\n\npub use self::even::*;\n\n\n\nmod fizz_buzz;\n\npub use self::fizz_buzz::*;\n", "file_path": "src/utils/test/classifier/mod.rs", "rank": 7, "score": 79257.94122318846 }, { "content": "// Notifies a task if it resides in the `task_park`, and then sets\n\n// the `TaskParkState` to `Dead`.\n\n// Use when the callee is dropping and will not be able to awaken tasks\n\n// parked here in the future.\n\npub fn die_and_wake(task_park: &Arc<AtomicCell<TaskParkState>>) {\n\n swap_and_wake(task_park, TaskParkState::Dead);\n\n}\n", "file_path": "src/link/utils/task_park.rs", "rank": 8, "score": 57124.403151675244 }, { "content": "// Notifies a task if it resides in the `task_park`\n\n// Use this when you wish you wake up a task but do not wish to sleep yourself\n\npub fn unpark_and_wake(task_park: &Arc<AtomicCell<TaskParkState>>) {\n\n swap_and_wake(task_park, TaskParkState::Empty);\n\n}\n\n\n", "file_path": "src/link/utils/task_park.rs", "rank": 9, "score": 57121.5937889248 }, { "content": "// Simlar to logic to `park_and_wake`, with the key difference being that it\n\n// takes a provided Arc of the task handle that we wish to park. 
This enables the\n\n// callee to park their task handle in multiple locations without fear of overnotificiation.\n\n// This is used primarily by the egressor of the JoinLink.\n\npub fn indirect_park_and_wake(\n\n task_park: &Arc<AtomicCell<TaskParkState>>,\n\n task: Arc<AtomicCell<Option<task::Waker>>>,\n\n) -> bool {\n\n swap_and_wake(task_park, TaskParkState::IndirectParked(task))\n\n}\n\n\n", "file_path": "src/link/utils/task_park.rs", "rank": 10, "score": 55392.54944040998 }, { "content": "pub mod classifier;\n\npub mod harness;\n\npub mod packet_collectors;\n\npub mod packet_generators;\n\npub mod processor;\n", "file_path": "src/utils/test/mod.rs", "rank": 11, "score": 54925.53480551506 }, { "content": "// A simple pull based link. It is pull based in the sense that packets are only fetched on the input\n\n// when a packet is requested from the output. This link does not have the abilty store packets internally,\n\n// so all packets that enter either immediatly leave or are dropped, as dictated by the processor. Both sides of\n\n// this link are on the same thread, hence the label synchronous.\n\nmod process;\n\npub(crate) use self::process::ProcessStream;\n\n\n\n// Input packets are placed into an intermediate channel that are pulled from the output asynchronously.\n\n// Asynchronous in that a packets may enter and leave this link asynchronously to each other. This link is\n\n// useful for creating queues in the router, buffering, and creating `Task` boundries that can be processed on\n\n// different threads, or even different cores. Before packets are placed into the queue to be output, they are run\n\n// through the processor defined process function, often performing some sort of transformation.\n\nmod queue;\n\npub(crate) use self::queue::QueueStream;\n\n\n\n// Uses processor defined classifications to sort input into different streams, a good example would\n\n// be a flow that splits IPv4 and IPv6 packets, asynchronous. 
Packets are either dispatched to a\n\n// particular stream, or dropped.\n\nmod classify;\n\npub(crate) use self::classify::DoClassify;\n\n\n\n// Fairly combines all inputs into a single output, asynchronous.\n\nmod join;\n\n\n\n// Copies all input to each of its outputs, asynchronous.\n\nmod fork;\n\n\n\nmod utils;\n", "file_path": "src/link/mod.rs", "rank": 12, "score": 54884.463250155575 }, { "content": "mod identity;\n\npub use self::identity::*;\n\n\n\nmod transform_from;\n\npub use self::transform_from::*;\n\n\n\nmod drop;\n\npub use self::drop::*;\n\n\n\nmod log;\n\npub use self::log::*;\n\n\n\nmod file_log;\n\npub use self::file_log::*;\n", "file_path": "src/utils/test/processor/mod.rs", "rank": 24, "score": 52732.116073357894 }, { "content": "// A cache for storing task handles.\n\npub mod task_park;\n", "file_path": "src/link/utils/mod.rs", "rank": 25, "score": 52581.78606526842 }, { "content": "\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n let collector = Pin::into_inner(self);\n\n loop {\n\n match ready!(Pin::new(&mut collector.stream).poll_next(cx)) {\n\n Some(value) => {\n\n collector\n\n .packet_dump\n\n .try_send(value)\n\n .expect(\"Exhaustive Collector: Error sending to packet dump\");\n\n }\n\n None => return Poll::Ready(()),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/utils/test/packet_collectors.rs", "rank": 26, "score": 52548.77520010069 }, { "content": " }\n\n}\n\n\n\nimpl<T: Debug> Future for ExhaustiveDrain<T> {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n let drain = Pin::into_inner(self);\n\n loop {\n\n match ready!(Pin::new(&mut drain.stream).poll_next(cx)) {\n\n Some(_value) => {}\n\n None => return Poll::Ready(()),\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Exhaustive Collector works like Exhaustive Drain in that it continuously polls for packets until it\n\n/// receives a None, but it also will write that value out to the provided channel, so that 
the packet\n\n/// may be compared in a test.\n", "file_path": "src/utils/test/packet_collectors.rs", "rank": 27, "score": 52548.23325694677 }, { "content": "use crate::HStream;\n\nuse crossbeam::crossbeam_channel::Sender;\n\nuse futures::prelude::*;\n\nuse futures::ready;\n\nuse futures::task::{Context, Poll};\n\nuse std::fmt::Debug;\n\nuse std::pin::Pin;\n\n\n\n/// A structure that may be handed an input stream that it will exhaustively drain from until it\n\n/// recieves a None. Useful for testing purposes.\n\npub struct ExhaustiveDrain<T: Debug> {\n\n id: usize,\n\n stream: HStream<T>,\n\n}\n\n\n\nimpl<T: Debug> Unpin for ExhaustiveDrain<T> {}\n\n\n\nimpl<T: Debug> ExhaustiveDrain<T> {\n\n pub fn new(id: usize, stream: HStream<T>) -> Self {\n\n ExhaustiveDrain { id, stream }\n", "file_path": "src/utils/test/packet_collectors.rs", "rank": 28, "score": 52537.52327858069 }, { "content": "}\n\n\n\nimpl<Iterable, Packet> Stream for PacketIntervalGenerator<Iterable, Packet>\n\nwhere\n\n Iterable: Iterator<Item = Packet>,\n\n Packet: Sized,\n\n{\n\n type Item = Packet;\n\n\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n let interval_generator = Pin::into_inner(self);\n\n ready!(Pin::new(&mut interval_generator.interval).poll_next(cx));\n\n match interval_generator.packets.next() {\n\n Some(packet) => Poll::Ready(Some(packet)),\n\n None => Poll::Ready(None),\n\n }\n\n }\n\n}\n", "file_path": "src/utils/test/packet_generators.rs", "rank": 29, "score": 52537.25786690687 }, { "content": "}\n\n\n\nimpl LinearIntervalGenerator {\n\n pub fn new(duration: Duration, iterations: usize) -> Self {\n\n LinearIntervalGenerator {\n\n interval: interval(duration),\n\n iterations,\n\n seq_num: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl Stream for LinearIntervalGenerator {\n\n type Item = i32;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n ready!(Pin::new(&mut self.interval).poll_next(cx));\n\n if 
self.seq_num as usize > self.iterations {\n\n Poll::Ready(None)\n\n } else {\n", "file_path": "src/utils/test/packet_generators.rs", "rank": 30, "score": 52532.488007848704 }, { "content": "pub struct ExhaustiveCollector<T: Debug> {\n\n id: usize,\n\n stream: HStream<T>,\n\n packet_dump: Sender<T>,\n\n}\n\n\n\nimpl<T: Debug> Unpin for ExhaustiveCollector<T> {}\n\n\n\nimpl<T: Debug> ExhaustiveCollector<T> {\n\n pub fn new(id: usize, stream: HStream<T>, packet_dump: Sender<T>) -> Self {\n\n ExhaustiveCollector {\n\n id,\n\n stream,\n\n packet_dump,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Debug> Future for ExhaustiveCollector<T> {\n\n type Output = ();\n", "file_path": "src/utils/test/packet_collectors.rs", "rank": 31, "score": 52530.02142334352 }, { "content": "use crate::HStream;\n\nuse futures::prelude::*;\n\nuse futures::ready;\n\nuse futures::task::{Context, Poll};\n\nuse std::pin::Pin;\n\nuse tokio::time::{interval, Duration, Interval};\n\n\n\n/// Immediately yields a collection of packets to be poll'd.\n\n/// Thin wrapper around iter_ok.\n", "file_path": "src/utils/test/packet_generators.rs", "rank": 32, "score": 52527.95458449804 }, { "content": " let next_packet = Poll::Ready(Some(self.seq_num));\n\n self.seq_num += 1;\n\n next_packet\n\n }\n\n }\n\n}\n\n\n\n/// Packet Interval Generator procduces a Stream of packets on a defined interval.AsMut\n\n///\n\n/// Which packet is next sent is determined by the Iterator, provided during creation. This\n\n/// is intended to be a full fledged packet eventually, but the trait bound is only set to\n\n/// something that is Sized. 
The Iterator is polled until it runs out of values, at which\n\n/// point we close the Stream by sending a Ready(None).\n\npub struct PacketIntervalGenerator<Iterable, Packet>\n\nwhere\n\n Iterable: Iterator<Item = Packet>,\n\n Packet: Sized,\n\n{\n\n interval: Interval,\n\n packets: Iterable,\n", "file_path": "src/utils/test/packet_generators.rs", "rank": 33, "score": 52524.65790722145 }, { "content": "}\n\n\n\nimpl<Iterable, Packet> Unpin for PacketIntervalGenerator<Iterable, Packet>\n\nwhere\n\n Iterable: Iterator<Item = Packet>,\n\n Packet: Sized,\n\n{\n\n}\n\n\n\nimpl<Iterable, Packet> PacketIntervalGenerator<Iterable, Packet>\n\nwhere\n\n Iterable: Iterator<Item = Packet>,\n\n Packet: Sized,\n\n{\n\n pub fn new(duration: Duration, packets: Iterable) -> Self {\n\n PacketIntervalGenerator {\n\n interval: interval(duration),\n\n packets,\n\n }\n\n }\n", "file_path": "src/utils/test/packet_generators.rs", "rank": 34, "score": 52520.56244983607 }, { "content": "use crate::Classifier;\n\nuse crate::{link::DoClassify, HStream, Link};\n\n\n\n#[derive(Default, Clone)]\n\npub struct Even {}\n\n\n\nimpl Even {\n\n pub fn new() -> Self {\n\n Even {}\n\n }\n\n}\n\n\n\nimpl Classifier for Even {\n\n type Packet = i32;\n\n\n\n fn classify(&mut self, packet: &Self::Packet) -> Option<usize> {\n\n //hilarious. I'm too lazy to change this out to a usize packet type\n\n match packet % 2 {\n\n 0 => Some(0),\n\n 1 => Some(1),\n\n _ => None,\n\n }\n\n }\n\n fn num_ports(&mut self) -> usize {\n\n 2\n\n }\n\n}\n\n\n", "file_path": "src/utils/test/classifier/even.rs", "rank": 35, "score": 52181.57657482051 }, { "content": "// Swaps in the provided TaskParkState. 
wakes any task that it finds currently in the `task_park`\n\n// Returns `true` if it was able to successfully park the provided task, ie the `task_park` is not dead.\n\nfn swap_and_wake(task_park: &Arc<AtomicCell<TaskParkState>>, swap: TaskParkState) -> bool {\n\n match task_park.swap(swap) {\n\n TaskParkState::Dead => {\n\n task_park.store(TaskParkState::Dead);\n\n false\n\n }\n\n TaskParkState::Empty => true,\n\n TaskParkState::Parked(task) => {\n\n task.wake();\n\n true\n\n }\n\n TaskParkState::IndirectParked(task) => {\n\n if let Some(task) = task.swap(None) {\n\n task.wake();\n\n }\n\n true\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/link/utils/task_park.rs", "rank": 36, "score": 51554.11910039789 }, { "content": "// Notifies a task if it resides in the `task_park`, and\n\n// then parks the callee task in the `task_park`.\n\n// Use when you wish to sleep the current task\n\npub fn park_and_wake(task_park: &Arc<AtomicCell<TaskParkState>>, task: task::Waker) {\n\n if !swap_and_wake(task_park, TaskParkState::Parked(task.clone())) {\n\n task.wake();\n\n }\n\n}\n\n\n", "file_path": "src/link/utils/task_park.rs", "rank": 37, "score": 51457.74619395545 }, { "content": "use crate::Classifier;\n\nuse crate::{link::DoClassify, HStream, Link};\n\n\n\npub enum FizzBuzzVariant {\n\n FizzBuzz,\n\n Fizz,\n\n Buzz,\n\n None,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct FizzBuzz {}\n\n\n\nimpl FizzBuzz {\n\n pub fn new() -> Self {\n\n FizzBuzz {}\n\n }\n\n}\n\n\n\nimpl Classifier for FizzBuzz {\n", "file_path": "src/utils/test/classifier/fizz_buzz.rs", "rank": 38, "score": 50167.195143749195 }, { "content": " type Packet = i32;\n\n\n\n fn classify(&mut self, packet: &Self::Packet) -> Option<usize> {\n\n if packet % 3 == 0 && packet % 5 == 0 {\n\n Some(0)\n\n } else if packet % 3 == 0 {\n\n Some(1)\n\n } else if packet % 5 == 0 {\n\n Some(2)\n\n } else {\n\n Some(3)\n\n }\n\n }\n\n\n\n fn num_ports(&mut self) -> usize {\n\n 4\n\n }\n\n}\n\n\n", "file_path": 
"src/utils/test/classifier/fizz_buzz.rs", "rank": 39, "score": 50160.52765417207 }, { "content": "// This is a fairly simple, round robin unzipper classifier. Since it must run in round robin, if one stream\n\n// runs consistently faster than the others, it will be underfed. But, I don't want to over-optimize too\n\n// early and implement a special work-queue lower level link to use instead of classify, so this should\n\n// do for now.\n\nstruct Unzipper<A> {\n\n phantom: PhantomData<A>,\n\n by: usize,\n\n count: usize,\n\n}\n\n\n\nimpl<A> Unzipper<A> {\n\n pub fn new(by: usize) -> Self {\n\n Unzipper {\n\n phantom: PhantomData,\n\n by,\n\n count: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl<A: Send + 'static> Classifier for Unzipper<A> {\n\n type Packet = A;\n\n\n\n fn classify(&mut self, _packet: &Self::Packet) -> Option<usize> {\n", "file_path": "src/lib.rs", "rank": 40, "score": 46987.87016566231 }, { "content": "#[allow(dead_code)]\n\npub mod test;\n\n\n\n//TODO: fixup runner\n\n//pub mod runner;\n", "file_path": "src/utils/mod.rs", "rank": 41, "score": 29459.096029904733 }, { "content": "pub trait Classifier {\n\n type Packet: Send + 'static;\n\n\n\n fn classify(&mut self, packet: &Self::Packet) -> Option<usize>;\n\n fn num_ports(&mut self) -> usize;\n\n}\n\n\n\n/// A Boxed futures::Stream, used by hivemind for data flow. They must be\n\n/// boxed so we can hand around vectors of them\n\npub type HStream<T> = Box<dyn futures::Stream<Item = T> + Send + Unpin>;\n\n\n\n/// Runnables, a redefined type over Future, refers to the internal handles hivemind\n\n/// generates as it builds the graph. Most of the operations in hivemind, other than those\n\n/// that operate as a 1-1 map, create runnables as a side effect. Runnables that are created\n\n/// must be collected and provided to the runtime for the graph to operate. 
Most of the time,\n\n/// this is not a manual process, runnables are automatically \"rolled forward\" in Links that\n\n/// are transformed through the provided functions, such as unzip(). However, if you call take()\n\n/// on a link, the runnables that are provided must be manually collected by yourself.\n\npub type Runnable = Box<dyn futures::Future<Output = ()> + Send + Unpin>;\n\n\n", "file_path": "src/lib.rs", "rank": 42, "score": 27630.634394005865 }, { "content": " let (mut consumers, receivers): (\n\n Vec<Runnable>,\n\n Vec<crossbeam_channel::Receiver<OutputPacket>>,\n\n ) = streams\n\n .into_iter()\n\n .map(|stream| {\n\n let (s, r) = crossbeam_channel::unbounded::<OutputPacket>();\n\n // TODO: Do we care about consumer IDs? Are they helpful to debug test examples?\n\n let consumer: Runnable = Box::new(ExhaustiveCollector::new(0, stream, s));\n\n (consumer, r)\n\n })\n\n .unzip();\n\n\n\n // gather link's runnables and tokio-driven consumers into one collection\n\n runnables.append(&mut consumers);\n\n\n\n // 🏃💨💨\n\n match max_test_duration {\n\n None => {\n\n spawn_runnables(runnables).await;\n", "file_path": "src/utils/test/harness.rs", "rank": 43, "score": 26721.26216758631 }, { "content": "use crate::utils::test::packet_collectors::ExhaustiveCollector;\n\nuse crate::{Link, Runnable};\n\nuse crossbeam::crossbeam_channel;\n\nuse std::fmt::Debug;\n\nuse tokio::{\n\n runtime,\n\n time::{timeout, Duration},\n\n};\n\n\n\n/// The utils::test::harness module should be able to help Link authors abstract away the\n\n/// complexity of dealing with the runtime. 
Tests should be expressed with the\n\n/// typical \"Given, When, Then\" structure (https://martinfowler.com/bliki/GivenWhenThen.html).\n\n\n\n/// \"Given\" refers to the state of the world before the behavior under test runs.\n\n/// The initial context needed to test a link is the incoming packet stream(s).\n\n\n\n/// \"When\" refers to the behavior under test.\n\n/// This is the Link configuration we're trying to test.\n\n\n\n/// \"Then\" refers to the expected changes to the system due to executing the behavior under test\n\n/// against the initial context.\n\n/// This is the state of packet collectors after the input has been exhausted and run through\n\n/// our Link under test.\n\n\n\n/// Since the initial context of \"a link's input streams\" are coupled to the construction of Links,\n\n/// let's just expose a function that takes a connected Link, runs it's runnables and collectors\n\n/// through the runtime, and extracts the output packets into vectors representing egress streams.\n\n\n", "file_path": "src/utils/test/harness.rs", "rank": 44, "score": 26718.84155560259 }, { "content": " }\n\n Some(duration) => {\n\n let _res = timeout(duration, spawn_runnables(runnables)).await;\n\n }\n\n }\n\n\n\n // collect packets from consumers via receiver channels\n\n receivers\n\n .into_iter()\n\n .map(|receiver| receiver.try_iter().collect())\n\n .collect()\n\n}\n\n\n\nasync fn spawn_runnables(runnables: Vec<Runnable>) {\n\n let mut handles = vec![];\n\n for runnable in runnables {\n\n handles.push(tokio::spawn(runnable));\n\n }\n\n await_handles(handles).await;\n\n}\n\n\n\nasync fn await_handles(handles: Vec<tokio::task::JoinHandle<()>>) {\n\n for handle in handles {\n\n handle.await.unwrap();\n\n }\n\n}\n", "file_path": "src/utils/test/harness.rs", "rank": 45, "score": 26703.711319300106 }, { "content": "use crate::Processor;\n\nuse std::marker::PhantomData;\n\n\n\n/// Processor that passes what it receives.\n\n#[derive(Default, Clone)]\n\npub struct Identity<A: Send 
+ Clone> {\n\n phantom: PhantomData<A>,\n\n}\n\n\n\nimpl<A: Send + Clone> Identity<A> {\n\n pub fn new() -> Identity<A> {\n\n Identity {\n\n phantom: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<A: Send + Clone> Processor for Identity<A> {\n\n type Input = A;\n\n type Output = A;\n\n\n\n fn process(&mut self, packet: Self::Input) -> Option<Self::Output> {\n\n Some(packet)\n\n }\n\n}\n", "file_path": "src/utils/test/processor/identity.rs", "rank": 46, "score": 25654.333725586075 }, { "content": " rng: StdRng::seed_from_u64(int_seed),\n\n }\n\n }\n\n}\n\n\n\nimpl<A: Send + Clone> Processor for Drop<A> {\n\n type Input = A;\n\n type Output = A;\n\n\n\n fn process(&mut self, packet: Self::Input) -> Option<Self::Output> {\n\n if self.bernoulli.sample(&mut self.rng) {\n\n None\n\n } else {\n\n Some(packet)\n\n }\n\n }\n\n}\n\n\n\nimpl<A: Send + Clone> Default for Drop<A> {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n", "file_path": "src/utils/test/processor/drop.rs", "rank": 47, "score": 25649.117192567006 }, { "content": " }\n\n}\n\n\n\nimpl<A: Send + Clone, B: From<A> + Send + Clone> Processor for TransformFrom<A, B> {\n\n type Input = A;\n\n type Output = B;\n\n\n\n fn process(&mut self, packet: Self::Input) -> Option<Self::Output> {\n\n Some(Self::Output::from(packet))\n\n }\n\n}\n", "file_path": "src/utils/test/processor/transform_from.rs", "rank": 48, "score": 25645.35570610078 }, { "content": "use crate::Processor;\n\nuse rand::distributions::{Bernoulli, Distribution};\n\nuse rand::rngs::StdRng;\n\nuse rand::SeedableRng;\n\nuse std::marker::PhantomData;\n\n\n\n/// DropProcessor\n\n/// Drops packets with weighted randomness.\n\npub struct Drop<A: Send + Clone> {\n\n phantom: PhantomData<A>,\n\n bernoulli: Bernoulli,\n\n rng: StdRng,\n\n}\n\n\n\nimpl<A: Send + Clone> Drop<A> {\n\n pub fn new() -> Self {\n\n Drop {\n\n phantom: PhantomData,\n\n bernoulli: Bernoulli::new(1.0).unwrap(),\n\n rng: StdRng::from_entropy(),\n", "file_path": 
"src/utils/test/processor/drop.rs", "rank": 49, "score": 25642.56110069223 }, { "content": "/// \"It is critical to call flush before BufWriter<W> is dropped.\n\n/// Though dropping will attempt to flush the the contents of the buffer, any errors that happen in\n\n/// the process of dropping will be ignored. Calling flush ensures that the buffer is empty and thus\n\n/// dropping will not even attempt file operations.\"\n\n/// https://doc.rust-lang.org/std/io/struct.BufWriter.html\n\nimpl<A, W: Write> Drop for Log<A, W> {\n\n fn drop(&mut self) {\n\n self.log_writer.flush().unwrap();\n\n }\n\n}\n\n\n\nimpl<A: Send + Clone + Debug, W: Write> Processor for Log<A, W> {\n\n type Input = A;\n\n type Output = A;\n\n\n\n fn process(&mut self, packet: Self::Input) -> Option<Self::Output> {\n\n self.log_writer\n\n .write_all(format!(\"{:?}\\n\", packet).as_ref())\n\n .unwrap();\n\n Some(packet)\n", "file_path": "src/utils/test/processor/log.rs", "rank": 50, "score": 25642.284279316176 }, { "content": "use crate::Processor;\n\nuse std::convert::From;\n\nuse std::marker::PhantomData;\n\nuse std::marker::Send;\n\n\n\n/// Transform Processor\n\n///\n\n/// A generic processor that tranforms A -> B by calling B::from(A)\n\n#[derive(Default)]\n\npub struct TransformFrom<A: Send + Clone, B: Send + Clone> {\n\n phantom_in: PhantomData<A>,\n\n phantom_out: PhantomData<B>,\n\n}\n\n\n\nimpl<A: Send + Clone, B: Send + Clone> TransformFrom<A, B> {\n\n pub fn new() -> TransformFrom<A, B> {\n\n TransformFrom {\n\n phantom_in: PhantomData,\n\n phantom_out: PhantomData,\n\n }\n", "file_path": "src/utils/test/processor/transform_from.rs", "rank": 51, "score": 25639.188876244254 }, { "content": " }\n\n}\n\n\n\n// TODO: Find common testing abstraction for logging processors\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::File;\n\n use std::fs::{create_dir_all, remove_file};\n\n use std::io::Read;\n\n use std::path::Path;\n\n use uuid::Uuid;\n\n\n\n fn 
test_log_processor(packets: Vec<i32>, expected_log: &str) {\n\n let log_dir = Path::new(\"test_logs\");\n\n let log_filename = format!(\"{}.log\", Uuid::new_v4());\n\n let log_path = log_dir.join(log_filename);\n\n create_dir_all(log_dir).unwrap();\n\n let log_file = File::create(log_path.clone()).unwrap();\n\n\n", "file_path": "src/utils/test/processor/log.rs", "rank": 52, "score": 25638.49221618752 }, { "content": "use crate::Processor;\n\nuse std::fmt::Debug;\n\nuse std::io::{BufWriter, Write};\n\nuse std::marker::PhantomData;\n\n\n\n/// Processor that logs incoming packets with Debug information, delimited with newlines.\n\npub struct Log<A, W: Write> {\n\n phantom: PhantomData<A>,\n\n log_writer: BufWriter<W>,\n\n}\n\n\n\nimpl<A, W: Write> Log<A, W> {\n\n pub fn new(writer: W) -> Log<A, W> {\n\n Log {\n\n phantom: PhantomData,\n\n log_writer: BufWriter::new(writer),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils/test/processor/log.rs", "rank": 53, "score": 25637.25186986303 }, { "content": " let mut proc = Log::new(log_file);\n\n\n\n let res_packets: Vec<i32> = packets\n\n .clone()\n\n .into_iter()\n\n .map(|packet| proc.process(packet).unwrap())\n\n .collect();\n\n assert_eq!(res_packets, packets); // assert identity\n\n\n\n std::mem::drop(proc); // dropping to flush internal BufWriter\n\n\n\n let mut log_file = File::open(log_path.clone()).unwrap();\n\n let mut contents = String::new();\n\n log_file.read_to_string(&mut contents).unwrap();\n\n assert_eq!(contents, expected_log);\n\n remove_file(log_path).unwrap();\n\n }\n\n\n\n #[test]\n\n fn writes_nothing() {\n", "file_path": "src/utils/test/processor/log.rs", "rank": 54, "score": 25635.937071046734 }, { "content": " }\n\n }\n\n\n\n pub fn drop_chance(self, chance: f64) -> Self {\n\n assert!(chance >= 0.0, \"drop_chance must be positive\");\n\n assert!(\n\n chance <= 1.0,\n\n \"drop_chance must be less than or equal to 1.0\"\n\n );\n\n Drop {\n\n phantom: self.phantom,\n\n bernoulli: 
Bernoulli::new(chance).unwrap(),\n\n rng: self.rng,\n\n }\n\n }\n\n\n\n pub fn seed(self, int_seed: u64) -> Self {\n\n Drop {\n\n phantom: self.phantom,\n\n bernoulli: self.bernoulli,\n", "file_path": "src/utils/test/processor/drop.rs", "rank": 55, "score": 25634.93492918164 }, { "content": " test_log_processor(vec![], \"\");\n\n }\n\n\n\n #[test]\n\n fn writes_packet() {\n\n test_log_processor(vec![10], \"10\\n\");\n\n }\n\n\n\n #[test]\n\n fn writes_stream_of_packets() {\n\n test_log_processor((0..10).collect(), \"0\\n1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n\");\n\n }\n\n\n\n // TODO: Add tests for other impl Write's like network sockets\n\n}\n", "file_path": "src/utils/test/processor/log.rs", "rank": 56, "score": 25633.839545966246 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<P: Send + Clone> Future for ForkRunnable<P> {\n\n type Output = ();\n\n\n\n // If any of the channels are full, we await that channel to clear before processing a new packet.\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n loop {\n\n for (port, to_egressor) in self.to_egressors.iter().enumerate() {\n\n if to_egressor.is_full() {\n\n park_and_wake(&self.task_parks[port], cx.waker().clone());\n\n return Poll::Pending;\n\n }\n\n }\n\n let packet_option: Option<P> = ready!(Pin::new(&mut self.input_stream).poll_next(cx));\n\n\n\n match packet_option {\n\n None => {\n", "file_path": "src/link/fork.rs", "rank": 57, "score": 25452.5066941294 }, { "content": " // This case is handled by the `try_ready!` macro, which will automatically return\n\n // `Ok(Async::NotReady)` if the input stream gives us NotReady.\n\n //\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n loop {\n\n match ready!(Pin::new(&mut self.in_stream).poll_next(cx)) {\n\n None => return Poll::Ready(None),\n\n Some(input_packet) => {\n\n // if `processor.process` returns None, do nothing, loop around and try polling again.\n\n if let Some(output_packet) = 
self.processor.process(input_packet) {\n\n return Poll::Ready(Some(output_packet));\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/link/process.rs", "rank": 58, "score": 25450.81547094506 }, { "content": " // been transformed in some way. We pass that on to our egress channel and wake\n\n // our `Egressor` that it has work to do, and continue polling our upstream `HStream`.\n\n //\n\n // #5 `processor`s may also choose to \"drop\" packets by returning `None`, so we do nothing\n\n // and poll our upstream `HStream` again.\n\n //\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n loop {\n\n if self.to_egressor.is_full() {\n\n park_and_wake(&self.task_park, cx.waker().clone());\n\n return Poll::Pending;\n\n }\n\n let packet = ready!(Pin::new(&mut self.input_stream).poll_next(cx));\n\n match packet {\n\n None => {\n\n self.to_egressor.try_send(None).expect(\n\n \"QueueIngressor::Poll::Ready(None) try_send to_egressor shouldn't fail\",\n\n );\n\n die_and_wake(&self.task_park);\n\n return Poll::Ready(());\n", "file_path": "src/link/queue.rs", "rank": 59, "score": 25448.52666957178 }, { "content": "use crate::link::utils::task_park::*;\n\nuse crate::{HStream, Link};\n\nuse crossbeam::atomic::AtomicCell;\n\nuse crossbeam::crossbeam_channel;\n\nuse crossbeam::crossbeam_channel::{Receiver, Sender, TryRecvError};\n\nuse futures::prelude::*;\n\nuse futures::ready;\n\nuse futures::task::{Context, Poll};\n\nuse std::pin::Pin;\n\nuse std::sync::Arc;\n\n\n\nimpl<Packet: Send + Sized + 'static> Link<Packet> {\n\n pub(crate) fn do_queue(input: HStream<Packet>, cap: Option<usize>) -> Self {\n\n let (sender, reciever) = match cap {\n\n None => crossbeam_channel::unbounded::<Option<Packet>>(),\n\n Some(capacity) => crossbeam_channel::bounded::<Option<Packet>>(capacity),\n\n };\n\n\n\n let task_park: Arc<AtomicCell<TaskParkState>> =\n\n Arc::new(AtomicCell::new(TaskParkState::Empty));\n", "file_path": 
"src/link/queue.rs", "rank": 60, "score": 25448.488072287942 }, { "content": " //\n\n // #2 The input_stream returns a NotReady, we sleep, with the assumption\n\n // that whomever produced the NotReady will awaken the task in the Future.\n\n //\n\n // #3 We get a Ready(None), in which case we push a None onto the to_egressor\n\n // queue and then return Ready(()), which means we enter tear-down, since there\n\n // is no futher work to complete.\n\n // ###\n\n // By Sleep, we mean we return a NotReady to the runtime which will sleep the task.\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n let ingressor = Pin::into_inner(self);\n\n loop {\n\n if ingressor.to_egressor.is_full() {\n\n park_and_wake(&ingressor.task_park, cx.waker().clone());\n\n return Poll::Pending;\n\n }\n\n let input_packet_option: Option<Packet> =\n\n ready!(Pin::new(&mut ingressor.input).poll_next(cx));\n\n\n\n match input_packet_option {\n", "file_path": "src/link/join.rs", "rank": 61, "score": 25448.053370467125 }, { "content": "use crate::link::utils::task_park::*;\n\nuse crate::{link::QueueStream, HStream, Link};\n\nuse crossbeam::atomic::AtomicCell;\n\nuse crossbeam::crossbeam_channel;\n\nuse crossbeam::crossbeam_channel::{Receiver, Sender};\n\nuse futures::prelude::*;\n\nuse futures::ready;\n\nuse futures::task::{Context, Poll};\n\nuse std::pin::Pin;\n\nuse std::sync::Arc;\n\n\n\nimpl<Packet: Clone + Send + 'static> Link<Packet> {\n\n pub(crate) fn do_fork(input: HStream<Packet>, count: usize, capacity: Option<usize>) -> Self {\n\n let mut to_egressors: Vec<Sender<Option<Packet>>> = Vec::new();\n\n let mut egressors: Vec<HStream<Packet>> = Vec::new();\n\n\n\n let mut from_ingressors: Vec<Receiver<Option<Packet>>> = Vec::new();\n\n\n\n let mut task_parks: Vec<Arc<AtomicCell<TaskParkState>>> = Vec::new();\n\n\n", "file_path": "src/link/fork.rs", "rank": 62, "score": 25447.34013649437 }, { "content": "use crate::link::utils::task_park::*;\n\nuse crate::{HStream, 
Link, Runnable};\n\nuse crossbeam::atomic::AtomicCell;\n\nuse crossbeam::crossbeam_channel;\n\nuse crossbeam::crossbeam_channel::{Receiver, Sender};\n\nuse futures::prelude::*;\n\nuse futures::ready;\n\nuse futures::task::{Context, Poll};\n\nuse std::pin::Pin;\n\nuse std::sync::Arc;\n\n\n\nimpl<Packet: Send + Sized + 'static> Link<Packet> {\n\n pub(crate) fn do_join(input: Vec<HStream<Packet>>, cap: Option<usize>) -> Self {\n\n assert!(\n\n !input.is_empty(),\n\n format!(\n\n \"number of in_streams: {}, must be greater than 0\",\n\n input.len()\n\n )\n\n );\n", "file_path": "src/link/join.rs", "rank": 63, "score": 25445.96261780721 }, { "content": " for to_egressor in self.to_egressors.iter() {\n\n if let Err(err) = to_egressor.try_send(None) {\n\n panic!(\"Ingressor: Drop: try_send to egressor, fail?: {:?}\", err);\n\n }\n\n }\n\n for task_park in self.task_parks.iter() {\n\n die_and_wake(&task_park);\n\n }\n\n return Poll::Ready(());\n\n }\n\n Some(packet) => {\n\n assert!(self.to_egressors.len() == self.task_parks.len());\n\n for port in 0..self.to_egressors.len() {\n\n if let Err(err) = self.to_egressors[port].try_send(Some(packet.clone())) {\n\n panic!(\n\n \"Error in to_egressors[{}] sender, have nowhere to put packet: {:?}\",\n\n port, err\n\n );\n\n }\n\n unpark_and_wake(&self.task_parks[port]);\n", "file_path": "src/link/fork.rs", "rank": 64, "score": 25444.675828200936 }, { "content": " // This function, tries to retrieve a packet off the `from_ingressor`\n\n // channel, there are four cases:\n\n // ###\n\n // #1 Ok(Some(Packet)): Got a packet. If the Ingressor needs (likely due to\n\n // an until now full channel) to be awoken, wake them. Return the Async::Ready(Option(Packet))\n\n //\n\n // #2 Ok(None): this means that the Ingressor is in tear-down, and we\n\n // will no longer be receivig packets. 
Return Async::Ready(None) to forward propagate teardown\n\n //\n\n // #3 Err(TryRecvError::Empty): Packet queue is empty, await the Ingressor to awaken us with more\n\n // work, by returning Async::NotReady to signal to runtime to sleep this task.\n\n //\n\n // #4 Err(TryRecvError::Disconnected): Ingressor is in teardown and has dropped its side of the\n\n // from_ingressor channel; we will no longer receive packets. Return Async::Ready(None) to forward\n\n // propagate teardown.\n\n // ###\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n match self.from_ingressor.try_recv() {\n\n Ok(Some(packet)) => {\n\n unpark_and_wake(&self.task_park);\n", "file_path": "src/link/queue.rs", "rank": 65, "score": 25442.587343855732 }, { "content": " let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let (mut runnables, mut streams0) =\n\n Link::do_queue(immediate_stream(packets.clone()), Some(10)).take();\n\n\n\n let (mut runnables0, streams1) = Link::do_queue(streams0.remove(0), None).take();\n\n runnables.append(&mut runnables0);\n\n\n\n let link = Link::new(runnables, streams1);\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], packets);\n\n }\n\n\n\n use crate::ProcessFn;\n\n #[test]\n\n fn series_of_process_and_queue_links() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n", "file_path": "src/link/queue.rs", "rank": 66, "score": 25442.250349022514 }, { "content": " Poll::Ready(Some(packet))\n\n }\n\n Ok(None) => {\n\n die_and_wake(&self.task_park);\n\n Poll::Ready(None)\n\n }\n\n Err(TryRecvError::Empty) => {\n\n park_and_wake(&self.task_park, cx.waker().clone());\n\n Poll::Pending\n\n }\n\n Err(TryRecvError::Disconnected) => Poll::Ready(None),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::utils::test::harness::{initialize_runtime, test_link};\n\n use 
crate::utils::test::packet_generators::{immediate_stream, PacketIntervalGenerator};\n", "file_path": "src/link/queue.rs", "rank": 67, "score": 25442.031251469223 }, { "content": " ];\n\n let link = Link::do_join(inputs, None);\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0].len(), packets.len() * 2);\n\n }\n\n\n\n #[test]\n\n fn several_long_streams() {\n\n let mut rng = thread_rng();\n\n let stream_len = rng.gen_range(2000, 3000);\n\n let num_streams = rng.gen_range(5, 10);\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async move {\n\n let mut inputs: Vec<HStream<usize>> = Vec::new();\n\n for _ in 0..num_streams {\n\n inputs.push(immediate_stream(0..stream_len));\n\n }\n", "file_path": "src/link/join.rs", "rank": 68, "score": 25440.742576549976 }, { "content": " }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::utils::test::harness::{initialize_runtime, test_link};\n\n use crate::utils::test::packet_generators::immediate_stream;\n\n\n\n #[test]\n\n fn no_input() {\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let link = Link::<i32>::do_fork(immediate_stream(vec![]), 1, None);\n\n\n\n test_link(link, None).await\n", "file_path": "src/link/fork.rs", "rank": 69, "score": 25440.464529083605 }, { "content": "\n\n let runnable = QueueRunnable::new(input, sender, Arc::clone(&task_park));\n\n let stream = QueueStream::new(reciever, task_park);\n\n\n\n Link {\n\n runnables: vec![Box::new(runnable)],\n\n streams: vec![Box::new(stream)],\n\n }\n\n }\n\n}\n\n\n\n// The QueueIngressor is responsible for polling its input stream,\n\n// processing them using the `processor`s process function, and pushing the\n\n// output packet onto the to_egressor queue. It does work in batches, so it\n\n// will continue to pull packets as long as it can make forward progess,\n\n// after which it will return NotReady to sleep. 
This is handed to, and is\n\n// polled by the runtime.\n\npub(crate) struct QueueRunnable<Packet: Sized> {\n\n input_stream: HStream<Packet>,\n\n to_egressor: Sender<Option<Packet>>,\n", "file_path": "src/link/queue.rs", "rank": 70, "score": 25440.3424214451 }, { "content": " let inputs = vec![\n\n immediate_stream(packets.clone()),\n\n immediate_stream(packets.clone()),\n\n ];\n\n let link = Link::do_join(inputs, None);\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0].len(), packets.len() * 2);\n\n }\n\n\n\n #[test]\n\n fn multiple_ingressor_calls_works() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9, 11];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let inputs = vec![\n\n immediate_stream(packets.clone()),\n\n immediate_stream(packets.clone()),\n", "file_path": "src/link/join.rs", "rank": 71, "score": 25438.465578125142 }, { "content": "\n\n let mut runnables: Vec<Runnable> = Vec::new();\n\n let mut from_ingressors: Vec<Receiver<Option<Packet>>> = Vec::new();\n\n let mut task_parks: Vec<Arc<AtomicCell<TaskParkState>>> = Vec::new();\n\n let num_inputs = input.len();\n\n\n\n for stream in input {\n\n let (to_egressor, from_ingressor) = match cap {\n\n None => crossbeam_channel::unbounded::<Option<Packet>>(),\n\n Some(capacity) => crossbeam_channel::bounded::<Option<Packet>>(capacity),\n\n };\n\n let task_park = Arc::new(AtomicCell::new(TaskParkState::Empty));\n\n\n\n let runnable = JoinRunnable::new(stream, to_egressor, Arc::clone(&task_park));\n\n runnables.push(Box::new(runnable));\n\n from_ingressors.push(from_ingressor);\n\n task_parks.push(task_park);\n\n }\n\n\n\n let stream = JoinStream::new(from_ingressors, task_parks, num_inputs);\n", "file_path": "src/link/join.rs", "rank": 72, "score": 25438.44420528185 }, { "content": " );\n\n\n\n let inputs: Vec<HStream<i32>> =\n\n vec![Box::new(packet_generator0), Box::new(packet_generator1)];\n\n\n\n let link = 
Link::do_join(inputs, None);\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0].len(), packets.len() * 2);\n\n }\n\n\n\n #[test]\n\n fn small_channel() {\n\n let mut runtime = initialize_runtime();\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9, 11];\n\n let results = runtime.block_on(async {\n\n let inputs = vec![\n\n immediate_stream::<Vec<i32>>(packets.clone()),\n\n immediate_stream::<Vec<i32>>(packets.clone()),\n", "file_path": "src/link/join.rs", "rank": 73, "score": 25438.011162866318 }, { "content": "\n\n let link = Link::do_join(inputs, None);\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0].len(), stream_len * num_streams);\n\n }\n\n\n\n #[test]\n\n fn wait_between_packets() {\n\n let mut runtime = initialize_runtime();\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9, 11];\n\n let results = runtime.block_on(async {\n\n let packet_generator0 = PacketIntervalGenerator::new(\n\n time::Duration::from_millis(10),\n\n packets.clone().into_iter(),\n\n );\n\n let packet_generator1 = PacketIntervalGenerator::new(\n\n time::Duration::from_millis(10),\n\n packets.clone().into_iter(),\n", "file_path": "src/link/join.rs", "rank": 74, "score": 25436.86410951342 }, { "content": " use super::*;\n\n use crate::utils::test::harness::{initialize_runtime, test_link};\n\n use crate::utils::test::packet_generators::{immediate_stream, PacketIntervalGenerator};\n\n use crate::utils::test::processor::{Drop, Identity, TransformFrom};\n\n use crate::Link;\n\n use core::time;\n\n\n\n #[test]\n\n fn process() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n // Gnarly declaration becuase ProcessStream is just a stream, without\n\n // runnables, but still needs to build a link. 
User would just call\n\n // my_link.process(my_processor);\n\n // These tests are designed to only cover ProcessStream\n\n let link = Link::new(\n\n vec![],\n\n vec![Box::new(ProcessStream::new(\n", "file_path": "src/link/process.rs", "rank": 75, "score": 25436.69811491431 }, { "content": " }\n\n\n\n #[test]\n\n fn wait_between_packets() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let input = PacketIntervalGenerator::new(\n\n time::Duration::from_millis(10),\n\n packets.clone().into_iter(),\n\n );\n\n\n\n let link = Link::do_queue(Box::new(input), None);\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], packets);\n\n }\n\n}\n", "file_path": "src/link/queue.rs", "rank": 76, "score": 25436.688587351375 }, { "content": " ];\n\n\n\n let link = Link::do_join(inputs, None);\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0].len(), packets.len() * 2);\n\n }\n\n\n\n #[test]\n\n fn empty_stream() {\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let inputs = vec![\n\n immediate_stream::<Vec<i32>>(vec![]),\n\n immediate_stream::<Vec<i32>>(vec![]),\n\n ];\n\n\n\n let link = Link::do_join(inputs, None);\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], []);\n\n }\n\n}\n", "file_path": "src/link/join.rs", "rank": 77, "score": 25436.54206430267 }, { "content": " let stream_len = rng.gen_range(2000, 4000);\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let link = Link::do_queue(immediate_stream(0..stream_len), Some(10));\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0].len(), stream_len);\n\n }\n\n\n\n #[test]\n\n fn small_channel() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = 
runtime.block_on(async {\n\n let link = Link::do_queue(immediate_stream(packets.clone()), Some(1));\n\n\n\n test_link(link, None).await\n", "file_path": "src/link/queue.rs", "rank": 78, "score": 25436.488871042628 }, { "content": " task_park: Arc<AtomicCell<TaskParkState>>,\n\n}\n\n\n\nimpl<Packet: Sized> QueueRunnable<Packet> {\n\n fn new(\n\n input_stream: HStream<Packet>,\n\n to_egressor: Sender<Option<Packet>>,\n\n task_park: Arc<AtomicCell<TaskParkState>>,\n\n ) -> Self {\n\n QueueRunnable {\n\n input_stream,\n\n to_egressor,\n\n task_park,\n\n }\n\n }\n\n}\n\n\n\nimpl<Packet: Send + Sized> Unpin for QueueRunnable<Packet> {}\n\n\n\nimpl<Packet: Send + Sized> Future for QueueRunnable<Packet> {\n", "file_path": "src/link/queue.rs", "rank": 79, "score": 25436.14519054698 }, { "content": " use crate::utils::test::processor::Identity;\n\n use core::time;\n\n use rand::{thread_rng, Rng};\n\n\n\n #[test]\n\n fn queue() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let link = Link::do_queue(immediate_stream(packets.clone()), Some(10));\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], packets);\n\n }\n\n\n\n #[test]\n\n fn long_stream() {\n\n let mut rng = thread_rng();\n", "file_path": "src/link/queue.rs", "rank": 80, "score": 25436.03887308008 }, { "content": " Poll::Pending\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[allow(dead_code)]\n\nmod tests {\n\n use super::*;\n\n use crate::utils::test::packet_generators::{immediate_stream, PacketIntervalGenerator};\n\n use core::time;\n\n use rand::{thread_rng, Rng};\n\n\n\n use crate::utils::test::harness::{initialize_runtime, test_link};\n\n\n\n #[test]\n\n fn join() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9, 11];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n", "file_path": "src/link/join.rs", "rank": 81, 
"score": 25435.91563047918 }, { "content": " let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let link = Link::new(\n\n vec![],\n\n vec![Box::new(ProcessStream::new(\n\n immediate_stream(packets),\n\n Drop::new(),\n\n ))],\n\n );\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], []);\n\n }\n\n}\n", "file_path": "src/link/process.rs", "rank": 82, "score": 25435.57806638171 }, { "content": "\n\n let link = Link::new(\n\n vec![],\n\n vec![Box::new(ProcessStream::new(\n\n Box::new(packet_generator),\n\n Identity::new(),\n\n ))],\n\n );\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], packets);\n\n }\n\n\n\n #[test]\n\n fn type_transform() {\n\n let packets = \"route-rs\".chars();\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n", "file_path": "src/link/process.rs", "rank": 83, "score": 25435.544914105125 }, { "content": "\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let link = Link::<i32>::do_fork(immediate_stream(packets.clone()), 2, None);\n\n\n\n test_link(link, None).await\n\n });\n\n\n\n assert_eq!(results[0], packets.clone());\n\n assert_eq!(results[1], packets);\n\n }\n\n\n\n #[test]\n\n fn three_way() {\n\n let packets = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let link = Link::<i32>::do_fork(immediate_stream(packets.clone()), 3, None);\n\n\n\n test_link(link, None).await\n\n });\n\n\n\n assert_eq!(results[0], packets.clone());\n\n assert_eq!(results[1], packets.clone());\n\n assert_eq!(results[2], packets);\n\n }\n\n}\n", "file_path": "src/link/fork.rs", "rank": 84, "score": 25434.619070636363 }, { "content": " immediate_stream(packets.clone()),\n\n Identity::new(),\n\n ))],\n\n );\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], packets);\n\n }\n\n\n\n #[test]\n\n fn 
wait_between_packets() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let packet_generator = PacketIntervalGenerator::new(\n\n time::Duration::from_millis(10),\n\n packets.clone().into_iter(),\n\n );\n", "file_path": "src/link/process.rs", "rank": 85, "score": 25434.42249385388 }, { "content": " });\n\n assert!(results[0].is_empty());\n\n }\n\n\n\n #[test]\n\n fn one_way() {\n\n let packets = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let link = Link::<i32>::do_fork(immediate_stream(packets.clone()), 1, None);\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], packets);\n\n }\n\n\n\n #[test]\n\n fn two_way() {\n\n let packets = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n", "file_path": "src/link/fork.rs", "rank": 86, "score": 25433.54883391825 }, { "content": " }\n\n Some(packet) => {\n\n self.to_egressor\n\n .try_send(Some(packet))\n\n .expect(\"QueueIngressor::Poll::Ready(Some(val)) try_send to_egressor shouldn't fail\");\n\n unpark_and_wake(&self.task_park);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n// The Egressor side of the QueueLink is responsible to converting the\n\n// output queue of processed packets, which is a crossbeam channel, to a\n\n// Stream that can be polled for packets. 
It ends up being owned by the\n\n// processor which is polling for packets.\n\npub(crate) struct QueueStream<Packet: Sized> {\n\n from_ingressor: Receiver<Option<Packet>>,\n\n task_park: Arc<AtomicCell<TaskParkState>>,\n\n}\n", "file_path": "src/link/queue.rs", "rank": 87, "score": 25433.38971155396 }, { "content": " });\n\n assert_eq!(results[0], packets);\n\n }\n\n\n\n #[test]\n\n fn empty_stream() {\n\n let mut runtime = initialize_runtime();\n\n let results = runtime.block_on(async {\n\n let packets: Vec<i32> = vec![];\n\n let link = Link::do_queue(immediate_stream(packets.clone()), Some(10));\n\n\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], []);\n\n }\n\n\n\n #[test]\n\n fn two_links() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n", "file_path": "src/link/queue.rs", "rank": 88, "score": 25433.389064939664 }, { "content": "}\n\n\n\nimpl<Packet: Sized> Unpin for JoinStream<Packet> {}\n\n\n\nimpl<Packet: Sized> Drop for JoinStream<Packet> {\n\n fn drop(&mut self) {}\n\n}\n\n\n\nimpl<Packet: Sized> Stream for JoinStream<Packet> {\n\n type Item = Packet;\n\n\n\n // Iterate over all the channels, pull the first packet that is available.\n\n // This starts at the next index after the last successful recv\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n //rotate_slice exists in 1.22 nightly experimental\n\n let egressor = Pin::into_inner(self);\n\n let rotated_iter = egressor\n\n .from_ingressors\n\n .iter()\n\n .enumerate()\n", "file_path": "src/link/join.rs", "rank": 89, "score": 25433.001751256135 }, { "content": " let packet_generator = immediate_stream(packets.clone());\n\n\n\n let link = Link::new(\n\n vec![],\n\n vec![Box::new(ProcessStream::new(\n\n Box::new(packet_generator),\n\n TransformFrom::<char, u32>::new(),\n\n ))],\n\n );\n\n\n\n test_link(link, None).await\n\n });\n\n let expected_output: Vec<u32> = packets.map(|p| p.into()).collect();\n\n 
assert_eq!(results[0], expected_output);\n\n }\n\n\n\n #[test]\n\n fn drop() {\n\n let packets: Vec<i32> = vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9];\n\n\n", "file_path": "src/link/process.rs", "rank": 90, "score": 25432.372366170992 }, { "content": " Link {\n\n runnables,\n\n streams: vec![Box::new(stream)],\n\n }\n\n }\n\n}\n\n\n\npub struct JoinRunnable<Packet: Sized> {\n\n input: HStream<Packet>,\n\n to_egressor: Sender<Option<Packet>>,\n\n task_park: Arc<AtomicCell<TaskParkState>>,\n\n}\n\n\n\nimpl<Packet: Sized> Unpin for JoinRunnable<Packet> {}\n\n\n\nimpl<Packet: Sized> JoinRunnable<Packet> {\n\n fn new(\n\n input: HStream<Packet>,\n\n to_egressor: Sender<Option<Packet>>,\n\n task_park: Arc<AtomicCell<TaskParkState>>,\n", "file_path": "src/link/join.rs", "rank": 91, "score": 25432.16527630981 }, { "content": "use crate::{HStream, Processor};\n\nuse futures::prelude::*;\n\nuse futures::ready;\n\nuse futures::task::{Context, Poll};\n\nuse std::pin::Pin;\n\n\n\npub(crate) struct ProcessStream<P: Processor> {\n\n in_stream: HStream<P::Input>,\n\n processor: P,\n\n}\n\n\n\nimpl<P: Processor> ProcessStream<P> {\n\n pub fn new(in_stream: HStream<P::Input>, processor: P) -> Self {\n\n ProcessStream {\n\n in_stream,\n\n processor,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/link/process.rs", "rank": 92, "score": 25432.10782081325 }, { "content": " let results = runtime.block_on(async {\n\n let (mut runnables0, mut streams0) =\n\n Link::new(vec![], vec![immediate_stream(packets.clone())])\n\n .process(Identity::new())\n\n .take();\n\n\n\n let (mut runnables1, streams1) = Link::do_queue(streams0.remove(0), Some(10)).take();\n\n\n\n let (mut runnables2, mut streams2) =\n\n Link::new(vec![], streams1).process(Identity::new()).take();\n\n\n\n let (mut runnables3, streams3) = Link::do_queue(streams2.remove(0), Some(10)).take();\n\n runnables0.append(&mut runnables1);\n\n runnables0.append(&mut runnables2);\n\n runnables0.append(&mut runnables3);\n\n\n\n let link = 
Link::new(runnables0, streams3);\n\n test_link(link, None).await\n\n });\n\n assert_eq!(results[0], packets);\n", "file_path": "src/link/queue.rs", "rank": 93, "score": 25432.04232934829 }, { "content": " for _ in 0..count {\n\n let (to_egressor, from_ingressor) = match capacity {\n\n None => crossbeam_channel::unbounded::<Option<Packet>>(),\n\n Some(capacity) => crossbeam_channel::bounded::<Option<Packet>>(capacity),\n\n };\n\n let task_park = Arc::new(AtomicCell::new(TaskParkState::Empty));\n\n\n\n let egressor = QueueStream::new(from_ingressor.clone(), Arc::clone(&task_park));\n\n\n\n to_egressors.push(to_egressor);\n\n egressors.push(Box::new(egressor));\n\n from_ingressors.push(from_ingressor);\n\n task_parks.push(task_park);\n\n }\n\n\n\n let ingressor = ForkRunnable::new(input, to_egressors, task_parks);\n\n\n\n Link {\n\n runnables: vec![Box::new(ingressor)],\n\n streams: egressors,\n", "file_path": "src/link/fork.rs", "rank": 94, "score": 25431.824025355014 }, { "content": " None => {\n\n ingressor.to_egressor.try_send(None).expect(\n\n \"JoinIngressor::Poll::Ready(None) try_send to_egressor shouldn't fail\",\n\n );\n\n die_and_wake(&ingressor.task_park);\n\n return Poll::Ready(());\n\n }\n\n Some(packet) => {\n\n ingressor.to_egressor.try_send(Some(packet)).expect(\n\n \"JoinIngressor::Poll:Ready(Some(Val)) try_send to_egressor shouldn't fail\",\n\n );\n\n unpark_and_wake(&ingressor.task_park);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\npub struct JoinStream<Packet: Sized> {\n", "file_path": "src/link/join.rs", "rank": 95, "score": 25431.758422868552 }, { "content": "\n\nimpl<Packet: Sized> QueueStream<Packet> {\n\n pub fn new(\n\n from_ingressor: Receiver<Option<Packet>>,\n\n task_park: Arc<AtomicCell<TaskParkState>>,\n\n ) -> Self {\n\n QueueStream {\n\n from_ingressor,\n\n task_park,\n\n }\n\n }\n\n}\n\n\n\nimpl<Packet: Sized> Unpin for QueueStream<Packet> {}\n\n\n\nimpl<Packet: Sized> Stream for QueueStream<Packet> {\n\n type Item = 
Packet;\n\n\n\n // Implement Poll for Stream for QueueEgressor\n\n //\n", "file_path": "src/link/queue.rs", "rank": 96, "score": 25430.796339652265 }, { "content": " ) -> Self {\n\n JoinRunnable {\n\n input,\n\n to_egressor,\n\n task_park,\n\n }\n\n }\n\n}\n\n\n\nimpl<Packet: Sized> Future for JoinRunnable<Packet> {\n\n type Output = ();\n\n\n\n // Implement Poll for Future for JoinRunnable\n\n //\n\n // Note that this function works a bit different, it continues to process\n\n // packets off it's input queue until it reaches a point where it can not\n\n // make forward progress. There are three cases:\n\n // ###\n\n // #1 The to_egressor queue is full, we wake the egressor that we need\n\n // awaking when there is work to do, and go to sleep.\n", "file_path": "src/link/join.rs", "rank": 97, "score": 25429.17983546656 }, { "content": " type Output = ();\n\n\n\n // Implement Poll for Future for QueueIngressor\n\n //\n\n // This function continues to process\n\n // packets off it's input queue until it reaches a point where it can not\n\n // make forward progress. There are several cases:\n\n // ###\n\n // #1 The to_egressor queue is full, we wake the Egressor that we need\n\n // awaking when there is work to do, and go to sleep by returning `Async::NotReady`.\n\n //\n\n // #2 The input_stream returns a NotReady, we sleep, with the assumption\n\n // that whomever produced the NotReady will awaken the task in the Future.\n\n //\n\n // #3 We get a Ready(None), in which case we push a None onto the to_Egressor\n\n // queue and then return Ready(()), which means we enter tear-down, since there\n\n // is no further work to complete.\n\n //\n\n // #4 If our upstream `HStream` has a packet for us, we pass it to our `processor`\n\n // for `process`ing. 
Most of the time, it will yield a `Some(output_packet)` that has\n", "file_path": "src/link/queue.rs", "rank": 98, "score": 25429.097569307785 }, { "content": "impl<P: Processor> Unpin for ProcessStream<P> {}\n\n\n\nimpl<P: Processor> Stream for ProcessStream<P> {\n\n type Item = P::Output;\n\n\n\n // Intro to `Stream`s:\n\n // 3 cases: `Poll::Ready(Some)`, `Poll::Ready(None)`, `Poll::Pending`\n\n //\n\n // `Poll::Ready(Some)`: We have a packet ready to process from the upstream processor.\n\n // It's passed to our core's process function for... processing\n\n //\n\n // `Poll::Ready(None)`: The input_stream doesn't have anymore input. Semantically,\n\n // it's like an iterator has exhausted it's input. We should return `Poll::Ready(None)`\n\n // to signify to our downstream components that there's no more input to process.\n\n // Our Processors should rarely return `Poll::Ready(None)` since it will effectively\n\n // kill the Stream chain.\n\n //\n\n // `Poll::Pending`: There is more input for us to process, but we can't make any more\n\n // progress right now. The contract for Streams asks us to register with a Reactor so we\n\n // will be woken up again by an Executor, but we will be relying on Tokio to do that for us.\n", "file_path": "src/link/process.rs", "rank": 99, "score": 25427.99174528461 } ]
Rust
tests/end_to_end_cases/read_cli.rs
decadevvv/influxdb_iox
da440d8d277147e92c92fbdbe735cfcfb0c88912
use assert_cmd::Command; use predicates::prelude::*; use test_helpers::make_temp_file; use crate::common::server_fixture::ServerFixture; use super::scenario::rand_name; #[tokio::test] pub async fn test() { let server_fixture = ServerFixture::create_single_use().await; let db_name = rand_name(); let addr = server_fixture.grpc_base(); set_server_id(addr).await; create_database(&db_name, addr).await; test_read_default(&db_name, addr).await; test_read_format_pretty(&db_name, addr).await; test_read_format_csv(&db_name, addr).await; test_read_format_json(&db_name, addr).await; test_read_error(&db_name, addr).await; } async fn set_server_id(addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("server") .arg("set") .arg("23") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); } async fn create_database(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("create") .arg(db_name) .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); let lp_data = vec![ "cpu,region=west user=23.2 100", "cpu,region=west user=21.0 150", ]; let lp_data_file = make_temp_file(lp_data.join("\n")); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("write") .arg(db_name) .arg(lp_data_file.as_ref()) .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("2 Lines OK")); } async fn test_read_default(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains(expected)); } async fn 
test_read_format_pretty(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("pretty") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_csv(db_name: &str, addr: &str) { let expected = "west,1970-01-01T00:00:00.000000100,23.2\nwest,1970-01-01T00:00:00.000000150,21.0"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("csv") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_json(db_name: &str, addr: &str) { let expected = r#"[{"region":"west","time":"1970-01-01 00:00:00.000000100","user":23.2},{"region":"west","time":"1970-01-01 00:00:00.000000150","user":21.0}]"#; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("json") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_error(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from unknown_table") .arg("--host") .arg(addr) .assert() .failure() .stderr(predicate::str::contains( "Table or CTE with name \\'unknown_table\\' not found", )); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("not_a_valid_format") .assert() .failure() 
.stderr(predicate::str::contains( "Unknown format type: not_a_valid_format. Expected one of 'pretty', 'csv' or 'json'", )); }
use assert_cmd::Command; use predicates::prelude::*; use test_helpers::make_temp_file; use crate::common::server_fixture::ServerFixture; use super::scenario::rand_name; #[tokio::test] pub async fn test() { let server_fixture = ServerFixture::create_single_use().await; let db_name = rand_name(); let addr = server_fixture.grpc_base(); set_server_id(addr).await; create_database(&db_name, addr).await; test_read_default(&db_name, addr).await; test_read_format_pretty(&db_name, addr).await; test_read_format_csv(&db_name, addr).await; test_read_format_json(&db_name, addr).await; test_read_error(&db_name, addr).await; } async fn set_server_id(addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("server") .arg("set") .arg("23") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); } async fn create_database(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("create") .arg(db_name) .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); let lp_data = vec![ "cpu,region=west user=23.2 100", "cpu,region=west user=21.0 150", ]; let lp_data_file = make_temp_file(lp_data.join("\n")); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("write") .arg(db_name) .arg(lp_data_file.as_ref()) .arg("--host")
.arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("json") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_error(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from unknown_table") .arg("--host") .arg(addr) .assert() .failure() .stderr(predicate::str::contains( "Table or CTE with name \\'unknown_table\\' not found", )); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("not_a_valid_format") .assert() .failure() .stderr(predicate::str::contains( "Unknown format type: not_a_valid_format. Expected one of 'pretty', 'csv' or 'json'", )); }
.arg(addr) .assert() .success() .stdout(predicate::str::contains("2 Lines OK")); } async fn test_read_default(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_pretty(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("pretty") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_csv(db_name: &str, addr: &str) { let expected = "west,1970-01-01T00:00:00.000000100,23.2\nwest,1970-01-01T00:00:00.000000150,21.0"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("csv") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_json(db_name: &str, addr: &str) { let expected = r#"[{"region":"west","time":"1970-01-01 00:00:00.000000100","user":23.2},{"region":"west","time":"1970-01-01 00:00:00.000000150","user":21.0}]"#; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query")
random
[ { "content": "/// Loads the specified lines into the named database\n\nfn load_lp(addr: &str, db_name: &str, lp_data: Vec<&str>) {\n\n let lp_data_file = make_temp_file(lp_data.join(\"\\n\"));\n\n\n\n Command::cargo_bin(\"influxdb_iox\")\n\n .unwrap()\n\n .arg(\"database\")\n\n .arg(\"write\")\n\n .arg(&db_name)\n\n .arg(lp_data_file.as_ref())\n\n .arg(\"--host\")\n\n .arg(addr)\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"Lines OK\"));\n\n}\n", "file_path": "tests/end_to_end_cases/management_cli.rs", "rank": 0, "score": 317021.56671507366 }, { "content": "/// convert form that is easier to type in tests to what some code needs\n\npub fn str_vec_to_arc_vec(str_vec: &[&str]) -> Arc<Vec<Arc<String>>> {\n\n Arc::new(str_vec.iter().map(|s| Arc::new(String::from(*s))).collect())\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 1, "score": 304244.4462628483 }, { "content": "/// convert form that is easier to type in tests to what some code needs\n\npub fn str_pair_vec_to_vec(str_vec: &[(&str, &str)]) -> Vec<(Arc<String>, Arc<String>)> {\n\n str_vec\n\n .iter()\n\n .map(|(s1, s2)| (Arc::new(String::from(*s1)), Arc::new(String::from(*s2))))\n\n .collect()\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 2, "score": 301892.5463802234 }, { "content": "/// substitutes \"ns\" --> ns_since_epoch, ns1-->ns_since_epoch+1, etc\n\npub fn substitute_nanos(ns_since_epoch: i64, lines: &[&str]) -> Vec<String> {\n\n let substitutions = vec![\n\n (\"ns0\", format!(\"{}\", ns_since_epoch)),\n\n (\"ns1\", format!(\"{}\", ns_since_epoch + 1)),\n\n (\"ns2\", format!(\"{}\", ns_since_epoch + 2)),\n\n (\"ns3\", format!(\"{}\", ns_since_epoch + 3)),\n\n (\"ns4\", format!(\"{}\", ns_since_epoch + 4)),\n\n (\"ns5\", format!(\"{}\", ns_since_epoch + 5)),\n\n (\"ns6\", format!(\"{}\", ns_since_epoch + 6)),\n\n ];\n\n\n\n lines\n\n .iter()\n\n .map(|line| {\n\n let mut line = line.to_string();\n\n for (from, to) in &substitutions {\n\n line = 
line.replace(from, to);\n\n }\n\n line\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "tests/end_to_end_cases/scenario.rs", "rank": 3, "score": 251872.95508399786 }, { "content": "/// Validates some of the metadata output content for this tsm file\n\nfn assert_meta_000000000000005_000000002_tsm(assert: Assert) {\n\n assert\n\n .success()\n\n .stdout(predicate::str::contains(\"TSM Metadata Report\"))\n\n .stdout(predicate::str::contains(\n\n \"(05c19117091a1000, 05c19117091a1001) 2159 index entries, 2159 total records\",\n\n ))\n\n .stdout(predicate::str::contains(\n\n \"task_scheduler_total_schedule_fails\",\n\n ));\n\n}\n\n\n", "file_path": "tests/commands.rs", "rank": 4, "score": 232744.33753701783 }, { "content": "/// Validates some of the metadata output content for this tsm file\n\nfn assert_meta_temperature_parquet(assert: Assert) {\n\n assert\n\n .success()\n\n .stdout(predicate::str::contains(\"Parquet file metadata:\"))\n\n .stdout(predicate::str::contains(r#\"created by: \"Delorean\"\"#))\n\n .stdout(predicate::str::contains(\n\n r#\"Column Chunk [3]:\n\n file_offset: 595\n\n column_type: DOUBLE\n\n column_path: bottom_degrees\n\n num_values: 6\n\n encodings: [PLAIN, RLE_DICTIONARY, RLE]\n\n compression: GZIP\n\n compressed_size: 125\n\n uncompressed_size: 90\n\n data_page_offset: 547\n\n has_index_page: false\n\n has_dictionary_page: true\n\n dictionary_page_offset: 470\n\n NO STATISTICS\"#,\n\n ));\n\n}\n\n\n", "file_path": "tests/commands.rs", "rank": 5, "score": 228754.22080859775 }, { "content": "/// Validates some of the metadata output content for this tsm file\n\nfn assert_meta_cpu_usage_tsm(assert: Assert) {\n\n assert\n\n .success()\n\n .stdout(predicate::str::contains(\"TSM Metadata Report\"))\n\n .stdout(predicate::str::contains(\n\n \"(05b4927b3fe38000, 05b4927b3fe38001) 2735 index entries, 2735 total records\",\n\n ))\n\n .stdout(predicate::str::contains(\n\n \"task_scheduler_total_schedule_fails\",\n\n ));\n\n}\n\n\n", "file_path": 
"tests/commands.rs", "rank": 6, "score": 224965.37063339315 }, { "content": "/// Format a the vec of Arc strings paris into strings\n\npub fn dump_arc_vec(v: Vec<(Arc<String>, Arc<String>)>) -> Vec<String> {\n\n v.into_iter()\n\n .map(|(k, v)| format!(\" ({}, {})\", k, v))\n\n .collect()\n\n}\n\n\n", "file_path": "server/src/query_tests/influxrpc/util.rs", "rank": 7, "score": 221307.60272865542 }, { "content": "/// Converts bytes representing tag_keys values to Rust strings,\n\n/// handling the special case `_m(0x00)` and `_f(0xff)` values. Other\n\n/// than `0xff` panics on any non-utf8 string.\n\npub fn tag_key_bytes_to_strings(bytes: Vec<u8>) -> String {\n\n match bytes.as_slice() {\n\n [0] => \"_m(0x00)\".into(),\n\n // note this isn't valid UTF8 and thus would assert below\n\n [255] => \"_f(0xff)\".into(),\n\n _ => String::from_utf8(bytes).expect(\"string value response was not utf8\"),\n\n }\n\n}\n\n\n\nstatic LOG_SETUP: Once = Once::new();\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 8, "score": 219369.07560696156 }, { "content": "/// Format a series set into a format that is easy to compare in tests\n\npub fn dump_series_set(s: SeriesSet) -> Vec<String> {\n\n let mut f = vec![\"SeriesSet\".into()];\n\n f.push(format!(\"table_name: {}\", s.table_name));\n\n f.push(\"tags\".to_string());\n\n f.extend(dump_arc_vec(s.tags).into_iter());\n\n f.push(\"field_indexes:\".to_string());\n\n f.extend(dump_field_indexes(s.field_indexes).into_iter());\n\n f.push(format!(\"start_row: {}\", s.start_row));\n\n f.push(format!(\"num_rows: {}\", s.num_rows));\n\n f.push(\"Batches:\".into());\n\n let formatted_batch = pretty_format_batches(&[s.batch]).unwrap();\n\n f.extend(formatted_batch.trim().split('\\n').map(|s| s.to_string()));\n\n\n\n f\n\n}\n\n\n\n/// Run a series set plan to completion and produce a Vec<String> representation\n\n///\n\n/// # Panics\n\n///\n", "file_path": "server/src/query_tests/influxrpc/util.rs", "rank": 9, "score": 215114.5799780573 }, { 
"content": "/// Format the field indexes into strings\n\npub fn dump_field_indexes(f: FieldIndexes) -> Vec<String> {\n\n f.as_slice()\n\n .iter()\n\n .map(|field_index| {\n\n format!(\n\n \" (value_index: {}, timestamp_index: {})\",\n\n field_index.value_index, field_index.timestamp_index\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "server/src/query_tests/influxrpc/util.rs", "rank": 10, "score": 215108.2616350166 }, { "content": "pub fn all_entries(builder: &WriteBufferBuilder) -> Result<Vec<Entry>> {\n\n builder\n\n .clone()\n\n .entries()?\n\n .collect::<Result<Vec<_>, _>>()\n\n .map_err(Into::into)\n\n}\n\n\n\nmacro_rules! assert_filenames_for_sequence_numbers {\n\n ($dir:expr, [$($id:expr),* $(,)?] $(,)?) => {{\n\n let actual = write_buffer_file_names(&$dir.as_ref());\n\n let expected = [$(file_name_for_sequence_number($id)),*];\n\n assert_eq!(actual, expected);\n\n }};\n\n}\n\n\n\nmacro_rules! assert_entry {\n\n ($entry:expr, $seq_num:expr, $data: expr $(,)?) => {{\n\n assert_eq!($seq_num, $entry.sequence_number());\n\n assert_eq!($data.as_ref(), $entry.as_data());\n", "file_path": "write_buffer/tests/helpers/mod.rs", "rank": 11, "score": 210339.35949850787 }, { "content": "/// Enables debug logging regardless of the value of RUST_LOG\n\n/// environment variable. 
If RUST_LOG isn't specifies, defaults to\n\n/// \"debug\"\n\npub fn start_logging() {\n\n // ensure the global has been initialized\n\n LOG_SETUP.call_once(|| {\n\n // honor any existing RUST_LOG level\n\n if std::env::var(\"RUST_LOG\").is_err() {\n\n std::env::set_var(\"RUST_LOG\", \"debug\");\n\n }\n\n // Configure the logger to write to stderr and install it\n\n let output_stream = std::io::stderr;\n\n\n\n use observability_deps::tracing_subscriber::{self, prelude::*, EnvFilter};\n\n\n\n tracing_subscriber::registry()\n\n .with(EnvFilter::from_default_env())\n\n .with(tracing_subscriber::fmt::layer().with_writer(output_stream))\n\n .init();\n\n })\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 12, "score": 207002.28859066076 }, { "content": "/// Enables debug logging if the RUST_LOG environment variable is\n\n/// set. Does nothing if RUST_LOG is not set. If enable_logging has\n\n/// been set previously, does nothing\n\npub fn maybe_start_logging() {\n\n if std::env::var(\"RUST_LOG\").is_ok() {\n\n start_logging()\n\n }\n\n}\n\n\n\n#[macro_export]\n\n/// A macro to assert that one string is contained within another with\n\n/// a nice error message if they are not.\n\n///\n\n/// Usage: `assert_contains!(actual, expected)`\n\n///\n\n/// Is a macro so test error\n\n/// messages are on the same line as the failure;\n\n///\n\n/// Both arguments must be convertable into Strings (Into<String>)\n\nmacro_rules! 
assert_contains {\n\n ($ACTUAL: expr, $EXPECTED: expr) => {\n\n let actual_value: String = $ACTUAL.into();\n\n let expected_value: String = $EXPECTED.into();\n", "file_path": "test_helpers/src/lib.rs", "rank": 13, "score": 202541.72676427494 }, { "content": "/// Used for testing: create a Database with a local store\n\npub fn make_db() -> TestDb {\n\n let server_id = ServerId::try_from(1).unwrap();\n\n // TODO: When we support parquet file in memory, we will either turn this test back to memory\n\n // or have both tests: local disk and memory\n\n //let object_store = Arc::new(ObjectStore::new_in_memory(InMemory::new()));\n\n //\n\n // Create an object store with a specified location in a local disk\n\n let root = TempDir::new().unwrap();\n\n let object_store = Arc::new(ObjectStore::new_file(File::new(root.path())));\n\n let exec = Arc::new(Executor::new(1));\n\n let metrics_registry = Arc::new(metrics::MetricRegistry::new());\n\n\n\n TestDb {\n\n metric_registry: metrics::TestMetricRegistry::new(Arc::clone(&metrics_registry)),\n\n db: Db::new(\n\n DatabaseRules::new(DatabaseName::new(\"placeholder\").unwrap()),\n\n server_id,\n\n object_store,\n\n exec,\n\n None, // write buffer\n\n Arc::new(JobRegistry::new()),\n\n metrics_registry,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "server/src/query_tests/utils.rs", "rank": 14, "score": 200603.78823895304 }, { "content": "/// Used for testing: create a Database with a local store and a specified name\n\npub fn make_database(server_id: ServerId, object_store: Arc<ObjectStore>, db_name: &str) -> TestDb {\n\n let exec = Arc::new(Executor::new(1));\n\n let metrics_registry = Arc::new(metrics::MetricRegistry::new());\n\n TestDb {\n\n metric_registry: metrics::TestMetricRegistry::new(Arc::clone(&metrics_registry)),\n\n db: Db::new(\n\n DatabaseRules::new(DatabaseName::new(db_name.to_string()).unwrap()),\n\n server_id,\n\n object_store,\n\n exec,\n\n None, // write buffer\n\n Arc::new(JobRegistry::new()),\n\n metrics_registry,\n\n 
),\n\n }\n\n}\n\n\n", "file_path": "server/src/query_tests/utils.rs", "rank": 15, "score": 200060.45032873575 }, { "content": "pub fn write_buffer_file_names(dir: impl Into<PathBuf>) -> Vec<String> {\n\n write_buffer_paths(dir)\n\n .iter()\n\n .filter_map(|path| path.file_name().map(|p| p.to_string_lossy().to_string()))\n\n .collect()\n\n}\n\n\n", "file_path": "write_buffer/tests/helpers/mod.rs", "rank": 16, "score": 198477.47054436136 }, { "content": "pub fn write_buffer_paths(dir: impl Into<PathBuf>) -> Vec<PathBuf> {\n\n let mut paths: Vec<_> = fs::read_dir(&dir.into())\n\n .expect(\"Cannot read Write Buffer directory\")\n\n .flatten() // Ignore errors\n\n .map(|entry| entry.path())\n\n .collect();\n\n paths.sort();\n\n paths\n\n}\n\n\n", "file_path": "write_buffer/tests/helpers/mod.rs", "rank": 17, "score": 198477.47054436136 }, { "content": "/// s/span=1/span=SPAN1/g\n\nfn normalize_spans(lines: Vec<String>) -> Vec<String> {\n\n // since there can be multiple unique span values, need to normalize them\n\n // differently\n\n //\n\n // Note: we include leading and trailing spaces so that span=2\n\n // doesn't also match span=21423\n\n let re = Regex::new(r#\" span=(\\d+) \"#).unwrap();\n\n let span_ids: Vec<String> = lines\n\n .iter()\n\n .map(|line| re.find_iter(line))\n\n .flatten()\n\n .map(|m| m.as_str().to_string())\n\n .collect();\n\n\n\n // map span ids to something uniform\n\n span_ids\n\n .into_iter()\n\n .enumerate()\n\n .fold(lines, |lines, (idx, orig_id)| {\n\n // replace old span\n", "file_path": "logfmt/tests/logging.rs", "rank": 18, "score": 196711.52654689908 }, { "content": "pub fn dump_meta(input_filename: &str) -> Result<()> {\n\n info!(\"meta starting\");\n\n debug!(\"Reading from input file {}\", input_filename);\n\n\n\n let input_reader = InputReader::new(input_filename).context(OpenInput)?;\n\n\n\n match input_reader.file_type() {\n\n FileType::LineProtocol => LineProtocolNotImplemented.fail(),\n\n FileType::Tsm => {\n\n let len = 
input_reader\n\n .len()\n\n .try_into()\n\n .expect(\"File size more than usize\");\n\n let reader = TsmIndexReader::try_new(input_reader, len).context(CreateTsm)?;\n\n\n\n let mut stats_builder = TsmMetadataBuilder::new();\n\n\n\n for entry in reader {\n\n let entry = entry.context(UnableToReadTsmEntry)?;\n\n stats_builder.process_entry(entry)?;\n\n }\n\n stats_builder.print_report();\n\n Ok(())\n\n }\n\n FileType::Parquet => {\n\n print_parquet_metadata(input_reader).context(UnableDumpToParquetMetadata)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/commands/meta.rs", "rank": 19, "score": 194069.4242895129 }, { "content": "/// Compares the protobuf type URL found within a google.protobuf.Any\n\n/// message to an expected Protobuf package and message name\n\n///\n\n/// i.e. strips off the \"type.googleapis.com/\" prefix from `url`\n\n/// and compares the result with `protobuf_type`\n\n///\n\n/// ```\n\n/// use generated_types::protobuf_type_url_eq;\n\n/// assert!(protobuf_type_url_eq(\"type.googleapis.com/google.protobuf.Empty\", \"google.protobuf.Empty\"));\n\n/// assert!(!protobuf_type_url_eq(\"type.googleapis.com/google.protobuf.Empty\", \"something.else\"));\n\n/// ```\n\npub fn protobuf_type_url_eq(url: &str, protobuf_type: &str) -> bool {\n\n let mut split = url.splitn(2, '/');\n\n match (split.next(), split.next()) {\n\n (Some(ANY_TYPE_PREFIX), Some(t)) => t == protobuf_type,\n\n _ => false,\n\n }\n\n}\n\n\n\npub use com::github::influxdata::idpe::storage::read::*;\n\npub use influxdata::platform::storage::*;\n\n\n\npub mod google;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_protobuf_type_url() {\n\n use influxdata::iox::management::v1::OPERATION_METADATA;\n", "file_path": "generated_types/src/lib.rs", "rank": 20, "score": 192358.1823667924 }, { "content": "/// s/location=\\\"logfmt/tests/logging.rs:128\\\"/location=NORMALIZED/g\n\nfn normalize_location(v: &str) -> String {\n\n let re = 
Regex::new(r#\"location=\".*?\"\"#).unwrap();\n\n re.replace_all(v, \"location=NORMALIZED\").to_string()\n\n}\n\n\n", "file_path": "logfmt/tests/logging.rs", "rank": 21, "score": 192226.3421869034 }, { "content": "/// s/time=1612187170712947000/time=NORMALIZED/g\n\nfn normalize_timestamp(v: &str) -> String {\n\n let re = Regex::new(r#\"time=\\d+\"#).unwrap();\n\n re.replace_all(v, \"time=NORMALIZED\").to_string()\n\n}\n\n\n", "file_path": "logfmt/tests/logging.rs", "rank": 22, "score": 192220.0238438627 }, { "content": "/// Return a random string suitable for use as a database name\n\npub fn rand_name() -> String {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(10)\n\n .map(char::from)\n\n .collect()\n\n}\n\n\n", "file_path": "tests/end_to_end_cases/scenario.rs", "rank": 23, "score": 189784.178068581 }, { "content": "// return a random 16 digit string comprised of numbers suitable for\n\n// use as a influxdb2 org_id or bucket_id\n\npub fn rand_id() -> String {\n\n thread_rng()\n\n .sample_iter(&Standard)\n\n .filter_map(|c: u8| {\n\n if c.is_ascii_digit() {\n\n Some(char::from(c))\n\n } else {\n\n // discard if out of range\n\n None\n\n }\n\n })\n\n .take(16)\n\n .collect()\n\n}\n\n\n\n/// given a channel to talk with the management api, create a new\n\n/// database with the specified name configured with a 10MB mutable\n\n/// buffer, partitioned on table\n\npub async fn create_readable_database(\n\n db_name: impl Into<String>,\n", "file_path": "tests/end_to_end_cases/scenario.rs", "rank": 24, "score": 189783.8411612087 }, { "content": "// gunzip's the contents of the file at input_path into a temporary path\n\nfn uncompress_gz(input_path: &str, output_extension: &str) -> test_helpers::tempfile::TempPath {\n\n let gz_file = File::open(input_path).expect(\"Error opening input\");\n\n\n\n let output_path = test_helpers::tempfile::Builder::new()\n\n .prefix(\"decompressed_e2e\")\n\n .suffix(output_extension)\n\n .tempfile()\n\n .expect(\"error creating 
temp file\")\n\n .into_temp_path();\n\n\n\n let mut output_file = File::create(&output_path).expect(\"error opening output\");\n\n let mut decoder = flate2::read::GzDecoder::new(gz_file);\n\n std::io::copy(&mut decoder, &mut output_file).expect(\"error copying stream\");\n\n output_path\n\n}\n\n\n", "file_path": "tests/commands.rs", "rank": 25, "score": 188607.72002754873 }, { "content": "/// Map an InfluxDB 2.X org & bucket into an IOx DatabaseName.\n\n///\n\n/// This function ensures the mapping is unambiguous by requiring both `org` and\n\n/// `bucket` to not contain the `_` character in addition to the\n\n/// [`DatabaseName`] validation.\n\npub fn org_and_bucket_to_database<'a, O: AsRef<str>, B: AsRef<str>>(\n\n org: O,\n\n bucket: B,\n\n) -> Result<DatabaseName<'a>, OrgBucketMappingError> {\n\n const SEPARATOR: char = '_';\n\n\n\n let org: Cow<'_, str> = utf8_percent_encode(org.as_ref(), NON_ALPHANUMERIC).into();\n\n let bucket: Cow<'_, str> = utf8_percent_encode(bucket.as_ref(), NON_ALPHANUMERIC).into();\n\n\n\n let db_name = format!(\"{}{}{}\", org.as_ref(), SEPARATOR, bucket.as_ref());\n\n\n\n DatabaseName::new(db_name).context(InvalidDatabaseName)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_org_bucket_map_db_ok() {\n", "file_path": "data_types/src/names.rs", "rank": 26, "score": 186981.8160727551 }, { "content": "/// Returns the protobuf URL usable with a google.protobuf.Any message\n\n/// This is the full Protobuf package and message name prefixed by\n\n/// \"type.googleapis.com/\"\n\npub fn protobuf_type_url(protobuf_type: &str) -> String {\n\n format!(\"{}/{}\", ANY_TYPE_PREFIX, protobuf_type)\n\n}\n\n\n", "file_path": "generated_types/src/lib.rs", "rank": 27, "score": 186757.14015171386 }, { "content": "/// Normalizes a set of Chunks for comparison by removing timestamps\n\nfn normalize_chunks(chunks: Vec<Chunk>) -> Vec<Chunk> {\n\n chunks\n\n .into_iter()\n\n .map(|summary| {\n\n let Chunk {\n\n 
partition_key,\n\n table_name,\n\n id,\n\n storage,\n\n estimated_bytes,\n\n row_count,\n\n ..\n\n } = summary;\n\n Chunk {\n\n partition_key,\n\n table_name,\n\n id,\n\n storage,\n\n estimated_bytes,\n\n row_count,\n\n time_of_first_write: None,\n\n time_of_last_write: None,\n\n time_closing: None,\n\n }\n\n })\n\n .collect::<Vec<_>>()\n\n}\n", "file_path": "tests/end_to_end_cases/management_api.rs", "rank": 28, "score": 186400.45444978066 }, { "content": "pub fn make_server() -> IOxTestingServer<impl IOxTesting> {\n\n IOxTestingServer::new(IOxTestingService {})\n\n}\n", "file_path": "src/influxdb_ioxd/rpc/testing.rs", "rank": 29, "score": 183843.5772354158 }, { "content": "/// Convert a set of tag_keys into a form suitable for gRPC transport,\n\n/// adding the special 0x00 (_m) and 0xff (_f) tag keys\n\n///\n\n/// Namely, a Vec<Vec<u8>>, including the measurement and field names\n\npub fn tag_keys_to_byte_vecs(tag_keys: Arc<BTreeSet<String>>) -> Vec<Vec<u8>> {\n\n // special case measurement (0x00) and field (0xff)\n\n // ensuring they are in the correct sort order (first and last, respectively)\n\n let mut byte_vecs = Vec::with_capacity(2 + tag_keys.len());\n\n byte_vecs.push(TAG_KEY_MEASUREMENT.to_vec()); // Shown as _m == _measurement\n\n tag_keys.iter().for_each(|name| {\n\n byte_vecs.push(name.bytes().collect());\n\n });\n\n byte_vecs.push(TAG_KEY_FIELD.to_vec()); // Shown as _f == _field\n\n byte_vecs\n\n}\n\n\n", "file_path": "src/influxdb_ioxd/rpc/storage/data.rs", "rank": 30, "score": 177146.32421655382 }, { "content": "fn dump_u8_vec(encoded_strings: &[Vec<u8>]) -> String {\n\n encoded_strings\n\n .iter()\n\n .map(|b| String::from_utf8_lossy(b))\n\n .collect::<Vec<_>>()\n\n .join(\",\")\n\n}\n\n\n", "file_path": "tests/end_to_end_cases/storage_api.rs", "rank": 31, "score": 176110.55424538758 }, { "content": "fn parse_socket_addr(s: &str) -> std::io::Result<SocketAddr> {\n\n let mut addrs = s.to_socket_addrs()?;\n\n // when name resolution fails, 
to_socket_address returns a validation error\n\n // so generally there is at least one result address, unless the resolver is\n\n // drunk.\n\n Ok(addrs\n\n .next()\n\n .expect(\"name resolution should return at least one address\"))\n\n}\n\n\n\narg_enum! {\n\n #[derive(Debug, Copy, Clone, PartialEq)]\n\n pub enum ObjectStore {\n\n Memory,\n\n MemoryThrottled,\n\n File,\n\n S3,\n\n Google,\n\n Azure,\n\n }\n", "file_path": "src/commands/run.rs", "rank": 32, "score": 175818.2354156713 }, { "content": "/// Serialize to application/x-www-form-urlencoded syntax\n\npub fn urlencode<T: AsRef<str>>(s: T) -> String {\n\n ::url::form_urlencoded::byte_serialize(s.as_ref().as_bytes()).collect()\n\n}\n", "file_path": "influxdb2_client/src/common.rs", "rank": 33, "score": 175724.67943699285 }, { "content": "pub fn type_description(value: wb::ColumnValue) -> &'static str {\n\n match value {\n\n wb::ColumnValue::TagValue => \"tag\",\n\n wb::ColumnValue::I64Value => \"i64\",\n\n wb::ColumnValue::U64Value => \"u64\",\n\n wb::ColumnValue::F64Value => \"f64\",\n\n wb::ColumnValue::BoolValue => \"bool\",\n\n wb::ColumnValue::StringValue => \"String\",\n\n wb::ColumnValue::NONE => \"none\",\n\n _ => \"none\",\n\n }\n\n}\n\n\n\n/// A friendlier wrapper to help deal with the Flatbuffers write data\n\n#[self_referencing]\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct ReplicatedWrite {\n\n data: Vec<u8>,\n\n #[borrows(data)]\n\n #[covariant]\n", "file_path": "internal_types/src/data.rs", "rank": 34, "score": 172755.64064938854 }, { "content": "fn to_stringset(v: &[&str]) -> StringSetRef {\n\n v.into_stringset().unwrap()\n\n}\n", "file_path": "server/src/query_tests/influxrpc/tag_values.rs", "rank": 35, "score": 172291.61270928086 }, { "content": "fn to_stringset(v: &[&str]) -> StringSetRef {\n\n v.into_stringset().unwrap()\n\n}\n", "file_path": "server/src/query_tests/influxrpc/table_names.rs", "rank": 36, "score": 172291.61270928086 }, { "content": "fn to_stringset(v: &[&str]) -> 
StringSetRef {\n\n v.into_stringset().unwrap()\n\n}\n", "file_path": "server/src/query_tests/influxrpc/tag_keys.rs", "rank": 37, "score": 172291.61270928086 }, { "content": "/// Return a temporary directory that is deleted when the object is dropped\n\npub fn tmp_dir() -> Result<tempfile::TempDir> {\n\n let _ = dotenv::dotenv();\n\n\n\n let root = env::var_os(\"TEST_INFLUXDB_IOX_DB_DIR\").unwrap_or_else(|| env::temp_dir().into());\n\n\n\n Ok(tempfile::Builder::new()\n\n .prefix(\"influxdb_iox\")\n\n .tempdir_in(root)?)\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 38, "score": 170815.32127497054 }, { "content": "/// decode decodes and unpacks the binary-encoded values stored in src into\n\n/// dst.\n\npub fn decode(src: &[u8], dst: &mut Vec<u64>) {\n\n let mut i = 0;\n\n let mut j = 0;\n\n let mut buf: [u8; 8] = [0; 8];\n\n while i < src.len() {\n\n if dst.len() < j + 240 {\n\n dst.resize(j + 240, 0); // may need 240 capacity\n\n }\n\n buf.copy_from_slice(&src[i..i + 8]);\n\n j += decode_value(u64::from_be_bytes(buf), &mut dst[j..]);\n\n i += 8;\n\n }\n\n dst.truncate(j);\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/simple8b.rs", "rank": 39, "score": 168780.65743478082 }, { "content": "pub fn tmp_file() -> Result<tempfile::NamedTempFile> {\n\n let _ = dotenv::dotenv();\n\n\n\n let root = env::var_os(\"TEST_INFLUXDB_IOX_DB_DIR\").unwrap_or_else(|| env::temp_dir().into());\n\n\n\n Ok(tempfile::Builder::new()\n\n .prefix(\"influxdb_iox\")\n\n .tempfile_in(root)?)\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 40, "score": 167268.2273867242 }, { "content": "fn whitespace(i: &str) -> IResult<&str, &str> {\n\n take_while1(|c| c == ' ')(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 41, "score": 166439.8543505182 }, { "content": "/// Decodes a slice of bytes representing Snappy-compressed data into a vector\n\n/// of vectors of bytes representing string data, which may or may not be valid\n\n/// UTF-8.\n\npub 
fn decode(src: &[u8], dst: &mut Vec<Vec<u8>>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut decoder = snap::raw::Decoder::new();\n\n // First byte stores the encoding type, only have snappy format\n\n // currently so ignore for now.\n\n let decoded_bytes = decoder.decompress_vec(&src[HEADER_LEN..])?;\n\n\n\n if dst.capacity() == 0 {\n\n dst.reserve_exact(64);\n\n }\n\n\n\n let num_decoded_bytes = decoded_bytes.len();\n\n let mut i = 0;\n\n\n\n while i < num_decoded_bytes {\n\n let (length, num_bytes_read) = u64::decode_var(&decoded_bytes[i..]);\n\n let length: usize = length.try_into()?;\n", "file_path": "influxdb_tsm/src/encoders/string.rs", "rank": 42, "score": 164740.6359588334 }, { "content": "fn exponential_value(i: &str) -> IResult<&str, &str> {\n\n recognize(separated_pair(\n\n digit1,\n\n tuple((alt((tag(\"e\"), tag(\"E\"))), alt((tag(\"-\"), tag(\"+\"))))),\n\n digit1,\n\n ))(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 43, "score": 164253.94712561485 }, { "content": "fn from_task_trackers(db_name: &str, jobs: Vec<TaskTracker<Job>>) -> Result<RecordBatch> {\n\n let jobs = jobs\n\n .into_iter()\n\n .filter(|job| job.metadata().db_name() == Some(db_name))\n\n .collect::<Vec<_>>();\n\n\n\n let ids = StringArray::from_iter(jobs.iter().map(|job| Some(job.id().to_string())));\n\n let statuses = StringArray::from_iter(jobs.iter().map(|job| Some(job.get_status().name())));\n\n let cpu_time_used = Time64NanosecondArray::from_iter(\n\n jobs.iter()\n\n .map(|job| job.get_status().cpu_nanos().map(|n| n as i64)),\n\n );\n\n let wall_time_used = Time64NanosecondArray::from_iter(\n\n jobs.iter()\n\n .map(|job| job.get_status().wall_nanos().map(|n| n as i64)),\n\n );\n\n let partition_keys =\n\n StringArray::from_iter(jobs.iter().map(|job| job.metadata().partition_key()));\n\n let chunk_ids = UInt32Array::from_iter(jobs.iter().map(|job| job.metadata().chunk_id()));\n\n let descriptions =\n", 
"file_path": "server/src/db/system_tables.rs", "rank": 44, "score": 164191.4822554238 }, { "content": "fn dump_data_frames(frames: &[Data]) -> Vec<String> {\n\n frames.iter().map(|f| dump_data(f)).collect()\n\n}\n\n\n", "file_path": "tests/end_to_end_cases/storage_api.rs", "rank": 45, "score": 163999.4841240625 }, { "content": "pub fn convert(\n\n input_path: &str,\n\n output_path: &str,\n\n compression_level: CompressionLevel,\n\n) -> Result<()> {\n\n info!(\"convert starting\");\n\n debug!(\"Reading from input path {}\", input_path);\n\n\n\n if is_directory(input_path) {\n\n let mut files: Vec<_> = fs::read_dir(input_path)\n\n .unwrap()\n\n .filter_map(Result::ok)\n\n .filter(|filename| filename.path().extension().map_or(false, |x| x == \"tsm\"))\n\n .collect();\n\n\n\n if files.is_empty() {\n\n warn!(\"No TSM files found\");\n\n return Ok(());\n\n }\n\n\n", "file_path": "src/commands/convert.rs", "rank": 46, "score": 163943.87195902757 }, { "content": "// Translate the field name from tracing into the logfmt style\n\nfn translate_field_name(name: &str) -> &str {\n\n if name == \"message\" {\n\n \"msg\"\n\n } else {\n\n name\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn quote_and_escape_len0() {\n\n assert_eq!(quote_and_escape(\"\"), \"\");\n\n }\n\n\n\n #[test]\n\n fn quote_and_escape_len1() {\n\n assert_eq!(quote_and_escape(\"f\"), \"f\");\n", "file_path": "logfmt/src/lib.rs", "rank": 47, "score": 163915.9505702178 }, { "content": "/// Returns the number of object store chunks in the specified database\n\npub fn count_object_store_chunks(db: &Db) -> usize {\n\n chunk_summary_iter(db)\n\n .filter(|s| {\n\n s.storage == ChunkStorage::ReadBufferAndObjectStore\n\n || s.storage == ChunkStorage::ObjectStoreOnly\n\n })\n\n .count()\n\n}\n", "file_path": "server/src/query_tests/utils.rs", "rank": 48, "score": 163905.6013450071 }, { "content": "pub fn file_name_for_sequence_number(id: u64) -> String {\n\n 
format!(\"wb_{:016x}.db\", id)\n\n}\n\n\n", "file_path": "write_buffer/tests/helpers/mod.rs", "rank": 49, "score": 163905.6013450071 }, { "content": "/// Returns the number of mutable buffer chunks in the specified database\n\npub fn count_mutable_buffer_chunks(db: &Db) -> usize {\n\n chunk_summary_iter(db)\n\n .filter(|s| {\n\n s.storage == ChunkStorage::OpenMutableBuffer\n\n || s.storage == ChunkStorage::ClosedMutableBuffer\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "server/src/query_tests/utils.rs", "rank": 50, "score": 163905.6013450071 }, { "content": "/// Returns the number of read buffer chunks in the specified database\n\npub fn count_read_buffer_chunks(db: &Db) -> usize {\n\n chunk_summary_iter(db)\n\n .filter(|s| {\n\n s.storage == ChunkStorage::ReadBuffer\n\n || s.storage == ChunkStorage::ReadBufferAndObjectStore\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "server/src/query_tests/utils.rs", "rank": 51, "score": 163905.6013450071 }, { "content": "fn integral_value_signed(i: &str) -> IResult<&str, &str> {\n\n recognize(preceded(opt(tag(\"-\")), digit1))(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 52, "score": 162160.19269292025 }, { "content": "/// Returns a single column record batch of type Utf8 from the\n\n/// contents of something that can be turned into an iterator over\n\n/// `Option<&str>`\n\npub fn str_iter_to_batch<Ptr, I>(field_name: &str, iter: I) -> Result<RecordBatch, ArrowError>\n\nwhere\n\n I: IntoIterator<Item = Option<Ptr>>,\n\n Ptr: AsRef<str>,\n\n{\n\n let array = StringArray::from_iter(iter);\n\n\n\n RecordBatch::try_from_iter(vec![(field_name, Arc::new(array) as ArrayRef)])\n\n}\n\n\n", "file_path": "arrow_deps/src/util.rs", "rank": 53, "score": 161045.35904777673 }, { "content": "// sort a record batch by all columns (to provide a stable output order for test\n\n// comparison)\n\npub fn sort_record_batch(batch: RecordBatch) -> RecordBatch {\n\n let sort_input: Vec<SortColumn> = batch\n\n 
.columns()\n\n .iter()\n\n .map(|col| SortColumn {\n\n values: col.clone(),\n\n options: Some(SortOptions {\n\n descending: false,\n\n nulls_first: false,\n\n }),\n\n })\n\n .collect();\n\n\n\n let sort_output = lexsort(&sort_input, None).expect(\"Sorting to complete\");\n\n\n\n RecordBatch::try_new(batch.schema(), sort_output).unwrap()\n\n}\n", "file_path": "arrow_deps/src/test_util.rs", "rank": 54, "score": 160719.56387503224 }, { "content": "/// Given the requested projection (set of requested columns),\n\n/// returns the schema of selecting just those columns\n\n///\n\n/// TODO contribute this back upstream in arrow's Schema so we can\n\n/// avoid the copy of fields\n\npub fn project_schema(\n\n arrow_schema: ArrowSchemaRef,\n\n projection: &Option<Vec<usize>>,\n\n) -> ArrowSchemaRef {\n\n match projection {\n\n None => arrow_schema,\n\n Some(projection) => {\n\n let new_fields = projection\n\n .iter()\n\n .map(|&i| arrow_schema.field(i))\n\n .cloned()\n\n .collect();\n\n Arc::new(ArrowSchema::new(new_fields))\n\n }\n\n }\n\n}\n\n\n", "file_path": "query/src/util.rs", "rank": 55, "score": 160568.49495000325 }, { "content": "/// A test helper function for asserting floating point numbers are within the\n\n/// machine epsilon because strict comparison of floating point numbers is\n\n/// incorrect\n\npub fn approximately_equal(f1: f64, f2: f64) -> bool {\n\n (f1 - f2).abs() < f64::EPSILON\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 56, "score": 160216.97270120608 }, { "content": "pub fn all_approximately_equal(f1: &[f64], f2: &[f64]) -> bool {\n\n f1.len() == f2.len() && f1.iter().zip(f2).all(|(&a, &b)| approximately_equal(a, b))\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 57, "score": 160205.32164279756 }, { "content": "fn field_float_value_with_decimal(i: &str) -> IResult<&str, &str> {\n\n recognize(separated_pair(integral_value_signed, tag(\".\"), digit1))(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", 
"rank": 58, "score": 160152.36511256808 }, { "content": "fn field_float_value_no_decimal(i: &str) -> IResult<&str, &str> {\n\n integral_value_signed(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 59, "score": 160152.36511256808 }, { "content": "fn field_float_value_with_exponential_and_decimal(i: &str) -> IResult<&str, &str> {\n\n recognize(separated_pair(\n\n integral_value_signed,\n\n tag(\".\"),\n\n exponential_value,\n\n ))(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 60, "score": 158224.8199155191 }, { "content": "fn field_float_value_with_exponential_no_decimal(i: &str) -> IResult<&str, &str> {\n\n exponential_value(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 61, "score": 158224.8199155191 }, { "content": "/// Start log or trace emitter. Panics on error.\n\npub fn init_logs_and_tracing(\n\n log_verbose_count: u8,\n\n config: &crate::commands::run::Config,\n\n) -> Result<TracingGuard, SetGlobalDefaultError> {\n\n // Handle the case if -v/-vv is specified both before and after the server\n\n // command\n\n let log_verbose_count = if log_verbose_count > config.log_verbose_count {\n\n log_verbose_count\n\n } else {\n\n config.log_verbose_count\n\n };\n\n\n\n let (traces_layer_filter, traces_layer_otel) = match construct_opentelemetry_tracer(config) {\n\n None => (None, None),\n\n Some(tracer) => {\n\n let traces_layer_otel = Some(tracing_opentelemetry::OpenTelemetryLayer::new(tracer));\n\n match &config.traces_filter {\n\n None => (None, traces_layer_otel),\n\n Some(traces_filter) => (\n\n Some(EnvFilter::try_new(traces_filter).unwrap()),\n", "file_path": "src/commands/tracing.rs", "rank": 62, "score": 157401.10489205352 }, { "content": "/// Normalize lines for easy comparison\n\nfn normalize<'a>(lines: impl Iterator<Item = &'a String>) -> Vec<String> {\n\n let lines = lines\n\n .map(|line| normalize_timestamp(line))\n\n .map(|line| normalize_location(&line))\n\n .collect();\n\n 
normalize_spans(lines)\n\n}\n\n\n", "file_path": "logfmt/tests/logging.rs", "rank": 63, "score": 157087.13012943236 }, { "content": "/// Truncates the input slice to remove all whitespace from the\n\n/// beginning (left), including completely commented-out lines\n\nfn trim_leading(mut i: &str) -> &str {\n\n loop {\n\n let offset = i\n\n .find(|c| !is_whitespace_boundary_char(c))\n\n .unwrap_or_else(|| i.len());\n\n i = &i[offset..];\n\n\n\n if i.starts_with('#') {\n\n let offset = i.find('\\n').unwrap_or_else(|| i.len());\n\n i = &i[offset..];\n\n } else {\n\n break i;\n\n }\n\n }\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 64, "score": 155874.64855533672 }, { "content": "/// This is a copied version of nom's `recognize` that runs the parser\n\n/// **and** returns the entire matched input.\n\npub fn parse_and_recognize<\n\n I: Clone + nom::Offset + nom::Slice<std::ops::RangeTo<usize>>,\n\n O,\n\n E: nom::error::ParseError<I>,\n\n F,\n\n>(\n\n parser: F,\n\n) -> impl Fn(I) -> IResult<I, (I, O), E>\n\nwhere\n\n F: Fn(I) -> IResult<I, O, E>,\n\n{\n\n move |input: I| {\n\n let i = input.clone();\n\n match parser(i) {\n\n Ok((i, o)) => {\n\n let index = input.offset(&i);\n\n Ok((i, (input.slice(..index), o)))\n\n }\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 65, "score": 154423.0524754789 }, { "content": "pub fn lines_to_replicated_write(\n\n server_id: ServerId,\n\n sequence: u64,\n\n lines: &[ParsedLine<'_>],\n\n partitioner: &impl Partitioner,\n\n) -> ReplicatedWrite {\n\n let default_time = Utc::now();\n\n let entry_bytes = split_lines_into_write_entry_partitions(\n\n |line| partitioner.partition_key(line, &default_time).unwrap(),\n\n lines,\n\n );\n\n\n\n let mut hasher = Hasher::new();\n\n hasher.update(&entry_bytes);\n\n let checksum = hasher.finalize();\n\n\n\n let mut fbb = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);\n\n let payload = 
fbb.create_vector_direct(&entry_bytes);\n\n\n\n let write = wb::ReplicatedWrite::create(\n", "file_path": "internal_types/src/data.rs", "rank": 66, "score": 154423.0524754789 }, { "content": "/// Converts parsed line protocol into a collection of ShardedEntry with the\n\n/// underlying flatbuffers bytes generated.\n\npub fn lines_to_sharded_entries(\n\n lines: &[ParsedLine<'_>],\n\n sharder: Option<&impl Sharder>,\n\n partitioner: &impl Partitioner,\n\n) -> Result<Vec<ShardedEntry>> {\n\n let default_time = Utc::now();\n\n let mut sharded_lines = BTreeMap::new();\n\n\n\n for line in lines {\n\n let shard_id = match &sharder {\n\n Some(s) => Some(s.shard(line).context(GeneratingShardId)?),\n\n None => None,\n\n };\n\n let partition_key = partitioner\n\n .partition_key(line, &default_time)\n\n .context(GeneratingPartitionKey)?;\n\n let table = line.series.measurement.as_str();\n\n\n\n sharded_lines\n\n .entry(shard_id)\n", "file_path": "internal_types/src/entry.rs", "rank": 67, "score": 154423.0524754789 }, { "content": "pub fn total_size_on_disk(dir: impl Into<PathBuf>) -> u64 {\n\n write_buffer_paths(&dir.into())\n\n .iter()\n\n .map(|file| {\n\n fs::metadata(file)\n\n .expect(\"Could not read file metadata\")\n\n .len()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "write_buffer/tests/helpers/mod.rs", "rank": 68, "score": 153650.51212917984 }, { "content": "pub fn parse_lines(input: &str) -> impl Iterator<Item = Result<ParsedLine<'_>>> {\n\n split_lines(input).filter_map(|line| {\n\n let i = trim_leading(line);\n\n\n\n if i.is_empty() {\n\n return None;\n\n }\n\n\n\n let res = match parse_line(i) {\n\n Ok((remaining, line)) => {\n\n // should have parsed the whole input line, if any\n\n // data remains it is a parse error for this line\n\n // corresponding Go logic:\n\n // https://github.com/influxdata/influxdb/blob/217eddc87e14a79b01d0c22994fc139f530094a2/models/points_parser.go#L259-L266\n\n if !remaining.is_empty() {\n\n Some(Err(Error::CannotParseEntireLine 
{\n\n trailing_content: String::from(remaining),\n\n }))\n\n } else {\n\n Some(Ok(line))\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 69, "score": 153472.802925702 }, { "content": "fn measurement(i: &str) -> IResult<&str, EscapedStr<'_>> {\n\n let normal_char = take_while1(|c| !is_whitespace_boundary_char(c) && c != ',' && c != '\\\\');\n\n\n\n let space = map(tag(\" \"), |_| \" \");\n\n let comma = map(tag(\",\"), |_| \",\");\n\n let backslash = map(tag(\"\\\\\"), |_| \"\\\\\");\n\n\n\n let escaped = alt((space, comma, backslash));\n\n\n\n escape_or_fallback(normal_char, \"\\\\\", escaped)(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 70, "score": 152755.94949178392 }, { "content": "/// Create a DataFusion `Expr` that invokes `window_bounds` with the\n\n/// appropriate every and offset arguments at runtime\n\npub fn make_window_bound_expr(\n\n time_arg: Expr,\n\n every: &WindowDuration,\n\n offset: &WindowDuration,\n\n) -> Expr {\n\n // Bind a copy of the arguments in a closure\n\n let every = every.clone();\n\n let offset = offset.clone();\n\n\n\n // TODO provide optimized implementations (that took every/offset\n\n // as a constant rather than arrays)\n\n let func_ptr = make_scalar_function(move |args| Ok(window_bounds(args, &every, &offset)));\n\n\n\n let udf = create_udf(\n\n \"window_bounds\",\n\n vec![TIME_DATA_TYPE()], // argument types\n\n Arc::new(TIME_DATA_TYPE()), // return type\n\n func_ptr,\n\n );\n\n\n", "file_path": "query/src/func/window.rs", "rank": 71, "score": 151617.85329099587 }, { "content": "fn timestamp(i: &str) -> IResult<&str, i64> {\n\n map_fail(integral_value_signed, |value| {\n\n value.parse().context(TimestampValueInvalid { value })\n\n })(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 72, "score": 151336.6808500639 }, { "content": "#[test]\n\nfn test_without_normalization() {\n\n let capture = CapturedWriter::new();\n\n info!(\n\n event_name = \"foo bar\",\n\n 
other_event = \"baz\",\n\n \"This is an info message\"\n\n );\n\n\n\n // double assure that normalization isn't messing with things by\n\n // checking for presence of strings as well\n\n let log_string = normalize(capture.to_strings().iter()).join(\"\\n\");\n\n assert!(log_string.contains(\"This is an info message\"));\n\n assert!(log_string.contains(\"event_name\"));\n\n assert!(log_string.contains(\"other_event\"));\n\n assert!(log_string.contains(\"baz\"));\n\n assert!(log_string.contains(\"foo bar\"));\n\n}\n\n\n", "file_path": "logfmt/tests/logging.rs", "rank": 73, "score": 151089.4786961066 }, { "content": "fn field_key(i: &str) -> IResult<&str, EscapedStr<'_>> {\n\n let normal_char = take_while1(|c| !is_whitespace_boundary_char(c) && c != '=' && c != '\\\\');\n\n escaped_value(normal_char)(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 74, "score": 150662.1950590893 }, { "content": "fn tag_key(i: &str) -> IResult<&str, EscapedStr<'_>> {\n\n let normal_char = take_while1(|c| !is_whitespace_boundary_char(c) && c != '=' && c != '\\\\');\n\n\n\n escaped_value(normal_char)(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 75, "score": 150662.1950590893 }, { "content": "fn tag_value(i: &str) -> IResult<&str, EscapedStr<'_>> {\n\n let normal_char = take_while1(|c| !is_whitespace_boundary_char(c) && c != ',' && c != '\\\\');\n\n escaped_value(normal_char)(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 76, "score": 150662.1950590893 }, { "content": "/// escape any characters in name as needed, otherwise return string as is\n\nfn quote_and_escape(value: &'_ str) -> Cow<'_, str> {\n\n if needs_quotes_and_escaping(value) {\n\n Cow::Owned(format!(\"\\\"{}\\\"\", value.replace(\"\\\"\", \"\\\\\\\"\")))\n\n } else {\n\n Cow::Borrowed(value)\n\n }\n\n}\n\n\n", "file_path": "logfmt/src/lib.rs", "rank": 77, "score": 150039.4034292919 }, { "content": "pub fn 
split_lines_into_write_entry_partitions(\n\n partition_key_fn: impl Fn(&ParsedLine<'_>) -> String,\n\n lines: &[ParsedLine<'_>],\n\n) -> Vec<u8> {\n\n let mut fbb = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);\n\n\n\n // split the lines into collections that go into partitions\n\n let mut partition_writes = BTreeMap::new();\n\n\n\n for line in lines {\n\n let key = partition_key_fn(line);\n\n\n\n partition_writes\n\n .entry(key)\n\n .or_insert_with(Vec::new)\n\n .push(line);\n\n }\n\n\n\n // create a WriteBufferEntry for each batch of lines going to a partition (one\n\n // WriteBufferEntry per partition)\n", "file_path": "internal_types/src/data.rs", "rank": 78, "score": 148970.8825494348 }, { "content": "pub fn snapshot_chunk<T>(\n\n metadata_path: object_store::path::Path,\n\n data_path: object_store::path::Path,\n\n store: Arc<ObjectStore>,\n\n partition_key: &str,\n\n chunk: Arc<T>,\n\n table_stats: TableSummary,\n\n notify: Option<oneshot::Sender<()>>,\n\n) -> Result<Arc<Snapshot<T>>>\n\nwhere\n\n T: Send + Sync + 'static + PartitionChunk,\n\n{\n\n let snapshot = Snapshot::new(\n\n partition_key.to_string(),\n\n metadata_path,\n\n data_path,\n\n store,\n\n chunk,\n\n vec![table_stats],\n\n );\n", "file_path": "server/src/snapshot.rs", "rank": 79, "score": 148838.8721486448 }, { "content": "fn field_string_value(i: &str) -> IResult<&str, EscapedStr<'_>> {\n\n // https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/#data-types-and-format\n\n // For string field values, backslash is only used to escape itself(\\) or double\n\n // quotes.\n\n let string_data = alt((\n\n map(tag(r#\"\\\"\"#), |_| r#\"\"\"#), // escaped double quote -> double quote\n\n map(tag(r#\"\\\\\"#), |_| r#\"\\\"#), // escaped backslash --> single backslash\n\n tag(r#\"\\\"#), // unescaped single backslash\n\n take_while1(|c| c != '\\\\' && c != '\"'), // anything else w/ no special handling\n\n ));\n\n\n\n // NB: many0 doesn't allow combinators that match the 
empty string so\n\n // we need to special case a pair of double quotes.\n\n let empty_str = map(tag(r#\"\"\"\"#), |_| Vec::new());\n\n\n\n let quoted_str = alt((\n\n preceded(tag(\"\\\"\"), terminated(many0(string_data), tag(\"\\\"\"))),\n\n empty_str,\n\n ));\n\n\n\n map(quoted_str, |vec| EscapedStr::from_slices(&vec))(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 80, "score": 148654.36747873714 }, { "content": "/// Creates expression like:\n\n/// start <= time && time < end\n\npub fn make_range_expr(start: i64, end: i64, time: impl AsRef<str>) -> Expr {\n\n // We need to cast the start and end values to timestamps\n\n // the equivalent of:\n\n let ts_start = ScalarValue::TimestampNanosecond(Some(start));\n\n let ts_end = ScalarValue::TimestampNanosecond(Some(end));\n\n\n\n let ts_low = lit(ts_start).lt_eq(col(time.as_ref()));\n\n let ts_high = col(time.as_ref()).lt(lit(ts_end));\n\n\n\n ts_low.and(ts_high)\n\n}\n\n\n\n/// Creates a single expression representing the conjunction (aka\n\n/// AND'ing) together of a set of expressions\n\n#[derive(Debug, Default)]\n\npub struct AndExprBuilder {\n\n cur_expr: Option<Expr>,\n\n}\n\n\n\nimpl AndExprBuilder {\n", "file_path": "arrow_deps/src/util.rs", "rank": 81, "score": 148249.49648462934 }, { "content": "fn series(i: &str) -> IResult<&str, Series<'_>> {\n\n let tag_set = preceded(tag(\",\"), tag_set);\n\n let series = tuple((measurement, opt(tag_set)));\n\n\n\n let series_and_raw_input = parse_and_recognize(series);\n\n\n\n map(\n\n series_and_raw_input,\n\n |(raw_input, (measurement, tag_set))| Series {\n\n raw_input,\n\n measurement,\n\n tag_set,\n\n },\n\n )(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 82, "score": 147563.66503015335 }, { "content": "fn field_uinteger_value(i: &str) -> IResult<&str, u64> {\n\n let tagged_value = terminated(digit1, tag(\"u\"));\n\n map_fail(tagged_value, |value| {\n\n value.parse().context(UIntegerValueInvalid { value })\n\n 
})(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 83, "score": 146721.6319918154 }, { "content": "fn field_integer_value(i: &str) -> IResult<&str, i64> {\n\n let tagged_value = terminated(integral_value_signed, tag(\"i\"));\n\n map_fail(tagged_value, |value| {\n\n value.parse().context(IntegerValueInvalid { value })\n\n })(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 84, "score": 146721.6319918154 }, { "content": "fn field_bool_value(i: &str) -> IResult<&str, bool> {\n\n // https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/#data-types-and-format\n\n // \"specify TRUE with t, T, true, True, or TRUE. Specify FALSE with f, F, false,\n\n // False, or FALSE\n\n alt((\n\n map(tag(\"true\"), |_| true),\n\n map(tag(\"True\"), |_| true),\n\n map(tag(\"TRUE\"), |_| true),\n\n map(tag(\"t\"), |_| true),\n\n map(tag(\"T\"), |_| true),\n\n map(tag(\"false\"), |_| false),\n\n map(tag(\"False\"), |_| false),\n\n map(tag(\"FALSE\"), |_| false),\n\n map(tag(\"f\"), |_| false),\n\n map(tag(\"F\"), |_| false),\n\n ))(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 85, "score": 146721.6319918154 }, { "content": "fn field_float_value(i: &str) -> IResult<&str, f64> {\n\n let value = alt((\n\n field_float_value_with_exponential_and_decimal,\n\n field_float_value_with_exponential_no_decimal,\n\n field_float_value_with_decimal,\n\n field_float_value_no_decimal,\n\n ));\n\n map_fail(value, |value| {\n\n value.parse().context(FloatValueInvalid { value })\n\n })(i)\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 86, "score": 146721.6319918154 }, { "content": "/// Translates FieldList into the gRPC format\n\npub fn fieldlist_to_measurement_fields_response(\n\n fieldlist: FieldList,\n\n) -> Result<MeasurementFieldsResponse> {\n\n let fields = fieldlist\n\n .fields\n\n .into_iter()\n\n .map(|f| {\n\n Ok(MessageField {\n\n key: f.name,\n\n r#type: 
datatype_to_measurement_field_enum(&f.data_type)? as i32,\n\n timestamp: f.last_timestamp,\n\n })\n\n })\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n Ok(MeasurementFieldsResponse { fields })\n\n}\n\n\n", "file_path": "src/influxdb_ioxd/rpc/storage/data.rs", "rank": 87, "score": 146469.1200428853 }, { "content": "pub fn make_read_group_aggregate(\n\n aggregate: Option<RPCAggregate>,\n\n group: RPCGroup,\n\n group_keys: Vec<String>,\n\n) -> Result<GroupByAndAggregate> {\n\n // validate Group setting\n\n match group {\n\n // Group:None is invalid if grouping keys are specified\n\n RPCGroup::None if !group_keys.is_empty() => InvalidGroupNone {\n\n num_group_keys: group_keys.len(),\n\n }\n\n .fail(),\n\n _ => Ok(()),\n\n }?;\n\n\n\n let gby_agg = GroupByAndAggregate::Columns {\n\n agg: convert_aggregate(aggregate)?,\n\n group_columns: group_keys,\n\n };\n\n Ok(gby_agg)\n\n}\n\n\n", "file_path": "src/influxdb_ioxd/rpc/storage/expr.rs", "rank": 88, "score": 146469.1200428853 }, { "content": "/// Builds GroupByAndAggregate::Windows\n\npub fn make_read_window_aggregate(\n\n aggregates: Vec<RPCAggregate>,\n\n window_every: i64,\n\n offset: i64,\n\n window: Option<RPCWindow>,\n\n) -> Result<GroupByAndAggregate> {\n\n // only support single aggregate for now\n\n if aggregates.len() != 1 {\n\n return AggregateNotSingleton { aggregates }.fail();\n\n }\n\n let agg = convert_aggregate(aggregates.into_iter().next())?;\n\n\n\n // Translation from these parameters to window bound\n\n // is defined in the Go code:\n\n // https://github.com/influxdata/idpe/pull/8636/files#diff-94c0a8d7e427e2d7abe49f01dced50ad776b65ec8f2c8fb2a2c8b90e2e377ed5R82\n\n //\n\n // Quoting:\n\n //\n\n // Window and the WindowEvery/Offset should be mutually\n\n // exclusive. 
If you set either the WindowEvery or Offset with\n", "file_path": "src/influxdb_ioxd/rpc/storage/expr.rs", "rank": 89, "score": 146469.1200428853 }, { "content": "/// encode encodes a vector of signed integers into a slice of bytes.\n\n///\n\n/// To maximise compression, the provided vector should be sorted in ascending\n\n/// order. First deltas between the integers are determined, then further\n\n/// encoding is potentially carried out. If all the deltas are the same the\n\n/// block can be encoded using RLE. If not, as long as the deltas are not bigger\n\n/// than simple8b::MAX_VALUE they can be encoded using simple8b.\n\npub fn encode(src: &[i64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer.\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut max: u64 = 0;\n\n let mut deltas = i64_to_u64_vector(src);\n\n if deltas.len() > 1 {\n\n for i in (1..deltas.len()).rev() {\n\n deltas[i] = deltas[i].wrapping_sub(deltas[i - 1]);\n\n if deltas[i] > max {\n\n max = deltas[i];\n\n }\n\n }\n\n let mut use_rle = true;\n\n for i in 2..deltas.len() {\n\n if deltas[1] != deltas[i] {\n\n use_rle = false;\n\n break;\n", "file_path": "influxdb_tsm/src/encoders/timestamp.rs", "rank": 90, "score": 146090.1574302774 }, { "content": "/// decode decodes a slice of bytes encoded using encode back into a\n\n/// vector of signed integers.\n\npub fn decode(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n let encoding = &src[0] >> 4;\n\n match encoding {\n\n encoding if encoding == Encoding::Uncompressed as u8 => {\n\n decode_uncompressed(&src[1..], dst) // first byte not used\n\n }\n\n encoding if encoding == Encoding::Rle as u8 => decode_rle(&src, dst),\n\n encoding if encoding == Encoding::Simple8b as u8 => decode_simple8b(&src, dst),\n\n _ => Err(From::from(\"invalid block encoding\")),\n\n }\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/timestamp.rs", "rank": 
91, "score": 146089.92659536627 }, { "content": "/// encode packs and binary encodes the provides slice of u64 values using\n\n/// simple8b into the provided vector.\n\npub fn encode(src: &[u64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n let mut i = 0;\n\n 'next_value: while i < src.len() {\n\n // try to pack a run of 240 or 120 1s\n\n let remain = src.len() - i;\n\n if remain >= 120 {\n\n let a = if remain >= 240 {\n\n &src[i..i + 240]\n\n } else {\n\n &src[i..i + 120]\n\n };\n\n\n\n // search for the longest sequence of 1s in a\n\n let k = a.iter().take_while(|x| **x == 1).count();\n\n if k == 240 {\n\n i += 240;\n\n dst.resize(dst.len() + 8, 0);\n\n continue;\n\n } else if k >= 120 {\n\n i += 120;\n", "file_path": "influxdb_tsm/src/encoders/simple8b.rs", "rank": 92, "score": 146089.87198452535 }, { "content": "/// Encodes a slice of byte slices representing string data into a vector of\n\n/// bytes. Currently uses Snappy compression.\n\npub fn encode(src: &[&[u8]], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n // strings shouldn't be longer than 64kb\n\n let length_of_lengths = src.len() * super::MAX_VAR_INT_32;\n\n let sum_of_lengths: usize = src\n\n .iter()\n\n .map(|s| {\n\n let len = s.len();\n\n assert!(len < MAX_I32);\n\n len\n\n })\n\n .sum();\n\n let source_size = 2 + length_of_lengths + sum_of_lengths;\n\n\n\n // determine the maximum possible length needed for the buffer, which\n\n // includes the compressed size\n", "file_path": "influxdb_tsm/src/encoders/string.rs", "rank": 93, "score": 146089.8183403718 }, { "content": "/// Encodes a slice of unsigned 64-bit integers into `dst`.\n\n///\n\n/// Deltas between the integers in the input are first calculated, then the\n\n/// deltas are further compressed if possible, either via bit-packing using\n\n/// simple8b or by run-length encoding the deltas if they're all the same.\n\npub fn encode(src: 
&[u64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n let signed = u64_to_i64_vector(&src);\n\n super::integer::encode(&signed, dst)\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/unsigned.rs", "rank": 94, "score": 146089.24156618273 }, { "content": "/// encode encodes a vector of signed integers into dst.\n\n///\n\n/// Deltas between the integers in the vector are first calculated, and these\n\n/// deltas are then zig-zag encoded. The resulting zig-zag encoded deltas are\n\n/// further compressed if possible, either via bit-packing using simple8b or by\n\n/// run-length encoding the deltas if they're all the same.\n\npub fn encode(src: &[i64], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer.\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut max: u64 = 0;\n\n let mut deltas = i64_to_u64_vector(&src);\n\n for i in (1..deltas.len()).rev() {\n\n deltas[i] = zig_zag_encode(deltas[i].wrapping_sub(deltas[i - 1]) as i64);\n\n if deltas[i] > max {\n\n max = deltas[i];\n\n }\n\n }\n\n\n\n // deltas[0] is the first value in the sequence.\n\n deltas[0] = zig_zag_encode(src[0]);\n\n\n\n if deltas.len() > 2 {\n\n let mut use_rle = true;\n", "file_path": "influxdb_tsm/src/encoders/integer.rs", "rank": 95, "score": 146088.91487832143 }, { "content": "/// Encodes a slice of booleans into `dst`.\n\n///\n\n/// Boolean encoding uses 1 bit per value. Each compressed byte slice contains a\n\n/// 1 byte header indicating the compression type, followed by a variable byte\n\n/// encoded length indicating how many booleans are packed in the slice. 
The\n\n/// remaining bytes contain 1 byte for every 8 boolean values encoded.\n\npub fn encode(src: &[bool], dst: &mut Vec<u8>) -> Result<(), Box<dyn Error>> {\n\n dst.clear();\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let size = HEADER_LEN + 8 + ((src.len() + 7) / 8); // Header + Num bools + bool data.\n\n dst.resize(size, 0);\n\n\n\n // Store the encoding type in the 4 high bits of the first byte\n\n dst[0] = BOOLEAN_COMPRESSED_BIT_PACKED << 4;\n\n\n\n let mut n = 8u64; // Current bit in current byte.\n\n\n\n // Encode the number of booleans written.\n\n let len_u64: u64 = src.len().try_into()?;\n\n let i = len_u64.encode_var(&mut dst[1..]);\n\n let step: u64 = (i * 8).try_into()?;\n\n n += step;\n\n\n", "file_path": "influxdb_tsm/src/encoders/boolean.rs", "rank": 96, "score": 146088.76584339954 }, { "content": "/// decode decodes the provided slice of bytes into a vector of f64 values.\n\npub fn decode(src: &[u8], dst: &mut Vec<f64>) -> Result<(), Box<dyn Error>> {\n\n decode_with_sentinel(src, dst, SENTINEL)\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/float.rs", "rank": 97, "score": 146083.81097132657 }, { "content": "/// decode decodes a slice of bytes into a vector of signed integers.\n\npub fn decode(src: &[u8], dst: &mut Vec<i64>) -> Result<(), Box<dyn Error>> {\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n let encoding = &src[0] >> 4;\n\n match encoding {\n\n encoding if encoding == Encoding::Uncompressed as u8 => {\n\n decode_uncompressed(&src[1..], dst) // first byte not used\n\n }\n\n encoding if encoding == Encoding::Rle as u8 => decode_rle(&src[1..], dst),\n\n encoding if encoding == Encoding::Simple8b as u8 => decode_simple8b(&src[1..], dst),\n\n _ => Err(From::from(\"invalid block encoding\")),\n\n }\n\n}\n\n\n", "file_path": "influxdb_tsm/src/encoders/integer.rs", "rank": 98, "score": 146083.81097132657 }, { "content": "#[allow(clippy::many_single_char_names)]\n\npub fn encode(src: &[f64], dst: &mut Vec<u8>) -> 
Result<(), Box<dyn Error>> {\n\n dst.clear(); // reset buffer.\n\n if src.is_empty() {\n\n return Ok(());\n\n }\n\n if dst.capacity() < 9 {\n\n dst.reserve_exact(9 - dst.capacity()); // room for encoding type, block\n\n // size and a value\n\n }\n\n\n\n // write encoding type\n\n let mut n = 8; // N.B, this is the number of bits written\n\n dst.push((1 << 4) as u8); // write compression type\n\n\n\n // write the first value into the block\n\n let first = src[0];\n\n let mut prev = first.to_bits();\n\n dst.extend_from_slice(&prev.to_be_bytes());\n\n n += 64;\n\n\n", "file_path": "influxdb_tsm/src/encoders/float.rs", "rank": 99, "score": 146083.81097132657 } ]
Rust
http/src/util/buf_list.rs
aliemjay/xitca-web
b12188930663ab5d27aa0a25cbf29fc4ecc6a0f8
use std::{collections::VecDeque, io::IoSlice}; use crate::bytes::{Buf, BufMut, Bytes, BytesMut}; pub struct BufList<B> { bufs: VecDeque<B>, remaining: usize, } impl<B: Buf> Default for BufList<B> { fn default() -> Self { Self::new() } } impl<B: Buf> BufList<B> { #[inline] pub fn with_capacity(cap: usize) -> Self { Self { bufs: VecDeque::with_capacity(cap), remaining: 0, } } #[inline] pub fn new() -> Self { Self::with_capacity(0) } #[inline] pub fn push(&mut self, buf: B) { debug_assert!(buf.has_remaining()); self.remaining += buf.remaining(); self.bufs.push_back(buf); } #[inline] pub fn cnt(&self) -> usize { self.bufs.len() } } impl<B: Buf> Buf for BufList<B> { #[inline] fn remaining(&self) -> usize { self.remaining } #[inline] fn chunk(&self) -> &[u8] { self.bufs.front().map(Buf::chunk).unwrap_or_default() } #[inline] fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize { assert!(!dst.is_empty()); let mut vecs = 0; for buf in &self.bufs { vecs += buf.chunks_vectored(&mut dst[vecs..]); if vecs == dst.len() { break; } } vecs } #[inline] fn advance(&mut self, mut cnt: usize) { debug_assert!(self.remaining >= cnt); self.remaining -= cnt; while cnt > 0 { { let front = &mut self.bufs[0]; let rem = front.remaining(); if rem > cnt { front.advance(cnt); return; } else { front.advance(rem); cnt -= rem; } } self.bufs.pop_front(); } } #[inline] fn copy_to_bytes(&mut self, len: usize) -> Bytes { match self.bufs.front_mut() { Some(front) if front.remaining() == len => { let b = front.copy_to_bytes(len); self.remaining -= len; self.bufs.pop_front(); b } Some(front) if front.remaining() > len => { self.remaining -= len; front.copy_to_bytes(len) } _ => { assert!(len <= self.remaining(), "`len` greater than remaining"); let mut bm = BytesMut::with_capacity(len); bm.put(self.take(len)); bm.freeze() } } } } #[cfg(test)] mod tests { use std::ptr; use super::*; fn hello_world_buf() -> BufList<Bytes> { let bufs = vec![Bytes::from("Hello"), Bytes::from(" "), 
Bytes::from("World")]; let remaining = bufs.iter().map(Buf::remaining).sum(); BufList { bufs: bufs.into(), remaining, } } #[test] fn to_bytes_shorter() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(4); assert_eq!(start, "Hell"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b"o"); assert!(ptr::eq(old_ptr.wrapping_add(4), bufs.chunk().as_ptr())); assert_eq!(bufs.remaining(), 7); } #[test] fn to_bytes_eq() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(5); assert_eq!(start, "Hello"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b" "); assert_eq!(bufs.remaining(), 6); } #[test] fn to_bytes_longer() { let mut bufs = hello_world_buf(); let start = bufs.copy_to_bytes(7); assert_eq!(start, "Hello W"); assert_eq!(bufs.remaining(), 4); } #[test] fn one_long_buf_to_bytes() { let mut buf = BufList::new(); buf.push(b"Hello World" as &[_]); assert_eq!(buf.copy_to_bytes(5), "Hello"); assert_eq!(buf.chunk(), b" World"); } #[test] #[should_panic(expected = "`len` greater than remaining")] fn buf_to_bytes_too_many() { hello_world_buf().copy_to_bytes(42); } }
use std::{collections::VecDeque, io::IoSlice}; use crate::bytes::{Buf, BufMut, Bytes, BytesMut}; pub struct BufList<B> { bufs: VecDeque<B>, remaining: usize, } impl<B: Buf> Default for BufList<B> { fn default() -> Self { Self::new() } } impl<B: Buf> BufList<B> { #[inline] pub fn with_capacity(cap: usize) -> Self { Self { bufs: VecDeque::with_capacity(cap), remaining: 0, } } #[inline] pub fn new() -> Self { Self::with_capacity(0) } #[inline] pub fn push(&mut self, buf: B) { debug_assert!(buf.has_remaining()); self.remaining += buf.remaining(); self.bufs.push_back(buf); } #[inline] pub fn cnt(&self) -> usize { self.bufs.len() } } impl<B: Buf> Buf for BufList<B> { #[inline] fn remaining(&self) -> usize { self.remaining } #[inline] fn chunk(&self) -> &[u8] { self.bufs.front().map(Buf::chunk).unwrap_or_default() } #[inline] fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize { assert!(!dst.is_empty()); let mut vecs = 0; for buf in &self.bufs { vecs += buf.chunks_vectored(&mut dst[vecs..]); if vecs == dst.len() { break; } } vecs } #[inline] fn advance(&mut self, mut cnt: usize) { debug_assert!(self.remaining >= cnt); self.remaining -= cnt; while cnt > 0 { { let front = &mut self.bufs[0]; let rem = front.remaining(); if rem > cnt { front.advance(cnt); return; } els
#[inline] fn copy_to_bytes(&mut self, len: usize) -> Bytes { match self.bufs.front_mut() { Some(front) if front.remaining() == len => { let b = front.copy_to_bytes(len); self.remaining -= len; self.bufs.pop_front(); b } Some(front) if front.remaining() > len => { self.remaining -= len; front.copy_to_bytes(len) } _ => { assert!(len <= self.remaining(), "`len` greater than remaining"); let mut bm = BytesMut::with_capacity(len); bm.put(self.take(len)); bm.freeze() } } } } #[cfg(test)] mod tests { use std::ptr; use super::*; fn hello_world_buf() -> BufList<Bytes> { let bufs = vec![Bytes::from("Hello"), Bytes::from(" "), Bytes::from("World")]; let remaining = bufs.iter().map(Buf::remaining).sum(); BufList { bufs: bufs.into(), remaining, } } #[test] fn to_bytes_shorter() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(4); assert_eq!(start, "Hell"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b"o"); assert!(ptr::eq(old_ptr.wrapping_add(4), bufs.chunk().as_ptr())); assert_eq!(bufs.remaining(), 7); } #[test] fn to_bytes_eq() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(5); assert_eq!(start, "Hello"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b" "); assert_eq!(bufs.remaining(), 6); } #[test] fn to_bytes_longer() { let mut bufs = hello_world_buf(); let start = bufs.copy_to_bytes(7); assert_eq!(start, "Hello W"); assert_eq!(bufs.remaining(), 4); } #[test] fn one_long_buf_to_bytes() { let mut buf = BufList::new(); buf.push(b"Hello World" as &[_]); assert_eq!(buf.copy_to_bytes(5), "Hello"); assert_eq!(buf.chunk(), b" World"); } #[test] #[should_panic(expected = "`len` greater than remaining")] fn buf_to_bytes_too_many() { hello_world_buf().copy_to_bytes(42); } }
e { front.advance(rem); cnt -= rem; } } self.bufs.pop_front(); } }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn apply_mask(buf: &mut [u8], mask: [u8; 4]) {\n\n apply_mask_fast32(buf, mask)\n\n}\n\n\n\n/// A safe unoptimized mask application.\n", "file_path": "http-ws/src/mask.rs", "rank": 0, "score": 278887.4791281594 }, { "content": "#[inline]\n\npub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {\n\n let mask_u32 = u32::from_ne_bytes(mask);\n\n\n\n // SAFETY:\n\n //\n\n // https://github.com/snapview/tungstenite-rs/pull/126\n\n let (prefix, words, suffix) = unsafe { buf.align_to_mut::<u32>() };\n\n apply_mask_fallback(prefix, mask);\n\n let head = prefix.len() & 3;\n\n let mask_u32 = if head > 0 {\n\n if cfg!(target_endian = \"big\") {\n\n mask_u32.rotate_left(8 * head as u32)\n\n } else {\n\n mask_u32.rotate_right(8 * head as u32)\n\n }\n\n } else {\n\n mask_u32\n\n };\n\n for word in words.iter_mut() {\n\n *word ^= mask_u32;\n", "file_path": "http-ws/src/mask.rs", "rank": 1, "score": 274701.4287312919 }, { "content": "#[inline]\n\nfn apply_mask_fallback(buf: &mut [u8], mask: [u8; 4]) {\n\n for (i, byte) in buf.iter_mut().enumerate() {\n\n *byte ^= mask[i & 3];\n\n }\n\n}\n\n\n\n/// Faster version of `apply_mask()` which operates on 4-byte blocks.\n", "file_path": "http-ws/src/mask.rs", "rank": 2, "score": 232537.91410883586 }, { "content": "#[inline]\n\nfn encode_version_status_reason(buf: &mut BytesMut, version: Version, status: StatusCode) {\n\n // encode version, status code and reason\n\n match (version, status) {\n\n // happy path shortcut.\n\n (Version::HTTP_11, StatusCode::OK) => {\n\n buf.extend_from_slice(b\"HTTP/1.1 200 OK\\r\\n\");\n\n return;\n\n }\n\n (Version::HTTP_11, _) => {\n\n buf.extend_from_slice(b\"HTTP/1.1 \");\n\n }\n\n (Version::HTTP_10, _) => {\n\n buf.extend_from_slice(b\"HTTP/1.0 \");\n\n }\n\n _ => {\n\n debug!(target: \"h1_encode\", \"response with unexpected response version\");\n\n buf.extend_from_slice(b\"HTTP/1.1 \");\n\n }\n\n }\n\n\n", "file_path": "http/src/h1/proto/encode.rs", "rank": 3, 
"score": 202679.54463723148 }, { "content": "/// Hashes the `Sec-WebSocket-Key` header according to the WebSocket spec.\n\n///\n\n/// Result is a Base64 encoded byte array. `base64(sha1(input))` is always 28 bytes.\n\npub fn hash_key(key: &[u8]) -> [u8; 28] {\n\n let hash = {\n\n use sha1::Digest as _;\n\n\n\n let mut hasher = sha1::Sha1::new();\n\n\n\n hasher.update(key);\n\n hasher.update(WS_GUID);\n\n\n\n hasher.finalize()\n\n };\n\n\n\n let mut hash_b64 = [0; 28];\n\n let n = base64::encode_config_slice(&hash, base64::STANDARD, &mut hash_b64);\n\n assert_eq!(n, 28);\n\n\n\n hash_b64\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "http-ws/src/proto.rs", "rank": 4, "score": 184868.9099019339 }, { "content": "fn rustls_config(alpn_protocols: Vec<Vec<u8>>) -> io::Result<Arc<rustls::ServerConfig>> {\n\n let cert = fs::read(\"./cert/cert.pem\")?;\n\n let key = fs::read(\"./cert/key.pem\")?;\n\n\n\n let key = rustls_pemfile::pkcs8_private_keys(&mut &*key).unwrap().remove(0);\n\n let key = PrivateKey(key);\n\n\n\n let cert = rustls_pemfile::certs(&mut &*cert)\n\n .unwrap()\n\n .into_iter()\n\n .map(Certificate)\n\n .collect();\n\n\n\n let mut acceptor = rustls::ServerConfig::builder()\n\n .with_safe_defaults()\n\n .with_no_client_auth()\n\n .with_single_cert(cert, key)\n\n .unwrap();\n\n\n\n acceptor.alpn_protocols = alpn_protocols;\n\n\n\n Ok(Arc::new(acceptor))\n\n}\n", "file_path": "examples/hello-world.rs", "rank": 5, "score": 161193.1324902438 }, { "content": "/// A shortcut for generating a set of response types with given [Request](http::Request).\n\npub fn ws<B, T, E>(req: http::Request<B>) -> Result<WsOutput<B>, HandshakeError>\n\nwhere\n\n B: futures_core::Stream<Item = Result<T, E>>,\n\n T: AsRef<[u8]>,\n\n{\n\n let builder = handshake(req.method(), req.headers())?;\n\n\n\n let body = req.into_body();\n\n\n\n let decode = DecodeStream::new(body);\n\n let (tx, encode) = decode.encode_stream();\n\n\n\n let res = builder\n\n .body(encode)\n\n .expect(\"handshake 
function failed to generate correct Response Builder\");\n\n\n\n Ok((decode, res, tx))\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "http-ws/src/lib.rs", "rank": 6, "score": 157984.2574494183 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct Flags(u8);\n\n\n\nimpl Flags {\n\n const SERVER: u8 = 0b0000_0001;\n\n const CONTINUATION: u8 = 0b0000_0010;\n\n const W_CONTINUATION: u8 = 0b0000_0100;\n\n\n\n #[inline(always)]\n\n fn remove(&mut self, other: u8) {\n\n self.0 &= !other;\n\n }\n\n\n\n #[inline(always)]\n\n fn insert(&mut self, other: u8) {\n\n self.0 |= other;\n\n }\n\n\n\n #[inline(always)]\n\n const fn contains(&self, other: u8) -> bool {\n\n (self.0 & other) == other\n", "file_path": "http-ws/src/codec.rs", "rank": 7, "score": 134970.04386060114 }, { "content": "/// A specialized http/1 server on top of [test_server]\n\npub fn test_h1_server<F, I, B, E>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> I + Send + Clone + 'static,\n\n I: ServiceFactory<Request<h1::RequestBody>, Response = Response<ResponseBody<B>>, Config = (), InitError = ()>\n\n + 'static,\n\n B: Stream<Item = Result<Bytes, E>> + 'static,\n\n E: 'static,\n\n BodyError: From<E>,\n\n{\n\n test_server::<_, _, TcpStream>(move || {\n\n let f = factory();\n\n HttpServiceBuilder::h1(f)\n\n })\n\n}\n\n\n", "file_path": "test/src/lib.rs", "rank": 8, "score": 134818.1604535343 }, { "content": "/// A specialized http/3 server\n\npub fn test_h3_server<F, I, B, E>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> I + Send + Clone + 'static,\n\n I: ServiceFactory<Request<h3::RequestBody>, Response = Response<ResponseBody<B>>, Config = (), InitError = ()>\n\n + 'static,\n\n I::Error: fmt::Debug,\n\n B: Stream<Item = Result<Bytes, E>> + 'static,\n\n E: 'static,\n\n BodyError: From<E>,\n\n{\n\n let addr = std::net::UdpSocket::bind(\"127.0.0.1:0\")?.local_addr()?;\n\n\n\n let key = fs::read(\"../examples/cert/key.pem\")?;\n\n let cert = 
fs::read(\"../examples/cert/cert.pem\")?;\n\n\n\n let key = rustls_pemfile::pkcs8_private_keys(&mut &*key)?.remove(0);\n\n let key = rustls::PrivateKey(key);\n\n\n\n let cert = rustls_pemfile::certs(&mut &*cert)?\n\n .into_iter()\n", "file_path": "test/src/lib.rs", "rank": 9, "score": 134818.1604535343 }, { "content": "/// A specialized http/2 server on top of [test_server]\n\npub fn test_h2_server<F, I, B, E>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> I + Send + Clone + 'static,\n\n I: ServiceFactory<Request<h2::RequestBody>, Response = Response<ResponseBody<B>>, Config = (), InitError = ()>\n\n + 'static,\n\n I::Error: fmt::Debug,\n\n B: Stream<Item = Result<Bytes, E>> + 'static,\n\n E: 'static,\n\n BodyError: From<E>,\n\n{\n\n test_server::<_, _, TcpStream>(move || {\n\n let f = factory();\n\n let config = HttpServiceConfig::new()\n\n .first_request_timeout(Duration::from_millis(500))\n\n .tls_accept_timeout(Duration::from_millis(500))\n\n .keep_alive_timeout(Duration::from_millis(500));\n\n HttpServiceBuilder::h2(f).config(config)\n\n })\n\n}\n\n\n", "file_path": "test/src/lib.rs", "rank": 10, "score": 134818.1604535343 }, { "content": "// as special type for eof chunk when using transfer-encoding: chunked\n\ntype Eof = Chain<Chain<Bytes, Bytes>, &'static [u8]>;\n\n\n\nimpl<const BUF_LIMIT: usize> WriteBuf for ListBuf<EncodedBuf<Bytes, Eof>, BUF_LIMIT> {\n\n #[inline]\n\n fn backpressure(&self) -> bool {\n\n self.list.remaining() >= BUF_LIMIT || self.list.cnt() == BUF_LIST_CNT\n\n }\n\n\n\n #[inline]\n\n fn is_empty(&self) -> bool {\n\n self.list.remaining() == 0\n\n }\n\n\n\n fn write_head<F, T, E>(&mut self, func: F) -> Result<T, E>\n\n where\n\n F: FnOnce(&mut BytesMut) -> Result<T, E>,\n\n {\n\n let buf = &mut self.buf;\n\n let res = func(buf)?;\n\n let bytes = buf.split().freeze();\n", "file_path": "http/src/h1/proto/buf.rs", "rank": 11, "score": 132510.60016913593 }, { "content": " /// Helper trait for convert a [Request] to 
[Response].\n\n /// This is for re-use request's heap allocation and pass down the context data inside [Extensions]\n\n pub trait IntoResponse<B, ResB> {\n\n fn into_response(self, body: B) -> Response<ResponseBody<ResB>>;\n\n\n\n fn as_response(&mut self, body: B) -> Response<ResponseBody<ResB>>\n\n where\n\n Self: Default,\n\n {\n\n std::mem::take(self).into_response(body)\n\n }\n\n }\n\n\n\n impl<ReqB, B, ResB> IntoResponse<B, ResB> for Request<ReqB>\n\n where\n\n B: Into<ResponseBody<ResB>>,\n\n {\n\n fn into_response(self, body: B) -> Response<ResponseBody<ResB>> {\n\n let (\n\n request::Parts {\n\n mut headers,\n\n extensions,\n", "file_path": "http/src/lib.rs", "rank": 12, "score": 131991.55941164953 }, { "content": "/// A set of state for current request that are used after request's ownership is passed\n\n/// to service call.\n\nstruct ContextState(u8);\n\n\n\nimpl ContextState {\n\n /// Enable when current request has 100-continue header.\n\n const EXPECT: u8 = 0b_0001;\n\n\n\n /// Enable when current request is CONNECT method.\n\n const CONNECT: u8 = 0b_0010;\n\n\n\n const fn new() -> Self {\n\n Self(0)\n\n }\n\n\n\n fn insert(&mut self, other: u8) {\n\n self.0 |= other;\n\n }\n\n\n\n const fn contains(&self, other: u8) -> bool {\n\n (self.0 & other) == other\n\n }\n", "file_path": "http/src/h1/proto/context.rs", "rank": 13, "score": 130332.17286766092 }, { "content": "fn status_only<B>(status: StatusCode) -> Response<ResponseBody<B>> {\n\n Response::builder().status(status).body(Bytes::new().into()).unwrap()\n\n}\n", "file_path": "http/src/response.rs", "rank": 14, "score": 124985.48641866026 }, { "content": "struct Io<'a, St, W, E, const READ_BUF_LIMIT: usize, const WRITE_BUF_LIMIT: usize> {\n\n io: &'a mut St,\n\n read_buf: FlatBuf<READ_BUF_LIMIT>,\n\n write_buf: W,\n\n _err: PhantomData<E>,\n\n}\n\n\n\nimpl<'a, St, W, E, const READ_BUF_LIMIT: usize, const WRITE_BUF_LIMIT: usize>\n\n Io<'a, St, W, E, READ_BUF_LIMIT, WRITE_BUF_LIMIT>\n\nwhere\n\n St: 
AsyncIo,\n\n W: WriteBuf,\n\n{\n\n fn new(io: &'a mut St, write_buf: W) -> Self {\n\n Self {\n\n io,\n\n read_buf: FlatBuf::new(),\n\n write_buf,\n\n _err: PhantomData,\n\n }\n", "file_path": "http/src/h1/proto/dispatcher.rs", "rank": 15, "score": 124932.13824133022 }, { "content": "/// Predicate to match *PATCH* HTTP method.\n\npub fn Patch() -> MethodGuard {\n\n MethodGuard(http::Method::PATCH)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 16, "score": 123288.44481126775 }, { "content": "/// Predicate to match *DELETE* HTTP method.\n\npub fn Delete() -> MethodGuard {\n\n MethodGuard(http::Method::DELETE)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 17, "score": 123288.44481126775 }, { "content": "/// Predicate to match *CONNECT* HTTP method.\n\npub fn Connect() -> MethodGuard {\n\n MethodGuard(http::Method::CONNECT)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 18, "score": 123288.44481126775 }, { "content": "/// Predicate to match *HEAD* HTTP method.\n\npub fn Head() -> MethodGuard {\n\n MethodGuard(http::Method::HEAD)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 19, "score": 123288.44481126775 }, { "content": "/// Predicate to match *OPTIONS* HTTP method.\n\npub fn Options() -> MethodGuard {\n\n MethodGuard(http::Method::OPTIONS)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 20, "score": 123288.44481126775 }, { "content": "/// Predicate to match *POST* HTTP method.\n\npub fn Post() -> MethodGuard {\n\n MethodGuard(http::Method::POST)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 21, "score": 123288.44481126775 }, { "content": "/// Guard to match *GET* HTTP method.\n\npub fn Get() -> MethodGuard {\n\n MethodGuard(http::Method::GET)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 22, "score": 123288.44481126775 }, { "content": "/// Predicate to match *TRACE* HTTP method.\n\npub fn Trace() -> MethodGuard {\n\n MethodGuard(http::Method::TRACE)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 23, "score": 
123288.44481126775 }, { "content": "/// Predicate to match *PUT* HTTP method.\n\npub fn Put() -> MethodGuard {\n\n MethodGuard(http::Method::PUT)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 24, "score": 123288.44481126775 }, { "content": "struct FnGuard<F: Fn(&Parts) -> bool>(F);\n\n\n\nimpl<F> Guard for FnGuard<F>\n\nwhere\n\n F: Fn(&Parts) -> bool,\n\n{\n\n fn check(&self, req: &Parts) -> bool {\n\n (self.0)(req)\n\n }\n\n}\n\n\n\nimpl<F> Guard for F\n\nwhere\n\n F: Fn(&Parts) -> bool,\n\n{\n\n fn check(&self, req: &Parts) -> bool {\n\n (self)(req)\n\n }\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 25, "score": 121832.65007992426 }, { "content": "/// Create WebSocket handshake response.\n\n///\n\n/// This function returns handshake `http::response::Builder`, ready to send to peer.\n\nfn handshake_response(key: &[u8]) -> Builder {\n\n let key = proto::hash_key(key);\n\n\n\n Response::builder()\n\n .status(StatusCode::SWITCHING_PROTOCOLS)\n\n .header(header::UPGRADE, \"websocket\")\n\n .header(header::CONNECTION, \"upgrade\")\n\n .header(\n\n header::SEC_WEBSOCKET_ACCEPT,\n\n // key is known to be header value safe ascii\n\n HeaderValue::from_bytes(&key).unwrap(),\n\n )\n\n}\n\n\n\n#[cfg(feature = \"stream\")]\n\nmod stream;\n\n\n\n#[cfg(feature = \"stream\")]\n\npub use self::stream::{DecodeError, DecodeStream, EncodeStream};\n\n\n\n#[cfg(feature = \"stream\")]\n\npub type WsOutput<B> = (\n\n DecodeStream<B>,\n\n Response<EncodeStream>,\n\n tokio::sync::mpsc::Sender<Message>,\n\n);\n\n\n\n#[cfg(feature = \"stream\")]\n", "file_path": "http-ws/src/lib.rs", "rank": 26, "score": 118464.27495983848 }, { "content": "/// Create guard object for supplied function.\n\npub fn fn_guard<F>(f: F) -> impl Guard\n\nwhere\n\n F: Fn(&Parts) -> bool,\n\n{\n\n FnGuard(f)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 27, "score": 117797.72362202354 }, { "content": "/// Trait to generic over different types of write buffer strategy.\n\npub trait WriteBuf 
{\n\n fn backpressure(&self) -> bool;\n\n\n\n fn is_empty(&self) -> bool;\n\n\n\n fn write_head<F, T, E>(&mut self, func: F) -> Result<T, E>\n\n where\n\n F: FnOnce(&mut BytesMut) -> Result<T, E>;\n\n\n\n fn write_static(&mut self, bytes: &'static [u8]);\n\n\n\n fn write_buf(&mut self, bytes: Bytes);\n\n\n\n fn write_chunk(&mut self, bytes: Bytes);\n\n\n\n fn try_write_io<Io: AsyncIo, E>(&mut self, io: &mut Io) -> Result<(), Error<E>>;\n\n}\n\n\n\npub struct FlatBuf<const BUF_LIMIT: usize>(BytesMut);\n\n\n", "file_path": "http/src/h1/proto/buf.rs", "rank": 28, "score": 110902.61552904636 }, { "content": "/// Predicate to match specified HTTP method.\n\npub fn Method(method: http::Method) -> MethodGuard {\n\n MethodGuard(method)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 29, "score": 108662.16317882405 }, { "content": "pub fn fn_service<F, Req, Fut, Res, Err>(f: F) -> FnServiceFactory<F>\n\nwhere\n\n F: Fn(Req) -> Fut + Clone,\n\n Fut: Future<Output = Result<Res, Err>>,\n\n{\n\n FnServiceFactory { f }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct FnServiceFactory<F: Clone> {\n\n f: F,\n\n}\n\n\n\nimpl<F, Req, Fut, Res, Err> ServiceFactory<Req> for FnServiceFactory<F>\n\nwhere\n\n F: Fn(Req) -> Fut + Clone,\n\n Fut: Future<Output = Result<Res, Err>>,\n\n{\n\n type Response = Res;\n\n type Error = Err;\n", "file_path": "service/src/factory/function.rs", "rank": 30, "score": 106540.89080715025 }, { "content": "/// Return guard that matches if all of the supplied guards.\n\npub fn All<F: Guard + 'static>(guard: F) -> AllGuard {\n\n AllGuard(vec![Box::new(guard)])\n\n}\n\n\n\n/// Matches if all of supplied guards.\n\npub struct AllGuard(Vec<Box<dyn Guard>>);\n\n\n\nimpl AllGuard {\n\n /// Add new guard to the list of guards to check\n\n pub fn and<F: Guard + 'static>(mut self, guard: F) -> Self {\n\n self.0.push(Box::new(guard));\n\n self\n\n }\n\n}\n\n\n\nimpl Guard for AllGuard {\n\n fn check(&self, req: &Parts) -> bool {\n\n for p in &self.0 {\n\n if 
!p.check(req) {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 31, "score": 104612.2888288511 }, { "content": "/// Return guard that matches if any of supplied guards.\n\npub fn Any<F: Guard + 'static>(guard: F) -> AnyGuard {\n\n AnyGuard(vec![Box::new(guard)])\n\n}\n\n\n\n/// Matches any of supplied guards.\n\npub struct AnyGuard(Vec<Box<dyn Guard>>);\n\n\n\nimpl AnyGuard {\n\n /// Add guard to a list of guards to check\n\n pub fn or<F: Guard + 'static>(mut self, guard: F) -> Self {\n\n self.0.push(Box::new(guard));\n\n self\n\n }\n\n}\n\n\n\nimpl Guard for AnyGuard {\n\n fn check(&self, req: &Parts) -> bool {\n\n for p in &self.0 {\n\n if p.check(req) {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 32, "score": 104612.2888288511 }, { "content": "/// Return guard that matches if supplied guard does not match.\n\npub fn Not<F: Guard + 'static>(guard: F) -> NotGuard {\n\n NotGuard(Box::new(guard))\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct NotGuard(Box<dyn Guard>);\n\n\n\nimpl Guard for NotGuard {\n\n fn check(&self, req: &Parts) -> bool {\n\n !self.0.check(req)\n\n }\n\n}\n\n\n\n/// HTTP method guard.\n\n#[doc(hidden)]\n\npub struct MethodGuard(http::Method);\n\n\n\nimpl Guard for MethodGuard {\n\n fn check(&self, req: &Parts) -> bool {\n\n req.method == self.0\n\n }\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 33, "score": 104612.24031516456 }, { "content": "pub fn Host<H: AsRef<str>>(host: H) -> HostGuard {\n\n HostGuard(host.as_ref().to_string(), None)\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 34, "score": 100929.16767879803 }, { "content": "#[cfg(any(feature = \"br\", feature = \"gz\", feature = \"de\"))]\n\nfn update_header(headers: &mut header::HeaderMap, value: &'static str) {\n\n headers.insert(header::CONTENT_ENCODING, header::HeaderValue::from_static(value));\n\n headers.remove(header::CONTENT_LENGTH);\n\n 
headers.insert(header::TRANSFER_ENCODING, header::HeaderValue::from_static(\"chunked\"));\n\n}\n\n\n\npub struct ContentEncoder {\n\n encoder: _ContentEncoder,\n\n}\n\n\n", "file_path": "http-encoding/src/encoder.rs", "rank": 35, "score": 99924.4166580297 }, { "content": "#[proc_macro_attribute]\n\npub fn service_impl(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemImpl);\n\n\n\n // Collect type path from impl.\n\n let service_ty = match input.self_ty.as_ref() {\n\n Type::Path(path) => path,\n\n _ => panic!(\"service_impl macro must be used on a TypePath\"),\n\n };\n\n\n\n // collect generics.\n\n let generic_ty = &input.generics.params;\n\n let where_clause = &input.generics.where_clause;\n\n\n\n // find methods from impl.\n\n let new_service_impl =\n\n find_async_method(&input.items, \"new_service\").expect(\"new_service method can not be located\");\n\n\n\n // collect ServiceFactory, Config and InitError type from new_service_impl\n\n let mut inputs = new_service_impl.sig.inputs.iter();\n\n\n", "file_path": "http-codegen/src/lib.rs", "rank": 36, "score": 97993.19843615295 }, { "content": "#[proc_macro_attribute]\n\npub fn middleware_impl(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemImpl);\n\n\n\n // Collect type path from impl.\n\n let middleware_ty = match input.self_ty.as_ref() {\n\n Type::Path(path) => path,\n\n _ => panic!(\"middleware_impl macro must be used on a TypePath\"),\n\n };\n\n\n\n // collect generics.\n\n let generic_ty = &input.generics.params;\n\n let where_clause = &input.generics.where_clause;\n\n\n\n // find methods from impl.\n\n let new_transform_impl =\n\n find_async_method(&input.items, \"new_transform\").expect(\"new_transform method can not be located\");\n\n\n\n // collect ServiceFactory, Config and InitError type from new_transform_impl\n\n let mut inputs = new_transform_impl.sig.inputs.iter();\n\n\n", 
"file_path": "http-codegen/src/lib.rs", "rank": 37, "score": 97993.19843615295 }, { "content": "/// Return predicate that matches if request contains specified header and\n\n/// value.\n\npub fn Header(name: &'static str, value: &'static str) -> HeaderGuard {\n\n HeaderGuard(\n\n header::HeaderName::from_static(name),\n\n header::HeaderValue::from_static(value),\n\n )\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct HeaderGuard(header::HeaderName, header::HeaderValue);\n\n\n\nimpl Guard for HeaderGuard {\n\n fn check(&self, req: &Parts) -> bool {\n\n if let Some(val) = req.headers.get(&self.0) {\n\n return val == self.1;\n\n }\n\n false\n\n }\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 38, "score": 97514.5471521858 }, { "content": "/// Verify WebSocket handshake request and create handshake response.\n\npub fn handshake(method: &Method, headers: &HeaderMap) -> Result<Builder, HandshakeError> {\n\n let key = verify_handshake(method, headers)?;\n\n let builder = handshake_response(key);\n\n Ok(builder)\n\n}\n\n\n", "file_path": "http-ws/src/lib.rs", "rank": 39, "score": 94358.77332954577 }, { "content": "/// Prepare a request with given Uri.\n\n/// After process the request would be ready to be sent to server for websocket connection.\n\npub fn client_request_from_uri<U, E>(uri: U) -> Result<Request<()>, E>\n\nwhere\n\n Uri: TryFrom<U, Error = E>,\n\n{\n\n let uri = uri.try_into()?;\n\n let mut req = Request::new(());\n\n *req.uri_mut() = uri;\n\n\n\n req.headers_mut()\n\n .insert(header::UPGRADE, HeaderValue::from_static(\"websocket\"));\n\n req.headers_mut()\n\n .insert(header::CONNECTION, HeaderValue::from_static(\"upgrade\"));\n\n req.headers_mut()\n\n .insert(header::SEC_WEBSOCKET_VERSION, HeaderValue::from_static(\"13\"));\n\n\n\n let sec_key = rand::random::<[u8; 16]>();\n\n let key = base64::encode(&sec_key);\n\n\n\n req.headers_mut()\n\n .insert(header::SEC_WEBSOCKET_KEY, HeaderValue::try_from(key.as_str()).unwrap());\n\n\n\n Ok(req)\n\n}\n\n\n", 
"file_path": "http-ws/src/lib.rs", "rank": 40, "score": 94358.77332954577 }, { "content": "/// Verify WebSocket handshake request and return `SEC_WEBSOCKET_KEY` header value as `&[u8]`\n\nfn verify_handshake<'a>(method: &'a Method, headers: &'a HeaderMap) -> Result<&'a [u8], HandshakeError> {\n\n // WebSocket accepts only GET\n\n if method != Method::GET {\n\n return Err(HandshakeError::GetMethodRequired);\n\n }\n\n\n\n // Check for \"Upgrade\" header\n\n let has_upgrade_hd = headers\n\n .get(header::UPGRADE)\n\n .and_then(|hdr| hdr.to_str().ok())\n\n .filter(|s| s.to_ascii_lowercase().contains(\"websocket\"))\n\n .is_some();\n\n\n\n if !has_upgrade_hd {\n\n return Err(HandshakeError::NoWebsocketUpgrade);\n\n }\n\n\n\n // Check for \"Connection\" header\n\n let has_connection_hd = headers\n\n .get(header::CONNECTION)\n", "file_path": "http-ws/src/lib.rs", "rank": 41, "score": 91883.03044606923 }, { "content": "/// A general test server for any given service type that accept the connection from\n\n/// xitca-server\n\npub fn test_server<F, T, Req>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> T + Send + Clone + 'static,\n\n T: ServiceFactory<Req, Config = ()>,\n\n Req: FromStream + Send + 'static,\n\n{\n\n let lst = TcpListener::bind(\"127.0.0.1:0\")?;\n\n\n\n let addr = lst.local_addr()?;\n\n\n\n let handle = Builder::new()\n\n .worker_threads(1)\n\n .server_threads(1)\n\n .disable_signal()\n\n .listen::<_, _, Req>(\"test_server\", lst, factory)?\n\n .build();\n\n\n\n Ok(TestServerHandle { addr, handle })\n\n}\n\n\n", "file_path": "test/src/lib.rs", "rank": 42, "score": 91339.49299259062 }, { "content": "// generate a default PatIdent\n\nfn default_pat_ident(ident: &str) -> PatIdent {\n\n PatIdent {\n\n attrs: Vec::with_capacity(0),\n\n by_ref: None,\n\n mutability: None,\n\n ident: Ident::new(ident, Span::call_site()),\n\n subpat: None,\n\n }\n\n}\n", "file_path": "http-codegen/src/lib.rs", "rank": 43, "score": 90454.038080056 }, { 
"content": "// Extract Result<T, E> types from a return type of function.\n\nfn extract_res_ty(ret: &ReturnType) -> (&Type, &Type) {\n\n if let ReturnType::Type(_, ty) = ret {\n\n if let Type::Path(path) = ty.as_ref() {\n\n let seg = path.path.segments.first().unwrap();\n\n if seg.ident.to_string().as_str() == \"Result\" {\n\n if let PathArguments::AngleBracketed(ref arg) = seg.arguments {\n\n if let (Some(GenericArgument::Type(ok_ty)), Some(GenericArgument::Type(err_ty))) =\n\n (arg.args.first(), arg.args.last())\n\n {\n\n return (ok_ty, err_ty);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n panic!(\"new_service method must output Result<Self, <InitError>>\")\n\n}\n\n\n", "file_path": "http-codegen/src/lib.rs", "rank": 44, "score": 87304.22743409677 }, { "content": "struct Test;\n\n\n", "file_path": "test/tests/macro.rs", "rank": 45, "score": 72123.60400707716 }, { "content": "#[derive(Clone)]\n\nstruct TestFactory;\n\n\n\n#[xitca_http_codegen::service_impl]\n\nimpl Test {\n\n async fn new_service(_: &TestFactory, mut cfg123: String) -> Result<Self, Infallible> {\n\n cfg123.push_str(\"+da_gong_ren\");\n\n assert_eq!(cfg123.as_str(), \"996+da_gong_ren\");\n\n Ok(Test)\n\n }\n\n\n\n async fn ready(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n Err(\"251\".into())\n\n }\n\n\n\n async fn call(&self, req: String) -> Result<usize, Box<dyn std::error::Error>> {\n\n assert_eq!(req.as_str(), \"007\");\n\n\n\n Ok(233)\n\n }\n\n}\n\n\n", "file_path": "test/tests/macro.rs", "rank": 46, "score": 70769.01091459244 }, { "content": "#[derive(Clone)]\n\nstruct TestMiddleware;\n\n\n", "file_path": "test/tests/macro.rs", "rank": 47, "score": 70769.01091459244 }, { "content": "#[derive(Clone, Copy)]\n\nstruct ConnState {\n\n born: Instant,\n\n idle_since: Instant,\n\n}\n\n\n\nimpl ConnState {\n\n fn new() -> Self {\n\n let now = Instant::now();\n\n\n\n Self {\n\n born: now,\n\n idle_since: now,\n\n }\n\n }\n\n\n\n fn update_idle(&mut self) {\n\n self.idle_since = Instant::now();\n\n 
}\n\n\n\n fn is_expired(&self) -> bool {\n", "file_path": "client/src/pool.rs", "rank": 48, "score": 70769.01091459244 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n len: usize,\n\n eof: bool,\n\n err: Option<BodyError>,\n\n items: VecDeque<Bytes>,\n\n task: Option<Waker>,\n\n io_task: Option<Waker>,\n\n}\n\n\n\nimpl Inner {\n\n fn new(eof: bool) -> Self {\n\n Inner {\n\n eof,\n\n len: 0,\n\n err: None,\n\n items: VecDeque::new(),\n\n task: None,\n\n io_task: None,\n\n }\n\n }\n", "file_path": "http/src/h1/body.rs", "rank": 49, "score": 70769.01091459244 }, { "content": "struct Next<'a> {\n\n stream: &'a mut Incoming,\n\n}\n\n\n\nimpl Future for Next<'_> {\n\n type Output = Option<<Incoming as futures_core::Stream>::Item>;\n\n\n\n #[inline(always)]\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n futures_core::Stream::poll_next(Pin::new(&mut self.get_mut().stream), cx)\n\n }\n\n}\n\n\n\n/// Wrapper type for [`Connecting`].\n\n///\n\n/// Naming is to keep consistent with `TcpStream` / `UnixStream`.\n\npub struct UdpStream {\n\n connecting: Connecting,\n\n}\n\n\n", "file_path": "io/src/h3.rs", "rank": 50, "score": 69809.57520586024 }, { "content": "struct LimitInner {\n\n current: Cell<usize>,\n\n waker: Cell<Option<Waker>>,\n\n}\n\n\n\npub(crate) struct LimitGuard(Limit);\n\n\n\nimpl Drop for LimitGuard {\n\n fn drop(&mut self) {\n\n let current = self.0.inner.current.get();\n\n if current == self.0.limit {\n\n if let Some(waker) = self.0.inner.waker.take() {\n\n waker.wake();\n\n }\n\n }\n\n self.0.inner.current.set(current - 1);\n\n }\n\n}\n\n\n\nimpl Limit {\n", "file_path": "server/src/worker/limit.rs", "rank": 51, "score": 69498.22164499646 }, { "content": "struct WorkerInner {\n\n listener: Arc<Listener>,\n\n service: RcWorkerService,\n\n limit: Limit,\n\n}\n\n\n\nimpl WorkerInner {\n\n fn spawn_handling(self) -> JoinHandle<()> {\n\n tokio::task::spawn_local(async move {\n\n loop {\n\n let guard = 
self.limit.ready().await;\n\n\n\n match self.listener.accept().await {\n\n Ok(stream) => self.service.clone().call((guard, stream)),\n\n Err(ref e) if connection_error(e) => continue,\n\n // TODO: This error branch is used to detect Accept thread exit.\n\n // Should use other notifier other than error.\n\n Err(ref e) if fatal_error(e) => return,\n\n Err(e) => {\n\n error!(\"Error accepting connection: {}\", e);\n", "file_path": "server/src/worker/mod.rs", "rank": 52, "score": 69498.22164499646 }, { "content": "/// Http/1 dispatcher\n\nstruct Dispatcher<\n\n 'a,\n\n St,\n\n S,\n\n ReqB,\n\n X,\n\n W,\n\n D,\n\n const HEADER_LIMIT: usize,\n\n const READ_BUF_LIMIT: usize,\n\n const WRITE_BUF_LIMIT: usize,\n\n> where\n\n S: Service<Request<ReqB>>,\n\n{\n\n io: Io<'a, St, W, S::Error, READ_BUF_LIMIT, WRITE_BUF_LIMIT>,\n\n timer: Pin<&'a mut KeepAlive>,\n\n ka_dur: Duration,\n\n ctx: Context<'a, D, HEADER_LIMIT>,\n\n expect: &'a X,\n\n service: &'a S,\n\n _phantom: PhantomData<ReqB>,\n\n}\n\n\n", "file_path": "http/src/h1/proto/dispatcher.rs", "rank": 53, "score": 69498.22164499646 }, { "content": "#[inline]\n\n#[cold]\n\nfn cold() {}\n\n\n\n#[inline]\n\npub(crate) fn unlikely() {\n\n cold();\n\n}\n", "file_path": "http/src/util/hint.rs", "rank": 54, "score": 67931.3644545214 }, { "content": "struct CallImpl<'a> {\n\n req_ident: PatIdent,\n\n req_ty: &'a Type,\n\n res_ty: &'a Type,\n\n err_ty: &'a Type,\n\n call_stmts: &'a [Stmt],\n\n}\n\n\n\nimpl<'a> CallImpl<'a> {\n\n fn from_items(items: &'a [ImplItem]) -> Self {\n\n // collect Request, Response and Error type.\n\n let call_impl = find_async_method(items, \"call\").expect(\"call method can not be located\");\n\n\n\n let mut inputs = call_impl.sig.inputs.iter();\n\n // ignore receiver and move on.\n\n // TODO: Check the first fn arg and make sure it's a Receiver of &Self.\n\n let _ = inputs.next().unwrap();\n\n\n\n let (req_ident, req_ty) = match inputs.next().unwrap() {\n\n FnArg::Receiver(_) => panic!(\"call method 
does not accept Self as second argument\"),\n", "file_path": "http-codegen/src/lib.rs", "rank": 55, "score": 67184.19284377953 }, { "content": "struct WebSocketInner<'c> {\n\n codec: Codec,\n\n eof: bool,\n\n send_buf: BytesMut,\n\n recv_buf: BytesMut,\n\n body: h1::body::ResponseBody<ConnectionWithKey<'c>>,\n\n}\n\n\n\nimpl Sink<Message> for WebSocketInner<'_> {\n\n type Error = Error;\n\n\n\n fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n // TODO: set up a meaningful backpressure limit for send buf.\n\n if !self.as_mut().get_mut().send_buf.chunk().is_empty() {\n\n self.poll_flush(cx)\n\n } else {\n\n Poll::Ready(Ok(()))\n\n }\n\n }\n\n\n", "file_path": "client/src/ws.rs", "rank": 56, "score": 67184.19284377953 }, { "content": "struct PooledConn<C> {\n\n conn: C,\n\n state: ConnState,\n\n}\n\n\n\nimpl<C> Deref for PooledConn<C> {\n\n type Target = C;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.conn\n\n }\n\n}\n\n\n\nimpl<C> DerefMut for PooledConn<C> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.conn\n\n }\n\n}\n\n\n", "file_path": "client/src/pool.rs", "rank": 57, "score": 67184.19284377953 }, { "content": "struct ReadyImpl<'a> {\n\n ready_stmts: &'a [Stmt],\n\n}\n\n\n\nimpl<'a> ReadyImpl<'a> {\n\n fn from_items(items: &'a [ImplItem]) -> Self {\n\n // make sure async fn ready is there and move on.\n\n // TODO: Check the first fn arg and make sure it's a Receiver of &Self.\n\n\n\n let ready_impl = find_async_method(items, \"ready\").expect(\"ready method can not be located\");\n\n let ready_stmts = &ready_impl.block.stmts;\n\n\n\n Self { ready_stmts }\n\n }\n\n}\n\n\n", "file_path": "http-codegen/src/lib.rs", "rank": 58, "score": 67184.19284377953 }, { "content": "struct RequestBodyHandle {\n\n decoder: TransferCoding,\n\n sender: RequestBodySender,\n\n}\n\n\n", "file_path": "http/src/h1/proto/dispatcher.rs", "rank": 59, "score": 67178.7591248391 }, { "content": "/// Trait defines 
resource guards. Guards are used for route selection.\n\n///\n\n/// Guards can not modify the request object. But it is possible\n\n/// to store extra attributes on a request by using the `Extensions` container.\n\n/// Extensions containers are available via the `RequestHead::extensions()` method.\n\npub trait Guard {\n\n /// Check if request matches predicate\n\n fn check(&self, req: &Parts) -> bool;\n\n}\n\n\n\nimpl<G> Guard for Rc<G>\n\nwhere\n\n G: Guard + ?Sized,\n\n{\n\n fn check(&self, req: &Parts) -> bool {\n\n (**self).check(req)\n\n }\n\n}\n\n\n", "file_path": "web/src/guard.rs", "rank": 60, "score": 66002.72065441324 }, { "content": "/// A helper trait for get a protocol from certain types.\n\npub trait AsVersion {\n\n fn as_version(&self) -> Version;\n\n\n\n fn from_alpn<B: AsRef<[u8]>>(proto: B) -> Version {\n\n if proto.as_ref().windows(2).any(|window| window == b\"h2\") {\n\n Version::HTTP_2\n\n } else {\n\n Version::HTTP_11\n\n }\n\n }\n\n}\n\n\n\nimpl AsVersion for xitca_io::net::Stream {\n\n #[inline]\n\n fn as_version(&self) -> Version {\n\n match *self {\n\n Self::Tcp(ref tcp) => tcp.as_version(),\n\n #[cfg(unix)]\n\n Self::Unix(..) 
=> Version::HTTP_11,\n\n #[cfg(feature = \"http3\")]\n", "file_path": "http/src/version.rs", "rank": 61, "score": 65997.8978393939 }, { "content": "pub trait Address {\n\n /// Get hostname part.\n\n fn hostname(&self) -> &str;\n\n\n\n /// Get optional port part.\n\n fn port(&self) -> Option<u16> {\n\n None\n\n }\n\n}\n\n\n\nimpl Address for Uri<'_> {\n\n fn hostname(&self) -> &str {\n\n self.host().unwrap_or(\"\")\n\n }\n\n\n\n fn port(&self) -> Option<u16> {\n\n match self.port_u16() {\n\n Some(port) => Some(port),\n\n None => scheme_to_port(self.scheme_str()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/src/connect.rs", "rank": 62, "score": 65997.8978393939 }, { "content": "/// Trait for multiplex connection.\n\n/// HTTP2 and HTTP3 connections are supposed to be multiplexed on single TCP connection.\n\npub trait Multiplex {\n\n /// Get a ownership from mut reference.\n\n ///\n\n /// # Panics:\n\n /// When called on connection type that are not multiplexable.\n\n fn multiplex(&mut self) -> Self;\n\n\n\n /// Return true for connection that can be multiplexed.\n\n fn is_multiplexable(&self) -> bool;\n\n}\n\n\n\nimpl Multiplex for Connection {\n\n fn multiplex(&mut self) -> Self {\n\n match *self {\n\n #[cfg(feature = \"http2\")]\n\n Self::H2(ref conn) => Self::H2(conn.clone()),\n\n _ => unreachable!(\"Connection is not multiplexable\"),\n\n }\n\n }\n\n\n\n fn is_multiplexable(&self) -> bool {\n\n match *self {\n\n #[cfg(feature = \"http2\")]\n\n Self::H2(_) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n", "file_path": "client/src/connection.rs", "rank": 63, "score": 65997.8978393939 }, { "content": "struct H2PingPong<'a> {\n\n on_flight: bool,\n\n keep_alive: Pin<&'a mut KeepAlive>,\n\n ping_pong: PingPong,\n\n date: &'a DateTimeHandle,\n\n ka_dur: Duration,\n\n}\n\n\n\nimpl Future for H2PingPong<'_> {\n\n type Output = Result<(), ::h2::Error>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let this = self.get_mut();\n\n\n\n 
loop {\n\n if this.on_flight {\n\n // When have on flight ping pong. poll pong and and keep alive timer.\n\n // on success pong received update keep alive timer to determine the next timing of\n\n // ping pong.\n\n match this.ping_pong.poll_pong(cx)? {\n", "file_path": "http/src/h2/proto/dispatcher.rs", "rank": 64, "score": 64864.73032362216 }, { "content": "/// Trait for getting current date/time.\n\n///\n\n/// This is usally used by a low resolution of timer to reduce frequent syscall to OS.\n\npub trait DateTime {\n\n /// The size hint of slice by Self::date method.\n\n const DATE_VALUE_LENGTH: usize;\n\n\n\n /// closure would receive byte slice representation of [HttpDate].\n\n fn with_date<F, O>(&self, f: F) -> O\n\n where\n\n F: FnOnce(&[u8]) -> O;\n\n\n\n fn now(&self) -> Instant;\n\n}\n\n\n\n/// Struct with Date update periodically at 500 milli seconds interval.\n\npub(crate) struct DateTimeService {\n\n state: Rc<RefCell<DateTimeState>>,\n\n handle: JoinHandle<()>,\n\n}\n\n\n\nimpl Drop for DateTimeService {\n\n fn drop(&mut self) {\n", "file_path": "http/src/date.rs", "rank": 65, "score": 64737.865331073546 }, { "content": "pub trait FromStream {\n\n fn from_stream(stream: Stream) -> Self;\n\n}\n\n\n\nimpl FromStream for Stream {\n\n fn from_stream(stream: Stream) -> Self {\n\n stream\n\n }\n\n}\n\n\n\nimpl FromStream for TcpStream {\n\n fn from_stream(stream: Stream) -> Self {\n\n match stream {\n\n Stream::Tcp(tcp) => tcp,\n\n _ => unreachable!(\"Can not be casted to TcpStream\"),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(unix)]\n", "file_path": "server/src/net/mod.rs", "rank": 66, "score": 64733.50711333863 }, { "content": "fn worker_name() -> String {\n\n thread::current()\n\n .name()\n\n .map(ToString::to_string)\n\n .unwrap_or_else(|| String::from(\"xitca-server-worker\"))\n\n}\n\n\n", "file_path": "server/src/worker/mod.rs", "rank": 67, "score": 63364.58829148185 }, { "content": "struct 
TestMiddlewareService<S>(S);\n\n\n\n#[xitca_http_codegen::middleware_impl]\n\nimpl<S> TestMiddlewareService<S>\n\nwhere\n\n S: Service<String, Error = Box<dyn std::error::Error>, Response = usize>,\n\n{\n\n async fn new_transform(_m: &TestMiddleware, service: S) -> Result<Self, Infallible> {\n\n Ok(TestMiddlewareService(service))\n\n }\n\n\n\n async fn ready(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n self.0.ready().await\n\n }\n\n\n\n async fn call(&self, req: String) -> Result<usize, Box<dyn std::error::Error>> {\n\n assert_eq!(req.as_str(), \"007\");\n\n\n\n self.0.call(req).await\n\n }\n", "file_path": "test/tests/macro.rs", "rank": 68, "score": 62885.35824497983 }, { "content": "pub trait Service<Req> {\n\n type Response;\n\n\n\n type Error;\n\n\n\n type Ready<'f>: Future<Output = Result<(), Self::Error>>\n\n where\n\n Self: 'f;\n\n\n\n type Future<'f>: Future<Output = Result<Self::Response, Self::Error>>\n\n where\n\n Self: 'f;\n\n\n\n fn ready(&self) -> Self::Ready<'_>;\n\n\n\n fn call(&self, req: Req) -> Self::Future<'_>;\n\n}\n\n\n\nmacro_rules! 
impl_alloc {\n\n ($alloc: ident) => {\n", "file_path": "service/src/service/mod.rs", "rank": 69, "score": 61728.335870470146 }, { "content": "pub trait ServiceFactory<Req> {\n\n /// Responses given by the created services.\n\n type Response;\n\n\n\n /// Errors produced by the created services.\n\n type Error;\n\n\n\n /// Service factory configuration.\n\n type Config;\n\n\n\n /// The kind of `Service` created by this factory.\n\n type Service: Service<Req, Response = Self::Response, Error = Self::Error>;\n\n\n\n /// Errors potentially raised while building a service.\n\n type InitError;\n\n\n\n /// The future of the `Service` instance.g\n\n type Future: Future<Output = Result<Self::Service, Self::InitError>>;\n\n\n\n /// Create and return a new service asynchronously.\n", "file_path": "service/src/factory/mod.rs", "rank": 70, "score": 60609.066558139486 }, { "content": "/// Trait for custom resolver.\n\n///\n\n/// # Examples\n\n/// ```rust\n\n/// use std::net::SocketAddr;\n\n///\n\n/// use xitca_client::{error::Error, ClientBuilder, Resolve};\n\n///\n\n/// struct MyResolver;\n\n///\n\n/// #[async_trait::async_trait]\n\n/// impl Resolve for MyResolver {\n\n/// async fn resolve(&self, hostname: &str, port: u16) -> Result<Vec<SocketAddr>, Error> {\n\n/// // Your DNS resolve logic goes here.\n\n/// todo!()\n\n/// }\n\n/// }\n\n///\n\n/// # fn resolve() {\n\n/// let client = ClientBuilder::new().resolver(MyResolver).finish();\n\n/// # }\n\n/// ```\n\npub trait Resolve: Send + Sync {\n\n /// *. 
hostname does not include port number.\n\n fn resolve<'s, 'h, 'f>(&'s self, hostname: &'h str, port: u16) -> BoxFuture<'f, Result<Vec<SocketAddr>, Error>>\n\n where\n\n 's: 'f,\n\n 'h: 'f;\n\n}\n", "file_path": "client/src/resolver.rs", "rank": 71, "score": 60181.793937358365 }, { "content": "pub trait Responder<D>: Sized {\n\n fn respond_to(self, req: &mut WebRequest<'_, D>) -> WebResponse;\n\n}\n\n\n\nimpl<D> Responder<D> for WebResponse {\n\n fn respond_to(self, _: &mut WebRequest<'_, D>) -> WebResponse {\n\n self\n\n }\n\n}\n", "file_path": "web/src/response.rs", "rank": 72, "score": 60156.68875047337 }, { "content": "fn main() -> std::io::Result<()> {\n\n tokio_uring::start(async {\n\n HttpServer::new(move || {\n\n // a temporary file with 64 hello world string.\n\n let mut file = NamedTempFile::new().unwrap();\n\n for _ in 0..64 {\n\n file.write_all(HELLO).unwrap();\n\n }\n\n App::with_current_thread_state(Rc::new(file)).service(fn_service(handler))\n\n })\n\n .bind(\"127.0.0.1:8080\")?\n\n .run()\n\n .await\n\n })\n\n}\n\n\n\nasync fn handler(req: &mut WebRequest<'_, Rc<NamedTempFile>>) -> Result<WebResponse, Box<dyn std::error::Error>> {\n\n let file = File::open(req.state().path()).await?;\n\n let res = read(&file).await;\n\n file.close().await?;\n", "file_path": "examples/io-uring.rs", "rank": 73, "score": 59998.9968709622 }, { "content": "/// Helper trait to cast a cloneable type that impl [`ServiceFactory`](xitca_service::ServiceFactory)\n\n/// to a trait object that is `Send` and `Clone`.\n\npub trait AsServiceFactoryClone<Req>\n\nwhere\n\n Req: FromStream,\n\n Self: Send + Clone + 'static,\n\n{\n\n type ServiceFactoryClone: ServiceFactory<Req, Config = ()>;\n\n\n\n fn as_factory_clone(&self) -> Self::ServiceFactoryClone;\n\n}\n\n\n\nimpl<F, T, Req> AsServiceFactoryClone<Req> for F\n\nwhere\n\n F: Fn() -> T + Send + Clone + 'static,\n\n T: ServiceFactory<Req, Config = ()>,\n\n Req: FromStream,\n\n{\n\n type ServiceFactoryClone = T;\n\n\n\n fn 
as_factory_clone(&self) -> T {\n\n (self)()\n\n }\n\n}\n", "file_path": "server/src/server/service.rs", "rank": 74, "score": 59553.162186397894 }, { "content": "#[cold]\n\n#[inline(never)]\n\nfn incomplete_body() -> io::Error {\n\n io::Error::new(\n\n io::ErrorKind::UnexpectedEof,\n\n \"end of file before message length reached\",\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::h1::proto::buf::FlatBuf;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_chunk_size() {\n\n use std::io::ErrorKind::{InvalidData, InvalidInput, UnexpectedEof};\n\n\n\n fn read(s: &str) -> u64 {\n\n let mut state = ChunkedState::Size;\n\n let rdr = &mut BytesMut::from(s);\n", "file_path": "http/src/h1/proto/codec.rs", "rank": 75, "score": 59390.634359754884 }, { "content": "/// Trait for custom tls connector.\n\n///\n\n/// # Examples\n\n/// ```rust\n\n/// use xitca_client::{error::Error, http::Version, ClientBuilder, Io, TlsConnect};\n\n///\n\n/// struct MyConnector;\n\n///\n\n/// #[async_trait::async_trait]\n\n/// impl TlsConnect for MyConnector {\n\n/// async fn connect(&self, io: Box<dyn Io>) -> Result<(Box<dyn Io>, Version), Error> {\n\n/// // tls handshake logic\n\n/// todo!()\n\n/// }\n\n/// }\n\n///\n\n/// # fn resolve() {\n\n/// let client = ClientBuilder::new().tls_connector(MyConnector).finish();\n\n/// # }\n\n/// ```\n\npub trait TlsConnect: Send + Sync {\n\n /// Box<dyn Io> is an async read/write type.\n\n ///\n\n /// See [Io] trait for detail.\n\n #[allow(clippy::type_complexity)]\n\n fn connect<'s, 'f>(&'s self, io: Box<dyn Io>) -> BoxFuture<'f, Result<(Box<dyn Io>, Version), Error>>\n\n where\n\n 's: 'f;\n\n}\n", "file_path": "client/src/tls/connector.rs", "rank": 76, "score": 58001.9519388433 }, { "content": "/// An async coding trait that consume self with every method call that can be used for either\n\n/// decode or encode.\n\n///\n\n/// This is useful when cross thread de/encode is desirable in the form of moving objects between\n\n/// threads.\n\npub 
trait AsyncCode<Item>: Sized {\n\n type Item;\n\n\n\n type Future: Future<Output = io::Result<(Self, Option<Self::Item>)>>;\n\n\n\n fn code(self, item: Item) -> Self::Future;\n\n\n\n fn code_eof(self) -> io::Result<Option<Self::Item>>;\n\n}\n\n\n\n/// Identity coder serve as a pass through coder that just forward items.\n\npub struct IdentityCoder;\n\n\n\nimpl<Item> AsyncCode<Item> for IdentityCoder\n\nwhere\n\n Bytes: From<Item>,\n\n{\n\n type Item = Bytes;\n\n type Future = impl Future<Output = io::Result<(Self, Option<Self::Item>)>>;\n\n\n", "file_path": "http-encoding/src/coder.rs", "rank": 77, "score": 57990.45498230117 }, { "content": "fn h2_config() -> io::Result<SslAcceptor> {\n\n // set up openssl and alpn protocol.\n\n let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls())?;\n\n builder.set_private_key_file(\"./cert/key.pem\", SslFiletype::PEM)?;\n\n builder.set_certificate_chain_file(\"./cert/cert.pem\")?;\n\n\n\n builder.set_alpn_select_callback(|_, protocols| {\n\n const H2: &[u8] = b\"\\x02h2\";\n\n const H11: &[u8] = b\"\\x08http/1.1\";\n\n\n\n if protocols.windows(3).any(|window| window == H2) {\n\n Ok(b\"h2\")\n\n } else if protocols.windows(9).any(|window| window == H11) {\n\n Ok(b\"http/1.1\")\n\n } else {\n\n Err(AlpnError::NOACK)\n\n }\n\n });\n\n\n\n builder.set_alpn_protos(b\"\\x08http/1.1\\x02h2\")?;\n\n\n\n Ok(builder.build())\n\n}\n\n\n", "file_path": "examples/multi-services.rs", "rank": 78, "score": 57824.50253136593 }, { "content": "fn h3_config() -> io::Result<ServerConfig> {\n\n let cert = fs::read(\"./cert/cert.pem\")?;\n\n let key = fs::read(\"./cert/key.pem\")?;\n\n\n\n let key = rustls_pemfile::pkcs8_private_keys(&mut &*key).unwrap().remove(0);\n\n let key = PrivateKey(key);\n\n\n\n let cert = rustls_pemfile::certs(&mut &*cert)\n\n .unwrap()\n\n .into_iter()\n\n .map(Certificate)\n\n .collect();\n\n\n\n let mut acceptor = rustls::ServerConfig::builder()\n\n .with_safe_defaults()\n\n .with_no_client_auth()\n\n 
.with_single_cert(cert, key)\n\n .unwrap();\n\n\n\n acceptor.alpn_protocols = vec![b\"h3-29\".to_vec(), b\"h3-28\".to_vec(), b\"h3-27\".to_vec()];\n\n\n\n Ok(ServerConfig::with_crypto(Arc::new(acceptor)))\n\n}\n", "file_path": "examples/multi-services.rs", "rank": 79, "score": 57824.50253136593 }, { "content": "fn h3_config() -> io::Result<ServerConfig> {\n\n let config = rustls_config(vec![b\"h3-29\".to_vec(), b\"h3-28\".to_vec(), b\"h3-27\".to_vec()])?;\n\n Ok(ServerConfig::with_crypto(config))\n\n}\n\n\n", "file_path": "examples/hello-world.rs", "rank": 80, "score": 57824.50253136593 }, { "content": "/// Trait implemented by types that can be extracted from request.\n\n///\n\n/// Types that implement this trait can be used with `Route` handlers.\n\npub trait FromRequest<'a, D>: Sized {\n\n /// Configuration for this extractor.\n\n type Config: Default + 'static;\n\n\n\n /// The associated error which can be returned.\n\n type Error;\n\n\n\n /// Future that resolves to a Self.\n\n type Future: Future<Output = Result<Self, Self::Error>> + 'a;\n\n\n\n /// Create a Self from request parts asynchronously.\n\n fn from_request(req: &'a WebRequest<'_, D>) -> Self::Future;\n\n}\n\n\n\nimpl<'a, D> FromRequest<'a, D> for &'a WebRequest<'a, D>\n\nwhere\n\n D: 'static,\n\n{\n\n type Config = ();\n\n type Error = ();\n", "file_path": "web/src/extract/mod.rs", "rank": 81, "score": 57668.744971923 }, { "content": "pub trait Transform<S, Req>: Clone {\n\n /// Responses produced by the service.\n\n type Response;\n\n\n\n /// Errors produced by the service.\n\n type Error;\n\n\n\n /// The `TransformService` value created by this factory\n\n type Transform: Service<Req, Response = Self::Response, Error = Self::Error>;\n\n\n\n /// Errors produced while building a transform service.\n\n type InitError;\n\n\n\n /// The future response value.\n\n type Future: Future<Output = Result<Self::Transform, Self::InitError>>;\n\n\n\n /// Creates and returns a new Transform component, 
asynchronously\n\n fn new_transform(&self, service: S) -> Self::Future;\n\n}\n\n\n", "file_path": "service/src/transform/mod.rs", "rank": 82, "score": 56608.44277512505 }, { "content": "/// This function defines errors that are per-connection. Which basically\n\n/// means that if we get this error from `accept()` system call it means\n\n/// next connection might be ready to be accepted.\n\n///\n\n/// All other errors will incur a timeout before next `accept()` is performed.\n\n/// The timeout is useful to handle resource exhaustion errors like ENFILE\n\n/// and EMFILE. Otherwise, could enter into tight loop.\n\nfn connection_error(e: &io::Error) -> bool {\n\n e.kind() == io::ErrorKind::ConnectionRefused\n\n || e.kind() == io::ErrorKind::ConnectionAborted\n\n || e.kind() == io::ErrorKind::ConnectionReset\n\n}\n\n\n", "file_path": "server/src/worker/mod.rs", "rank": 83, "score": 56464.21783340529 }, { "content": "fn fatal_error(e: &io::Error) -> bool {\n\n e.kind() == io::ErrorKind::BrokenPipe || e.kind() == io::ErrorKind::Other\n\n}\n", "file_path": "server/src/worker/mod.rs", "rank": 84, "score": 56460.63420533101 }, { "content": "#[doc(hidden)]\n\npub trait _ServiceObject<Req, Res, Err> {\n\n type Future: Future<Output = Result<Res, Err>>;\n\n\n\n fn clone_object(&self) -> ServiceObject<Req, Res, Err>;\n\n\n\n fn call(&self, req: Req) -> Self::Future;\n\n}\n\n\n\nimpl<S, Req> _ServiceObject<Req, S::Response, S::Error> for S\n\nwhere\n\n S: Service<Req> + Clone + 'static,\n\n Req: 'static,\n\n{\n\n type Future = BoxFuture<'static, S::Response, S::Error>;\n\n\n\n #[inline]\n\n fn clone_object(&self) -> ServiceObject<Req, S::Response, S::Error> {\n\n ServiceObject::new(self.clone())\n\n }\n\n\n\n #[inline]\n\n fn call(&self, req: Req) -> Self::Future {\n\n let this = self.clone();\n\n Box::pin(async move {\n\n this.ready().await?;\n\n Service::call(&this, req).await\n\n })\n\n }\n\n}\n", "file_path": "service/src/service/object.rs", "rank": 85, "score": 
55610.670567382986 }, { "content": "fn get_host_uri(req: &Parts) -> Option<Uri> {\n\n use core::str::FromStr;\n\n req.headers\n\n .get(header::HOST)\n\n .and_then(|host_value| host_value.to_str().ok())\n\n .or_else(|| req.uri.host())\n\n .map(|host: &str| Uri::from_str(host).ok())\n\n .and_then(|host_success| host_success)\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct HostGuard(String, Option<String>);\n\n\n\nimpl HostGuard {\n\n /// Set request scheme to match\n\n pub fn scheme<H: AsRef<str>>(mut self, scheme: H) -> HostGuard {\n\n self.1 = Some(scheme.as_ref().to_string());\n\n self\n\n }\n\n}\n", "file_path": "web/src/guard.rs", "rank": 86, "score": 55463.173619158566 }, { "content": " /// A wrapper trait for an AsyncRead/AsyncWrite tokio type with additional methods.\n\n pub trait AsyncIo: AsyncRead + AsyncWrite + Unpin {\n\n type ReadyFuture<'f>: Future<Output = io::Result<Ready>>\n\n where\n\n Self: 'f;\n\n\n\n /// asynchronously wait for the IO type and\n\n fn ready(&self, interest: Interest) -> Self::ReadyFuture<'_>;\n\n\n\n fn try_read_buf<B: BufMut>(&mut self, buf: &mut B) -> io::Result<usize>;\n\n\n\n fn try_write(&mut self, buf: &[u8]) -> io::Result<usize>;\n\n\n\n fn try_write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result<usize>;\n\n }\n\n\n\n macro_rules! 
basic_impl {\n\n ($ty: ty) => {\n\n impl AsyncIo for $ty {\n\n type ReadyFuture<'f> = impl Future<Output = io::Result<Ready>>;\n\n\n", "file_path": "io/src/lib.rs", "rank": 87, "score": 54666.35848605693 }, { "content": "pub trait Handler<T, R>: Clone + 'static\n\nwhere\n\n R: Future,\n\n{\n\n fn call(&self, param: T) -> R;\n\n}\n\n\n\n#[doc(hidden)]\n\n/// Extract arguments from request, run handler function and make response.\n\npub struct HandlerService<State, F, T, R>\n\nwhere\n\n State: 'static,\n\n F: Handler<T, R>,\n\n R: Future,\n\n R::Output: Responder<State>,\n\n{\n\n hnd: F,\n\n _phantom: PhantomData<(State, T, R)>,\n\n}\n\n\n", "file_path": "web/src/service/handler.rs", "rank": 88, "score": 54400.7749831861 }, { "content": "/// Helper trait for convert Service::Error type to Service::Response.\n\n// TODO: Add method to modify status code.\n\npub trait ResponseError<Req, Res>: fmt::Debug {\n\n fn status_code() -> StatusCode {\n\n StatusCode::INTERNAL_SERVER_ERROR\n\n }\n\n\n\n fn response_error(&mut self, req: &mut Req) -> Res;\n\n}\n\n\n\n// implement ResponseError for common error types.\n\nimpl<Req, B> ResponseError<Req, Response<ResponseBody<B>>> for () {\n\n fn response_error(&mut self, _: &mut Req) -> Response<ResponseBody<B>> {\n\n status_only(<Self as ResponseError<Req, Response<ResponseBody<B>>>>::status_code())\n\n }\n\n}\n\n\n\nmacro_rules! 
internal_impl {\n\n ($ty: ty) => {\n\n impl<B, Req> ResponseError<Req, Response<ResponseBody<B>>> for $ty {\n\n fn response_error(&mut self, _: &mut Req) -> Response<ResponseBody<B>> {\n\n let mut bytes = BytesMut::new();\n", "file_path": "http/src/response.rs", "rank": 89, "score": 54400.7749831861 }, { "content": "fn parse_host(host: &str) -> (&str, Option<u16>) {\n\n let mut parts_iter = host.splitn(2, ':');\n\n\n\n match parts_iter.next() {\n\n Some(hostname) => {\n\n let port_str = parts_iter.next().unwrap_or(\"\");\n\n let port = port_str.parse::<u16>().ok();\n\n (hostname, port)\n\n }\n\n\n\n None => (host, None),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::net::{IpAddr, Ipv4Addr};\n\n\n\n use super::*;\n\n\n", "file_path": "client/src/connect.rs", "rank": 90, "score": 54261.388195637934 }, { "content": "// Get port from well-known URL schemes.\n\nfn scheme_to_port(scheme: Option<&str>) -> Option<u16> {\n\n match scheme {\n\n // HTTP\n\n Some(\"http\" | \"ws\") => Some(80),\n\n\n\n // HTTP Tls\n\n Some(\"https\" | \"wss\") => Some(443),\n\n\n\n // Advanced Message Queuing Protocol (AMQP)\n\n Some(\"amqp\") => Some(5672),\n\n Some(\"amqps\") => Some(5671),\n\n\n\n // Message Queuing Telemetry Transport (MQTT)\n\n Some(\"mqtt\") => Some(1883),\n\n Some(\"mqtts\") => Some(8883),\n\n\n\n // File Transfer Protocol (FTP)\n\n Some(\"ftp\") => Some(1883),\n\n Some(\"ftps\") => Some(990),\n\n\n", "file_path": "client/src/connect.rs", "rank": 91, "score": 54261.388195637934 }, { "content": "pub trait ServiceFactoryExt<Req>: ServiceFactory<Req> {\n\n fn map<F, Res>(self, mapper: F) -> PipelineServiceFactory<Self, F, marker::Map>\n\n where\n\n F: Fn(Result<Self::Response, Self::Error>) -> Result<Res, Self::Error> + Clone,\n\n Self: Sized,\n\n {\n\n PipelineServiceFactory::new(self, mapper)\n\n }\n\n\n\n fn map_err<F, E>(self, err: F) -> PipelineServiceFactory<Self, F, marker::MapErr>\n\n where\n\n F: Fn(Self::Error) -> E + Clone,\n\n Self: Sized,\n\n 
{\n\n PipelineServiceFactory::new(self, err)\n\n }\n\n\n\n fn then<F>(self, factory: F) -> PipelineServiceFactory<Self, F, marker::Then>\n\n where\n\n F: ServiceFactory<Result<Self::Response, Self::Error>>,\n", "file_path": "service/src/factory/ext.rs", "rank": 92, "score": 53771.322081979546 }, { "content": "/// A trait impl for all types that impl [AsyncRead], [AsyncWrite], [Send] and [Unpin].\n\n/// Enabling `Box<dyn Io>` trait object usage.\n\npub trait Io: AsyncRead + AsyncWrite + Send + Unpin {}\n\n\n\nimpl<S> Io for S where S: AsyncRead + AsyncWrite + Send + Unpin {}\n\n\n\n#[allow(unused_variables)]\n\nimpl<S> AsyncRead for TlsStream<S>\n\nwhere\n\n S: AsyncRead + AsyncWrite + Unpin,\n\n{\n\n fn poll_read(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>) -> Poll<io::Result<()>> {\n\n match self.get_mut() {\n\n Self::NoOp(io) => Pin::new(io).poll_read(cx, buf),\n\n Self::Boxed(io) => Pin::new(io.as_mut()).poll_read(cx, buf),\n\n #[cfg(feature = \"openssl\")]\n\n Self::Openssl(s) => Pin::new(s).poll_read(cx, buf),\n\n #[cfg(feature = \"rustls\")]\n\n Self::Rustls(s) => Pin::new(s).poll_read(cx, buf),\n\n }\n\n }\n\n}\n", "file_path": "client/src/tls/stream.rs", "rank": 93, "score": 52561.42649778266 }, { "content": "fn h1_h2_config() -> io::Result<Arc<rustls::ServerConfig>> {\n\n let config = rustls_config(vec![\"h2\".into(), \"http/1.1\".into()])?;\n\n Ok(config)\n\n}\n\n\n", "file_path": "examples/hello-world.rs", "rank": 94, "score": 52422.61417067512 }, { "content": "#[doc(hidden)]\n\npub trait _ServiceFactoryObject<Req, Res, Err, Cfg, InitErr> {\n\n type Future: Future<Output = Result<ServiceObject<Req, Res, Err>, InitErr>>;\n\n\n\n fn new_service(&self, cfg: Cfg) -> Self::Future;\n\n}\n\n\n\nimpl<F, Req> _ServiceFactoryObject<Req, F::Response, F::Error, F::Config, F::InitError> for F\n\nwhere\n\n F: ServiceFactory<Req>,\n\n F::Service: 'static,\n\n F::Future: 'static,\n\n Req: 'static,\n\n{\n\n type Future = BoxFuture<'static, 
ServiceObject<Req, F::Response, F::Error>, F::InitError>;\n\n\n\n fn new_service(&self, cfg: F::Config) -> Self::Future {\n\n let fut = ServiceFactory::new_service(self, cfg);\n\n Box::pin(async move {\n\n let service = fut.await?;\n\n Ok(ServiceObject::new(Rc::new(service)))\n\n })\n\n }\n\n}\n", "file_path": "service/src/factory/object.rs", "rank": 95, "score": 49830.342724293136 }, { "content": "type StateFactory<State> = Box<dyn Fn() -> LocalBoxFuture<'static, State>>;\n\n\n\npub struct App<SF = StateFactory<()>, F = ()> {\n\n state_factory: SF,\n\n pub factory: F,\n\n}\n\n\n\nimpl Default for App {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl App {\n\n pub fn new() -> App {\n\n Self {\n\n state_factory: Box::new(|| Box::pin(async {})),\n\n factory: (),\n\n }\n\n }\n", "file_path": "web/src/app/mod.rs", "rank": 96, "score": 47977.52849894324 }, { "content": "fn from_headers<E>(headers: &HeaderMap) -> Result<ContentDecoder, CoderError<E>> {\n\n let decoder = headers\n\n .get(&CONTENT_ENCODING)\n\n .and_then(|val| val.to_str().ok())\n\n .map(|encoding| match ContentEncoding::from(encoding) {\n\n ContentEncoding::Br => {\n\n #[cfg(feature = \"br\")]\n\n {\n\n Ok(_ContentDecoder::Br(super::brotli::BrotliDecoder::new(Writer::new())))\n\n }\n\n #[cfg(not(feature = \"br\"))]\n\n Err(CoderError::Feature(super::coder::Feature::Br))\n\n }\n\n ContentEncoding::Gzip => {\n\n #[cfg(feature = \"gz\")]\n\n {\n\n Ok(_ContentDecoder::Gz(super::gzip::GzDecoder::new(Writer::new())))\n\n }\n\n #[cfg(not(feature = \"gz\"))]\n\n Err(CoderError::Feature(super::coder::Feature::Gzip))\n", "file_path": "http-encoding/src/decoder.rs", "rank": 97, "score": 47977.52849894324 }, { "content": "fn find_async_method<'a>(items: &'a [ImplItem], ident_str: &'a str) -> Option<&'a ImplItemMethod> {\n\n items.iter().find_map(|item| match item {\n\n ImplItem::Method(method) if method.sig.ident.to_string().as_str() == ident_str => {\n\n assert!(method.sig.asyncness.is_some(), \"{} 
method must be async fn\", ident_str);\n\n Some(method)\n\n }\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "http-codegen/src/lib.rs", "rank": 98, "score": 44211.04580430396 }, { "content": "use std::future::{ready, Future, Ready};\n\n\n\nuse xitca_http::{\n\n http::{Response, StatusCode},\n\n ResponseBody,\n\n};\n\nuse xitca_service::{Service, ServiceFactory};\n\n\n\nuse crate::response::WebResponse;\n\n\n\npub struct NotFoundService;\n\n\n\nimpl<Req> ServiceFactory<Req> for NotFoundService {\n\n type Response = WebResponse;\n\n type Error = ();\n\n type Config = ();\n\n type Service = NotFoundService;\n\n type InitError = ();\n\n type Future = impl Future<Output = Result<Self::Service, Self::InitError>>;\n\n\n", "file_path": "web/src/service/default.rs", "rank": 99, "score": 42556.66812063955 } ]
Rust
tests/test_lookup_ipv6.rs
tomhrr/owhois
94791fca1e01c7bfc5fe7f001d65fe3755213ab3
extern crate owhois; extern crate ipnet; #[cfg(test)] mod test_lookup { use ipnet::Ipv6Net; use owhois::lookup::ResourceLookup; use owhois::lookup::Ipv6ResourceLookup; use std::str::FromStr; #[test] fn ipv6_lookup_empty() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_single() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_multiple() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::1/128").unwrap(), 2) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_parents() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/16").unwrap(), 1), (Ipv6Net::from_str("::/32").unwrap(), 2), (Ipv6Net::from_str("::/48").unwrap(), 3), ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/48").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/32").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/16").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/15").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_bounds() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::/8").unwrap(), 2), (Ipv6Net::from_str("ff00::/8").unwrap(), 3), 
(Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap(), 4), ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::1/128").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/127").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/8").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/7").unwrap() ); assert_eq!(value, None); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap() ); assert_eq!(value, Some(4)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/128").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ff00::/8").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("fe00::/7").unwrap() ); assert_eq!(value, None); } }
extern crate owhois; extern crate ipnet; #[cfg(test)] mod test_lookup { use ipnet::Ipv6Net; use owhois::lookup::ResourceLookup; use owhois::lookup::Ipv6ResourceLookup; use std::str::FromStr; #[test] fn ipv6_lookup_empty() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_single() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_multiple() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::1/128").unwrap(), 2) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_parents() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/16").unwrap(), 1), (Ipv6Net::from_str("::/32").unwrap(), 2), (Ipv6Net::from_str("::/48").unwrap(), 3), ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/48").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/32").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/16").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/15").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_bounds() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::/8").unwrap(), 2), (Ipv6Net::from_str("ff00::/8").unwrap(), 3), 
(Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap(), 4), ]); let value = ipv6_lookup.get_longest_
m_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap() ); assert_eq!(value, Some(4)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/128").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ff00::/8").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("fe00::/7").unwrap() ); assert_eq!(value, None); } }
match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::1/128").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/127").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/8").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/7").unwrap() ); assert_eq!(value, None); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::fro
random
[ { "content": "fn run_processors(directory: &str,\n\n servers: &HashMap<String, u32>,\n\n processors: Vec<Box<dyn Processor>>,\n\n ipv4_path: &str,\n\n ipv6_path: &str,\n\n asn_path: &str) {\n\n let mut ipv4_entries: Vec<(Ipv4Net, u32)> = Vec::new();\n\n let mut ipv6_entries: Vec<(Ipv6Net, u32)> = Vec::new();\n\n let mut asn_entries: Vec<(AsnRange, u32)> = Vec::new();\n\n\n\n for processor in processors.iter() {\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(ipv4_entries.clone());\n\n let ipv6_lookup: Ipv6ResourceLookup =\n\n ResourceLookup::from_iter(ipv6_entries.clone());\n\n let asn_lookup: AsnResourceLookup =\n\n ResourceLookup::from_iter(asn_entries.clone());\n\n\n\n processor.run(directory, servers, &ipv4_lookup,\n\n &ipv6_lookup, &asn_lookup,\n", "file_path": "src/data/mod.rs", "rank": 0, "score": 52320.39609119502 }, { "content": "pub fn process_public(public_data_dir: &str,\n\n ipv4_path: &str,\n\n ipv6_path: &str,\n\n asn_path: &str) {\n\n let mut servers: HashMap<String, u32> = HashMap::new();\n\n servers.insert(String::from_str(\"\").unwrap(), 0);\n\n servers.insert(String::from_str(\"whois.afrinic.net\").unwrap(), 1);\n\n servers.insert(String::from_str(\"whois.apnic.net\").unwrap(), 2);\n\n servers.insert(String::from_str(\"whois.arin.net\").unwrap(), 3);\n\n servers.insert(String::from_str(\"whois.iana.org\").unwrap(), 4);\n\n servers.insert(String::from_str(\"whois.lacnic.net\").unwrap(), 5);\n\n servers.insert(String::from_str(\"whois.ripe.net\").unwrap(), 6);\n\n run_processors(public_data_dir, &servers,\n\n vec![Box::new(Iana::new()),\n\n Box::new(Delegated::new())],\n\n ipv4_path, ipv6_path, asn_path);\n\n}\n", "file_path": "src/data/mod.rs", "rank": 1, "score": 46267.069587054444 }, { "content": "fn watch() -> notify::Result<()> {\n\n let (tx, rx) = channel();\n\n\n\n let mut watcher =\n\n PollWatcher::new(tx, Duration::from_secs(POLL_PERIOD))?;\n\n\n\n watcher.watch(\"data/ipv4\", 
RecursiveMode::NonRecursive).unwrap();\n\n watcher.watch(\"data/ipv6\", RecursiveMode::NonRecursive).unwrap();\n\n watcher.watch(\"data/asn\", RecursiveMode::NonRecursive).unwrap();\n\n\n\n let mut last_event_time = Instant::now();\n\n\n\n loop {\n\n match rx.recv() {\n\n Ok(DebouncedEvent::Write(_)) => {\n\n let current_event_time = Instant::now();\n\n let difference = current_event_time.sub(last_event_time);\n\n let test_duration = Duration::from_secs(RELOAD_DELAY);\n\n if difference.ge(&test_duration) {\n\n last_event_time = Instant::now();\n", "file_path": "src/server.rs", "rank": 2, "score": 33735.23543993775 }, { "content": "fn ipv4_min_addr() -> Ipv4Addr {\n\n Ipv4Addr::new(0, 0, 0, 0)\n\n}\n\n\n\npub struct Ipv4IntervalTree {\n\n interval_tree: IntervalTree<Ipv4Addr, u32>,\n\n last_values: Vec<(Range<Ipv4Addr>, u32)>,\n\n}\n\n\n\nimpl ResourceLookup<Ipv4Net, u32>\n\n for Ipv4IntervalTree {\n\n fn get_longest_match(&self, net: Ipv4Net)\n\n -> Option<(Option<Ipv4Net>, u32)> {\n\n let end = net.broadcast();\n\n let range_end = ipv4_increment(end);\n\n let tree = &self.interval_tree;\n\n let iter =\n\n match net.prefix_len() == net.max_prefix_len() {\n\n true => IntervalTree::query_point(tree, net.addr()),\n\n false => IntervalTree::query(tree, Range {\n", "file_path": "src/lookup.rs", "rank": 3, "score": 33326.46790042321 }, { "content": "fn ipv6_min_addr() -> Ipv6Addr {\n\n Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)\n\n}\n\n\n\npub struct Ipv6IntervalTree {\n\n interval_tree: IntervalTree<Ipv6Addr, u32>,\n\n last_values: Vec<(Range<Ipv6Addr>, u32)>,\n\n}\n\n\n\nimpl ResourceLookup<Ipv6Net, u32>\n\n for Ipv6IntervalTree {\n\n fn get_longest_match(&self, net: Ipv6Net)\n\n -> Option<(Option<Ipv6Net>, u32)> {\n\n let end = net.broadcast();\n\n let range_end = ipv6_increment(end);\n\n let tree = &self.interval_tree;\n\n let iter =\n\n match net.prefix_len() == net.max_prefix_len() {\n\n true => IntervalTree::query_point(tree, net.addr()),\n\n false => 
IntervalTree::query(tree, Range {\n", "file_path": "src/lookup.rs", "rank": 4, "score": 33326.46790042321 }, { "content": "extern crate owhois;\n\n\n\n#[cfg(test)]\n\nmod test_context {\n\n use owhois::context::Context;\n\n use std::str::FromStr;\n\n\n\n #[test]\n\n fn context() {\n\n let context =\n\n Context::from_files(\n\n \"testdata/test_lookup/ipv4_data_1\",\n\n \"testdata/test_lookup/ipv6_data_1\",\n\n \"testdata/test_lookup/asn_data_1\",\n\n );\n\n\n\n let s1: String = String::from_str(\"first-server\").unwrap();\n\n let s2: String = String::from_str(\"second-server\").unwrap();\n\n let s3: String = String::from_str(\"third-server\").unwrap();\n\n\n", "file_path": "tests/test_context.rs", "rank": 5, "score": 30713.632076136222 }, { "content": " let value = context.lookup(\"asdf\");\n\n assert_eq!(value, None);\n\n\n\n let value = context.lookup(\"1.0.0.0\");\n\n assert_eq!(value, Some(&s1));\n\n\n\n let value = context.lookup(\"2.0.0.0/16\");\n\n assert_eq!(value, Some(&s2));\n\n\n\n let value = context.lookup(\"4.0.0.0/8\");\n\n assert_eq!(value, None);\n\n\n\n let value = context.lookup(\"0002::/32\");\n\n assert_eq!(value, Some(&s2));\n\n\n\n let value = context.lookup(\"::1\");\n\n assert_eq!(value, None);\n\n\n\n let value = context.lookup(\"AS500\");\n\n assert_eq!(value, Some(&s1));\n\n\n\n let value = context.lookup(\"AS500-AS2500\");\n\n assert_eq!(value, None);\n\n\n\n let value = context.lookup(\"AS2500-AS2600\");\n\n assert_eq!(value, Some(&s3));\n\n }\n\n}\n", "file_path": "tests/test_context.rs", "rank": 6, "score": 30707.19412477472 }, { "content": "fn duration_to_ms(duration: Duration) -> u64 {\n\n let ms_secs: u64 = duration.as_secs() * 1000;\n\n let ns_secs: u64 = (duration.subsec_nanos() / 1000000) as u64;\n\n ms_secs + ns_secs\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 7, "score": 30545.301390979577 }, { "content": "fn to_u32(address: Ipv4Addr) -> u32 {\n\n let octets = address.octets();\n\n let value =\n\n (octets[0] as u32) << 
24\n\n | (octets[1] as u32) << 16\n\n | (octets[2] as u32) << 8\n\n | (octets[3] as u32);\n\n\n\n return value;\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 8, "score": 30545.301390979577 }, { "content": "fn parse_ipv6_iana_data(directory: &str,\n\n servers: &HashMap<String, u32>,\n\n entries: &mut Vec<(Ipv6Net, u32)>) {\n\n let path = format!(\"{}/iana/ipv6-unicast-address-assignments.csv\", directory);\n\n let file = File::open(path).unwrap();\n\n let mut csv_reader = csv::Reader::from_reader(file);\n\n csv_reader.records()\n\n .filter(|i| i.is_ok())\n\n .map(|i| i.unwrap())\n\n .for_each(|i| {\n\n let server = i.get(3).unwrap();\n\n let index = *(servers.get(server).unwrap());\n\n\n\n let address_str = i.get(0).unwrap();\n\n let net = Ipv6Net::from_str(address_str).unwrap();\n\n\n\n entries.push((net, index));\n\n });\n\n}\n\n\n", "file_path": "src/data/iana.rs", "rank": 9, "score": 30154.16244326537 }, { "content": "fn parse_asn32_iana_data(directory: &str,\n\n servers: &HashMap<String, u32>,\n\n entries: &mut Vec<(AsnRange, u32)>) {\n\n let path = format!(\"{}/iana/as-numbers-2.csv\", directory);\n\n let file = File::open(path).unwrap();\n\n let mut csv_reader = csv::Reader::from_reader(file);\n\n csv_reader.records()\n\n .filter(|i| i.is_ok())\n\n .map(|i| i.unwrap())\n\n .for_each(|i| {\n\n let server = i.get(2).unwrap();\n\n let index = *(servers.get(server).unwrap());\n\n\n\n let range = i.get(0).unwrap();\n\n match range.contains(\"-\") {\n\n true => {\n\n let nums: Vec<&str> = range.split(\"-\").collect();\n\n let start = u32::from_str(nums.get(0).unwrap()).unwrap();\n\n let end = u32::from_str(nums.get(1).unwrap()).unwrap();\n\n if start >= 65536 {\n", "file_path": "src/data/iana.rs", "rank": 10, "score": 30154.16244326537 }, { "content": "fn parse_asn16_iana_data(directory: &str,\n\n servers: &HashMap<String, u32>,\n\n entries: &mut Vec<(AsnRange, u32)>) {\n\n let path = format!(\"{}/iana/as-numbers-1.csv\", directory);\n\n let file = 
File::open(path).unwrap();\n\n let mut csv_reader = csv::Reader::from_reader(file);\n\n csv_reader.records()\n\n .filter(|i| i.is_ok())\n\n .map(|i| i.unwrap())\n\n .for_each(|i| {\n\n let server = i.get(2).unwrap();\n\n let index = *(servers.get(server).unwrap());\n\n\n\n let range = i.get(0).unwrap();\n\n match range.contains(\"-\") {\n\n true => {\n\n let nums: Vec<&str> = range.split(\"-\").collect();\n\n let start = u32::from_str(nums.get(0).unwrap()).unwrap();\n\n let end = u32::from_str(nums.get(1).unwrap()).unwrap();\n\n entries.push((AsnRange { start: Asn { value: start },\n", "file_path": "src/data/iana.rs", "rank": 11, "score": 30154.16244326537 }, { "content": "fn parse_ipv4_iana_data(directory: &str,\n\n servers: &HashMap<String, u32>,\n\n entries: &mut Vec<(Ipv4Net, u32)>) {\n\n let path = format!(\"{}/iana/ipv4-address-space.csv\", directory);\n\n let file = File::open(path).unwrap();\n\n let mut csv_reader = csv::Reader::from_reader(file);\n\n csv_reader.records()\n\n .filter(|i| i.is_ok())\n\n .map(|i| i.unwrap())\n\n .for_each(|i| {\n\n let server = i.get(3).unwrap();\n\n let index = *(servers.get(server).unwrap());\n\n\n\n let address_str = i.get(0).unwrap();\n\n let address: Vec<&str> = address_str.split('/').collect();\n\n let first_octet = u8::from_str(address.get(0).unwrap()).unwrap();\n\n let prefix_length = u32::from_str(address.get(1).unwrap()).unwrap();\n\n\n\n entries.push((Ipv4Net::new(Ipv4Addr::new(first_octet,0,0,0),\n\n prefix_length as u8).unwrap(),\n\n index));\n\n });\n\n}\n\n\n", "file_path": "src/data/iana.rs", "rank": 12, "score": 30154.16244326537 }, { "content": "fn to_u32(address: Ipv4Addr) -> u32 {\n\n let octets = address.octets();\n\n let value =\n\n (octets[0] as u32) << 24\n\n | (octets[1] as u32) << 16\n\n | (octets[2] as u32) << 8\n\n | (octets[3] as u32);\n\n\n\n return value;\n\n}\n\n\n", "file_path": "src/data/delegated.rs", "rank": 13, "score": 29696.278347702235 }, { "content": "extern crate owhois;\n\nextern 
crate ipnet;\n\n\n\n#[cfg(test)]\n\nmod test_lookup {\n\n use ipnet::Ipv4Net;\n\n use owhois::lookup::ResourceLookup;\n\n use owhois::lookup::Ipv4ResourceLookup;\n\n use std::str::FromStr;\n\n\n\n #[test]\n\n fn ipv4_lookup_empty() {\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(vec![]);\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"1.0.0.0/32\").unwrap()\n\n );\n\n assert_eq!(value, None);\n\n }\n\n\n", "file_path": "tests/test_lookup_ipv4.rs", "rank": 15, "score": 29656.34748665829 }, { "content": "extern crate owhois;\n\nextern crate ipnet;\n\nextern crate treebitmap;\n\nextern crate intervaltree;\n\n\n\n#[cfg(test)]\n\nmod test_data_iana {\n\n use owhois::data::iana::Iana;\n\n use ipnet::Ipv4Net;\n\n use ipnet::Ipv6Net;\n\n use owhois::data::processor::Processor;\n\n use owhois::lookup::ResourceLookup;\n\n use owhois::lookup::Ipv4ResourceLookup;\n\n use owhois::lookup::Ipv6ResourceLookup;\n\n use owhois::lookup::AsnResourceLookup;\n\n use owhois::lookup::AsnRange;\n\n use owhois::lookup::Asn;\n\n use std::str::FromStr;\n\n use std::collections::HashMap;\n\n\n", "file_path": "tests/test_data_iana.rs", "rank": 16, "score": 29653.897762750054 }, { "content": "extern crate owhois;\n\nextern crate ipnet;\n\nextern crate treebitmap;\n\nextern crate intervaltree;\n\n\n\n#[cfg(test)]\n\nmod test_data_delegated {\n\n use owhois::data::delegated::Delegated;\n\n use ipnet::Ipv4Net;\n\n use ipnet::Ipv6Net;\n\n use owhois::data::processor::Processor;\n\n use owhois::lookup::ResourceLookup;\n\n use owhois::lookup::Ipv4ResourceLookup;\n\n use owhois::lookup::Ipv6ResourceLookup;\n\n use owhois::lookup::AsnResourceLookup;\n\n use owhois::lookup::AsnRange;\n\n use owhois::lookup::Asn;\n\n use std::str::FromStr;\n\n use std::collections::HashMap;\n\n\n", "file_path": "tests/test_data_delegated.rs", "rank": 17, "score": 29653.897762750054 }, { "content": "extern crate owhois;\n\n\n\n#[cfg(test)]\n\nmod test_lookup {\n\n use 
owhois::lookup::ResourceLookup;\n\n use owhois::lookup::AsnResourceLookup;\n\n use owhois::lookup::Asn;\n\n use owhois::lookup::AsnRange;\n\n\n\n #[test]\n\n fn asn_lookup_empty() {\n\n let asn_lookup: AsnResourceLookup =\n\n ResourceLookup::from_iter(vec![]);\n\n let value = asn_lookup.get_longest_match_value(\n\n AsnRange { start: Asn { value: 1 },\n\n end: Asn { value: 2 } }\n\n );\n\n assert_eq!(value, None);\n\n }\n\n\n", "file_path": "tests/test_lookup_asn.rs", "rank": 18, "score": 29653.37023991854 }, { "content": " );\n\n assert_eq!(value, Some(2));\n\n\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"2.0.0.0/9\").unwrap()\n\n );\n\n assert_eq!(value, Some(1));\n\n\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"2.0.0.0/7\").unwrap()\n\n );\n\n assert_eq!(value, None);\n\n }\n\n\n\n #[test]\n\n fn ipv4_lookup_bounds() {\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(vec![\n\n (Ipv4Net::from_str(\"0.0.0.0/32\").unwrap(), 1),\n\n (Ipv4Net::from_str(\"0.0.0.0/8\").unwrap(), 2),\n", "file_path": "tests/test_lookup_ipv4.rs", "rank": 20, "score": 29641.88525682318 }, { "content": " #[test]\n\n fn asn_lookup_single() {\n\n let asn_lookup: AsnResourceLookup =\n\n ResourceLookup::from_iter(vec![\n\n (AsnRange { start: Asn { value: 1 },\n\n end: Asn { value: 2 } }, 1)\n\n ]);\n\n let value = asn_lookup.get_longest_match_value(\n\n AsnRange { start: Asn { value: 1 },\n\n end: Asn { value: 2 } }\n\n );\n\n assert_eq!(value, Some(1));\n\n }\n\n\n\n #[test]\n\n fn asn_lookup_multiple() {\n\n let asn_lookup: AsnResourceLookup =\n\n ResourceLookup::from_iter(vec![\n\n (AsnRange { start: Asn { value: 1 },\n\n end: Asn { value: 2 } }, 1),\n", "file_path": "tests/test_lookup_asn.rs", "rank": 21, "score": 29641.01121076522 }, { "content": " Ipv4Net::from_str(\"0.0.0.0/7\").unwrap()\n\n );\n\n assert_eq!(value, None);\n\n\n\n let value = ipv4_lookup.get_longest_match_value(\n\n 
Ipv4Net::from_str(\"255.255.255.255/32\").unwrap()\n\n );\n\n assert_eq!(value, Some(4));\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"255.255.255.254/32\").unwrap()\n\n );\n\n assert_eq!(value, Some(3));\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"255.255.255.254/31\").unwrap()\n\n );\n\n assert_eq!(value, Some(3));\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"255.0.0.0/8\").unwrap()\n\n );\n\n assert_eq!(value, Some(3));\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"254.0.0.0/7\").unwrap()\n\n );\n\n assert_eq!(value, None);\n\n }\n\n}\n", "file_path": "tests/test_lookup_ipv4.rs", "rank": 22, "score": 29640.869337980053 }, { "content": " #[test]\n\n fn ipv4_lookup_single() {\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(vec![\n\n (Ipv4Net::from_str(\"1.0.0.0/32\").unwrap(), 1)\n\n ]);\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"1.0.0.0/32\").unwrap()\n\n );\n\n assert_eq!(value, Some(1));\n\n }\n\n\n\n #[test]\n\n fn ipv4_lookup_multiple() {\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(vec![\n\n (Ipv4Net::from_str(\"1.0.0.0/32\").unwrap(), 1),\n\n (Ipv4Net::from_str(\"2.0.0.0/32\").unwrap(), 2)\n\n ]);\n\n let value = ipv4_lookup.get_longest_match_value(\n", "file_path": "tests/test_lookup_ipv4.rs", "rank": 24, "score": 29640.608561007084 }, { "content": " (AsnRange { start: Asn { value: 2 },\n\n end: Asn { value: 3 } }, 2),\n\n ]);\n\n let value = asn_lookup.get_longest_match_value(\n\n AsnRange { start: Asn { value: 1 },\n\n end: Asn { value: 2 } }\n\n );\n\n assert_eq!(value, Some(1));\n\n }\n\n\n\n #[test]\n\n fn asn_lookup_parents() {\n\n let asn_lookup: AsnResourceLookup =\n\n ResourceLookup::from_iter(vec![\n\n (AsnRange { start: Asn { value: 1 },\n\n end: Asn { value: 6 } }, 1),\n\n (AsnRange { start: Asn { value: 2 },\n\n end: Asn { value: 5 } 
}, 2),\n\n (AsnRange { start: Asn { value: 3 },\n\n end: Asn { value: 4 } }, 3),\n", "file_path": "tests/test_lookup_asn.rs", "rank": 26, "score": 29640.44572900311 }, { "content": " Ipv4Net::from_str(\"1.0.0.0/32\").unwrap()\n\n );\n\n assert_eq!(value, Some(1));\n\n }\n\n\n\n #[test]\n\n fn ipv4_lookup_parents() {\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(vec![\n\n (Ipv4Net::from_str(\"2.0.0.0/8\").unwrap(), 1),\n\n (Ipv4Net::from_str(\"2.0.0.0/16\").unwrap(), 2),\n\n (Ipv4Net::from_str(\"2.0.0.0/32\").unwrap(), 3),\n\n ]);\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"2.0.0.0/32\").unwrap()\n\n );\n\n assert_eq!(value, Some(3));\n\n\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"2.0.0.0/31\").unwrap()\n", "file_path": "tests/test_lookup_ipv4.rs", "rank": 28, "score": 29640.074144677088 }, { "content": " ]);\n\n let value = asn_lookup.get_longest_match_value(\n\n AsnRange { start: Asn { value: 3 },\n\n end: Asn { value: 4 } }\n\n );\n\n assert_eq!(value, Some(3));\n\n\n\n let value = asn_lookup.get_longest_match_value(\n\n AsnRange { start: Asn { value: 3 },\n\n end: Asn { value: 5 } }\n\n );\n\n assert_eq!(value, Some(2));\n\n\n\n let value = asn_lookup.get_longest_match_value(\n\n AsnRange { start: Asn { value: 3 },\n\n end: Asn { value: 6 } }\n\n );\n\n assert_eq!(value, Some(1));\n\n }\n\n}\n", "file_path": "tests/test_lookup_asn.rs", "rank": 29, "score": 29638.985336639827 }, { "content": " (Ipv4Net::from_str(\"255.0.0.0/8\").unwrap(), 3),\n\n (Ipv4Net::from_str(\"255.255.255.255/32\").unwrap(), 4),\n\n ]);\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"0.0.0.0/32\").unwrap()\n\n );\n\n assert_eq!(value, Some(1));\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"0.0.0.1/32\").unwrap()\n\n );\n\n assert_eq!(value, Some(2));\n\n let value = ipv4_lookup.get_longest_match_value(\n\n 
Ipv4Net::from_str(\"0.0.0.0/31\").unwrap()\n\n );\n\n assert_eq!(value, Some(2));\n\n let value = ipv4_lookup.get_longest_match_value(\n\n Ipv4Net::from_str(\"0.0.0.0/8\").unwrap()\n\n );\n\n assert_eq!(value, Some(2));\n\n let value = ipv4_lookup.get_longest_match_value(\n", "file_path": "tests/test_lookup_ipv4.rs", "rank": 30, "score": 29638.861397145643 }, { "content": " let asn_lookup: AsnResourceLookup =\n\n ResourceLookup::from_iter(asn_entries.clone());\n\n\n\n delegated.run(\"testdata/test_data_delegated\", &servers,\n\n &ipv4_lookup, &ipv6_lookup, &asn_lookup,\n\n &mut ipv4_entries, &mut ipv6_entries, &mut asn_entries);\n\n\n\n let check = (Ipv4Net::from_str(\"41.0.0.0/11\").unwrap(), 1);\n\n assert_eq!(ipv4_entries.get(0).unwrap(), &check);\n\n\n\n let check = (Ipv6Net::from_str(\"2001:4201::/32\").unwrap(), 1);\n\n assert_eq!(ipv6_entries.get(1).unwrap(), &check);\n\n\n\n let check = (AsnRange { start: Asn { value: 1230 },\n\n end: Asn { value: 1231 } }, 1);\n\n assert_eq!(asn_entries.get(2).unwrap(), &check);\n\n }\n\n}\n", "file_path": "tests/test_data_delegated.rs", "rank": 32, "score": 29638.809783142635 }, { "content": " let asn_lookup: AsnResourceLookup =\n\n ResourceLookup::from_iter(asn_entries.clone());\n\n\n\n iana.run(\"testdata/test_data_iana\", &servers,\n\n &ipv4_lookup, &ipv6_lookup, &asn_lookup,\n\n &mut ipv4_entries, &mut ipv6_entries, &mut asn_entries);\n\n\n\n let check = (Ipv4Net::from_str(\"0.0.0.0/8\").unwrap(), 0);\n\n assert_eq!(ipv4_entries.get(0).unwrap(), &check);\n\n\n\n let check = (Ipv6Net::from_str(\"2001:0200::/23\").unwrap(), 2);\n\n assert_eq!(ipv6_entries.get(1).unwrap(), &check);\n\n\n\n let check = (AsnRange { start: Asn { value: 7 },\n\n end: Asn { value: 8 } }, 6);\n\n assert_eq!(asn_entries.get(2).unwrap(), &check);\n\n }\n\n}\n", "file_path": "tests/test_data_iana.rs", "rank": 33, "score": 29638.809783142635 }, { "content": " #[test]\n\n fn iana() {\n\n let iana = Iana::new();\n\n let mut servers: HashMap<String, 
u32> = HashMap::new();\n\n servers.insert(String::from_str(\"\").unwrap(), 0);\n\n servers.insert(String::from_str(\"whois.afrinic.net\").unwrap(), 1);\n\n servers.insert(String::from_str(\"whois.apnic.net\").unwrap(), 2);\n\n servers.insert(String::from_str(\"whois.arin.net\").unwrap(), 3);\n\n servers.insert(String::from_str(\"whois.iana.org\").unwrap(), 4);\n\n servers.insert(String::from_str(\"whois.lacnic.net\").unwrap(), 5);\n\n servers.insert(String::from_str(\"whois.ripe.net\").unwrap(), 6);\n\n\n\n let mut ipv4_entries: Vec<(Ipv4Net, u32)> = Vec::new();\n\n let mut ipv6_entries: Vec<(Ipv6Net, u32)> = Vec::new();\n\n let mut asn_entries: Vec<(AsnRange, u32)> = Vec::new();\n\n\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(ipv4_entries.clone());\n\n let ipv6_lookup: Ipv6ResourceLookup =\n\n ResourceLookup::from_iter(ipv6_entries.clone());\n", "file_path": "tests/test_data_iana.rs", "rank": 34, "score": 29636.568551740987 }, { "content": " #[test]\n\n fn delegated() {\n\n let delegated = Delegated::new();\n\n let mut servers: HashMap<String, u32> = HashMap::new();\n\n servers.insert(String::from_str(\"\").unwrap(), 0);\n\n servers.insert(String::from_str(\"whois.afrinic.net\").unwrap(), 1);\n\n servers.insert(String::from_str(\"whois.apnic.net\").unwrap(), 2);\n\n servers.insert(String::from_str(\"whois.arin.net\").unwrap(), 3);\n\n servers.insert(String::from_str(\"whois.iana.org\").unwrap(), 4);\n\n servers.insert(String::from_str(\"whois.lacnic.net\").unwrap(), 5);\n\n servers.insert(String::from_str(\"whois.ripe.net\").unwrap(), 6);\n\n\n\n let mut ipv4_entries: Vec<(Ipv4Net, u32)> = Vec::new();\n\n let mut ipv6_entries: Vec<(Ipv6Net, u32)> = Vec::new();\n\n let mut asn_entries: Vec<(AsnRange, u32)> = Vec::new();\n\n\n\n let ipv4_lookup: Ipv4ResourceLookup =\n\n ResourceLookup::from_iter(ipv4_entries.clone());\n\n let ipv6_lookup: Ipv6ResourceLookup =\n\n ResourceLookup::from_iter(ipv6_entries.clone());\n", "file_path": 
"tests/test_data_delegated.rs", "rank": 35, "score": 29636.568551740987 }, { "content": "fn ipv4_increment(address: Ipv4Addr) -> Ipv4Addr {\n\n Ipv4Addr::from(to_u32(address).wrapping_add(1))\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 36, "score": 28918.315859219107 }, { "content": "fn ipv6_increment(address: Ipv6Addr) -> Ipv6Addr {\n\n let as_num = Emu128::from(address);\n\n if as_num == Emu128::max_value() {\n\n Emu128::min_value().into()\n\n } else {\n\n as_num.saturating_add(Emu128::from(1)).into()\n\n }\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 37, "score": 28918.315859219107 }, { "content": "fn ipv4_decrement(address: Ipv4Addr) -> Ipv4Addr {\n\n Ipv4Addr::from(to_u32(address).wrapping_sub(1))\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 38, "score": 28918.315859219107 }, { "content": "fn ipv6_decrement(address: Ipv6Addr) -> Ipv6Addr {\n\n let as_num = Emu128::from(address);\n\n if as_num == Emu128::min_value() {\n\n Emu128::max_value().into()\n\n } else {\n\n as_num.saturating_sub(Emu128::from(1)).into()\n\n }\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 39, "score": 28918.315859219107 }, { "content": "fn largest_prefix_length(address: Ipv4Addr) -> u32 {\n\n let num = to_u32(address);\n\n let mut length = 8;\n\n while num % (u32::pow(2, 32 - length)) != 0 {\n\n length = length + 1;\n\n }\n\n return length;\n\n}\n\n\n", "file_path": "src/data/delegated.rs", "rank": 40, "score": 28202.850955390975 }, { "content": "pub fn run(default_server_option: Option<String>,\n\n port_option: Option<String>) {\n\n let default_server =\n\n match default_server_option {\n\n Some(hostname) => hostname,\n\n None => \"whois.iana.org\".to_owned()\n\n };\n\n let port =\n\n match port_option {\n\n Some(port) => port,\n\n None => \"4343\".to_owned()\n\n };\n\n\n\n let mut core = Core::new().unwrap();\n\n let handle = core.handle();\n\n\n\n info!(\"Loading data\");\n\n {\n\n let _unused =\n\n CONTEXT.lock().unwrap().ipv4.get_longest_match(\n", 
"file_path": "src/server.rs", "rank": 41, "score": 27907.439180672234 }, { "content": "fn handle_asn<T: ResourceLookup<AsnRange, u32>>(\n\n record: &csv::StringRecord,\n\n server: u32,\n\n asn_lookup: &T,\n\n asn_entries: &mut Vec<(AsnRange, u32)>) {\n\n\n\n let asn_str = record.get(3).unwrap();\n\n let count = record.get(4).unwrap();\n\n if let Ok(start_asn) = u32::from_str(asn_str) {\n\n let end_asn = start_asn + (u32::from_str(count).unwrap());\n\n let asn_range = AsnRange { start: Asn { value: start_asn },\n\n end: Asn { value: end_asn } };\n\n match asn_lookup.get_longest_match(asn_range) {\n\n Some((_, lm_server)) => {\n\n if server != lm_server {\n\n asn_entries.push((asn_range, server));\n\n }\n\n },\n\n None => {\n\n asn_entries.push((asn_range, server));\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/data/delegated.rs", "rank": 42, "score": 26531.766670405443 }, { "content": "fn handle_ipv4<T: ResourceLookup<Ipv4Net, u32>>(\n\n record: &csv::StringRecord,\n\n server: u32,\n\n ipv4_lookup: &T,\n\n ipv4_entries: &mut Vec<(Ipv4Net, u32)>) {\n\n let address_str = record.get(3).unwrap();\n\n let size_str = record.get(4).unwrap();\n\n if let Ok(address) = Ipv4Addr::from_str(address_str) {\n\n let prefixes = to_prefixes(address, u32::from_str(size_str).unwrap());\n\n for &(prefix_address, prefix_length) in prefixes.iter() {\n\n let net = Ipv4Net::new(prefix_address, prefix_length).unwrap();\n\n match ipv4_lookup.get_longest_match(net) {\n\n Some((_, lm_server)) => {\n\n if server != lm_server {\n\n ipv4_entries.push((net, server));\n\n }\n\n },\n\n None => {\n\n ipv4_entries.push((net, server));\n\n },\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/data/delegated.rs", "rank": 43, "score": 26531.766670405443 }, { "content": "fn handle_ipv6<T: ResourceLookup<Ipv6Net, u32>>(\n\n record: &csv::StringRecord,\n\n server: u32,\n\n ipv6_lookup: &T,\n\n ipv6_entries: &mut Vec<(Ipv6Net, u32)>) {\n\n let address_str = record.get(3).unwrap();\n\n let prefix_length_str 
= record.get(4).unwrap();\n\n if let Ok(address) = Ipv6Addr::from_str(address_str) {\n\n let prefix_length = u8::from_str(prefix_length_str).unwrap();\n\n let net = Ipv6Net::new(address, prefix_length).unwrap();\n\n match ipv6_lookup.get_longest_match(net) {\n\n Some((_, lm_server)) => {\n\n if server != lm_server {\n\n ipv6_entries.push((net, server));\n\n }\n\n },\n\n None => {\n\n ipv6_entries.push((net, server));\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/data/delegated.rs", "rank": 44, "score": 26531.766670405443 }, { "content": "fn parse_delegated_data<T1: ResourceLookup<Ipv4Net, u32>,\n\n T2: ResourceLookup<Ipv6Net, u32>,\n\n T3: ResourceLookup<AsnRange, u32>>(\n\n ipv4_lookup: &T1, ipv6_lookup: &T2, asn_lookup: &T3,\n\n server: u32, path: &str,\n\n ipv4_entries: &mut Vec<(Ipv4Net, u32)>,\n\n ipv6_entries: &mut Vec<(Ipv6Net, u32)>,\n\n asn_entries: &mut Vec<(AsnRange, u32)>) {\n\n let file = File::open(path).unwrap();\n\n let mut csv_reader = csv::ReaderBuilder::new()\n\n .delimiter(b'|')\n\n .flexible(true)\n\n .has_headers(false)\n\n .from_reader(file);\n\n csv_reader.records()\n\n .filter(|i| i.is_ok())\n\n .map(|i| i.unwrap())\n\n .filter(|i| i.len() > 1)\n\n .for_each(|i| {\n\n let record_type = i.get(2).unwrap();\n", "file_path": "src/data/delegated.rs", "rank": 45, "score": 25920.65286638766 }, { "content": "pub mod iana;\n\npub mod delegated;\n\npub mod processor;\n\n\n\nextern crate csv;\n\nextern crate intervaltree;\n\nextern crate ipnet;\n\nextern crate treebitmap;\n\n\n\nuse super::lookup::AsnRange;\n\nuse super::lookup::AsnResourceLookup;\n\nuse super::lookup::Ipv4ResourceLookup;\n\nuse super::lookup::Ipv6ResourceLookup;\n\nuse super::lookup::ResourceLookup;\n\n\n\nuse self::delegated::Delegated;\n\nuse self::iana::Iana;\n\nuse self::ipnet::Ipv4Net;\n\nuse self::ipnet::Ipv6Net;\n\nuse self::processor::Processor;\n\n\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse std::iter::FromIterator;\n\nuse 
std::str::FromStr;\n\n\n", "file_path": "src/data/mod.rs", "rank": 46, "score": 25143.75799590577 }, { "content": " });\n\n\n\n let mut output_file = File::create(asn_path).unwrap();\n\n asn_entries.iter().for_each(|&(ref asn_range, index)| {\n\n /* Account for overflow in the last ASN. */\n\n let final_asn =\n\n if asn_range.end.value == 0 {\n\n 4294967295\n\n } else {\n\n asn_range.end.value - 1\n\n };\n\n let line = format!(\"{}-{},{}\\n\",\n\n asn_range.start.value,\n\n final_asn,\n\n get_reverse_server(index));\n\n output_file.write_all(line.as_bytes()).unwrap();\n\n });\n\n}\n\n\n", "file_path": "src/data/mod.rs", "rank": 47, "score": 25132.709271632903 }, { "content": " &mut ipv4_entries, &mut ipv6_entries,\n\n &mut asn_entries);\n\n }\n\n\n\n let mut reverse_servers = Vec::from_iter(servers.keys());\n\n reverse_servers.sort();\n\n let get_reverse_server = |i| {\n\n reverse_servers.get(i as usize).unwrap()\n\n };\n\n\n\n let mut output_file = File::create(ipv4_path).unwrap();\n\n ipv4_entries.iter().for_each(|&(ipv4_net, index)| {\n\n let line = format!(\"{},{}\\n\", ipv4_net, get_reverse_server(index));\n\n output_file.write_all(line.as_bytes()).unwrap();\n\n });\n\n\n\n let mut output_file = File::create(ipv6_path).unwrap();\n\n ipv6_entries.iter().for_each(|&(ipv6_net, index)| {\n\n let line = format!(\"{},{}\\n\", ipv6_net, get_reverse_server(index));\n\n output_file.write_all(line.as_bytes()).unwrap();\n", "file_path": "src/data/mod.rs", "rank": 48, "score": 25130.111229634113 }, { "content": "fn ipv4_range_size(start: Ipv4Addr, end: Ipv4Addr) -> u32 {\n\n to_u32(end).wrapping_sub(to_u32(start))\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 49, "score": 25078.441552877513 }, { "content": "fn ipv6_range_size(start: Ipv6Addr, end: Ipv6Addr) -> Emu128 {\n\n Emu128::from(ipv6_decrement(end)).saturating_sub(Emu128::from(start))\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 50, "score": 25078.441552877513 }, { "content": "fn to_prefixes(address: 
Ipv4Addr, hosts: u32) -> Vec<(Ipv4Addr, u8)> {\n\n let mut prefixes: Vec<(Ipv4Addr, u8)> = Vec::new();\n\n let mut current_address = address;\n\n let mut remaining_hosts = hosts;\n\n\n\n while remaining_hosts > 0 {\n\n let prefix_length: u32 = 32 - ((remaining_hosts as f32).log2() as u32);\n\n let new_prefix_length =\n\n max(prefix_length,\n\n largest_prefix_length(current_address));\n\n prefixes.push((current_address, new_prefix_length as u8));\n\n\n\n let new_hosts = u32::pow(2, 32 - new_prefix_length);\n\n remaining_hosts = remaining_hosts - new_hosts;\n\n\n\n current_address = Ipv4Addr::from(to_u32(current_address) + new_hosts);\n\n }\n\n\n\n return prefixes;\n\n}\n\n\n", "file_path": "src/data/delegated.rs", "rank": 51, "score": 23188.008278516827 }, { "content": "## owhois\n\n\n\n[![Build Status](https://github.com/tomhrr/owhois/workflows/build/badge.svg?branch=master)](https://github.com/tomhrr/owhois/actions)\n\n\n\nowhois is a Whois proxy server for IP address and ASN queries. It\n\nsupports the following types of queries:\n\n\n\n * single IP addresses (e.g. 192.0.2.0)\n\n * IP address prefixes (e.g. 192.0.2.0/24)\n\n * single ASNs (e.g. AS64496)\n\n * ASN ranges (e.g. 
AS64496-AS64511)\n\n\n\n### Build\n\n\n\n # Locally.\n\n $ cargo build --release\n\n\n\n # With Docker.\n\n $ docker build -t owhois .\n\n\n\n### Usage\n\n\n\n # Locally.\n\n $ mkdir data\n\n $ ./get-public-data\n\n $ ./target/release/process-public-data\n\n $ RUST_LOG=info ./target/release/owhois\n\n\n\n # With Docker.\n\n $ mkdir data\n\n $ docker run -it -v $(pwd)/data:/data owhois /bin/sh\n\n / # get-public-data\n\n / # process-public-data\n\n / # exit\n\n $ docker run -v $(pwd)/data:/data -p 4343:4343 -e RUST_LOG=info owhois\n\n\n\n # With Docker (Docker Hub).\n\n $ docker pull tomhrr/owhois:latest\n\n $ mkdir data\n\n $ docker run -it -v $(pwd)/data:/data tomhrr/owhois:latest /bin/sh\n\n / # get-public-data\n\n / # process-public-data\n\n / # exit\n\n $ docker run -v $(pwd)/data:/data -p 4343:4343 -e RUST_LOG=info tomhrr/owhois:latest\n\n\n\n # With Helm. By default, this will refresh the address data once\n\n # per day.\n\n $ cd chart/owhois\n\n $ helm package owhois\n\n $ helm install owhois\n\n\n\n # Example client usage.\n\n $ whois -hlocalhost -p4343 1.0.0.0/8\n\n\n", "file_path": "README.md", "rank": 52, "score": 16845.430072445022 }, { "content": "### Configuration\n\n\n\nThe mapping from IP/ASN to server is configured via CSV files with the\n\nnames `data/ipv4`, `data/ipv6`, and `data/asn`, relative to the\n\ndirectory of server execution. These files must be generated before\n\nrunning the server. Each contains IP address prefixes or ASN ranges\n\nin the first column and server names in the second column (single IP\n\naddresses and ASNs are not supported within these files). CSV headers\n\nshould not be included in these files.\n\n\n\nThe `Usage` instructions above generate CSV files based on the\n\ndelegation data published by IANA and the RIRs, mapping to their Whois\n\nservers as appropriate.\n\n\n\nThe server monitors the mapping data files for changes. 
If changes\n\nare detected, then the server reloads the mapping data.\n\n\n\nBy default, queries that are not handled by the server will be passed\n\nthrough to `whois.iana.org`. To change the server used for these\n\ncases, pass the `--default-server` command line option when starting\n\nthe server.\n\n\n\n### Bugs/problems/suggestions\n\n\n\nSee the [GitHub issue tracker](https://github.com/tomhrr/owhois/issues).\n\n\n\n### Licence\n\n\n\nSee LICENCE.\n", "file_path": "README.md", "rank": 53, "score": 16844.218697705313 }, { "content": "extern crate csv;\n\nextern crate intervaltree;\n\nextern crate ipnet;\n\nextern crate treebitmap;\n\nextern crate time;\n\n\n\nuse self::intervaltree::IntervalTree;\n\nuse self::ipnet::Ipv4Net;\n\nuse self::ipnet::Ipv6Net;\n\nuse self::ipnet::Emu128;\n\n\n\nuse std::iter::FromIterator;\n\nuse std::iter::Iterator;\n\nuse std::net::Ipv4Addr;\n\nuse std::net::Ipv6Addr;\n\nuse std::ops::Add;\n\nuse std::ops::Range;\n\nuse std::ops::Shr;\n\nuse std::ops::Sub;\n\n\n", "file_path": "src/lookup.rs", "rank": 54, "score": 12.694927178849873 }, { "content": "#[macro_use]\n\nextern crate log;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\npub mod context;\n\npub mod data;\n\npub mod lookup;\n\npub mod server;\n", "file_path": "src/lib.rs", "rank": 55, "score": 12.371945745261304 }, { "content": "extern crate csv;\n\nextern crate intervaltree;\n\nextern crate ipnet;\n\nextern crate rand;\n\nextern crate regex;\n\n\n\nuse super::lookup::Asn;\n\nuse super::lookup::AsnRange;\n\nuse super::lookup::AsnResourceLookup;\n\nuse super::lookup::Ipv4ResourceLookup;\n\nuse super::lookup::Ipv6ResourceLookup;\n\nuse super::lookup::ResourceLookup;\n\n\n\nuse self::ipnet::Ipv4Net;\n\nuse self::ipnet::Ipv6Net;\n\nuse self::regex::Regex;\n\n\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::fs::File;\n", "file_path": "src/context.rs", "rank": 56, "score": 12.10257567731786 }, { "content": "extern crate csv;\n\nextern crate 
intervaltree;\n\nextern crate ipnet;\n\nextern crate treebitmap;\n\n\n\nuse super::super::lookup::Asn;\n\nuse super::super::lookup::AsnRange;\n\nuse super::super::lookup::AsnResourceLookup;\n\nuse super::super::lookup::Ipv4ResourceLookup;\n\nuse super::super::lookup::Ipv6ResourceLookup;\n\nuse super::processor::Processor;\n\n\n\nuse self::ipnet::Ipv4Net;\n\nuse self::ipnet::Ipv6Net;\n\n\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::net::Ipv4Addr;\n\nuse std::str::FromStr;\n\n\n\npub struct Iana {}\n\n\n", "file_path": "src/data/iana.rs", "rank": 57, "score": 11.54263775291462 }, { "content": "extern crate futures;\n\nextern crate ipnet;\n\nextern crate notify;\n\nextern crate tokio_core;\n\nextern crate tokio_io;\n\n\n\nuse super::context::Context;\n\nuse super::lookup::ResourceLookup;\n\n\n\nuse self::futures::{Future, Stream, Poll};\n\nuse self::ipnet::Ipv4Net;\n\nuse self::notify::{PollWatcher, Watcher, RecursiveMode,\n\n DebouncedEvent};\n\nuse self::tokio_core::net::{TcpListener, TcpStream};\n\nuse self::tokio_core::reactor::Core;\n\nuse self::tokio_io::{AsyncRead, AsyncWrite};\n\nuse self::tokio_io::io::{copy, shutdown, lines, write_all};\n\n\n\nuse std::io::{self, Read, Write, BufReader};\n\nuse std::net::{Shutdown, ToSocketAddrs};\n", "file_path": "src/server.rs", "rank": 58, "score": 11.14518199288603 }, { "content": "extern crate csv;\n\nextern crate intervaltree;\n\nextern crate ipnet;\n\nextern crate treebitmap;\n\n\n\nuse super::super::lookup::Asn;\n\nuse super::super::lookup::AsnRange;\n\nuse super::super::lookup::AsnResourceLookup;\n\nuse super::super::lookup::Ipv4ResourceLookup;\n\nuse super::super::lookup::Ipv6ResourceLookup;\n\nuse super::super::lookup::ResourceLookup;\n\nuse super::processor::Processor;\n\n\n\nuse self::ipnet::Ipv4Net;\n\nuse self::ipnet::Ipv6Net;\n\n\n\nuse std::cmp::max;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::net::Ipv4Addr;\n\nuse std::net::Ipv6Addr;\n\nuse 
std::str::FromStr;\n\n\n\npub struct Delegated {}\n\n\n", "file_path": "src/data/delegated.rs", "rank": 59, "score": 11.11020388518768 }, { "content": "extern crate ipnet;\n\n\n\nuse super::super::lookup::AsnRange;\n\nuse super::super::lookup::AsnResourceLookup;\n\nuse super::super::lookup::Ipv4ResourceLookup;\n\nuse super::super::lookup::Ipv6ResourceLookup;\n\n\n\nuse self::ipnet::Ipv4Net;\n\nuse self::ipnet::Ipv6Net;\n\n\n\nuse std::collections::HashMap;\n\n\n", "file_path": "src/data/processor.rs", "rank": 60, "score": 10.068843710973841 }, { "content": "\n\n Context {\n\n ipv4: ResourceLookup::from_iter(ipv4_entries),\n\n ipv6: ResourceLookup::from_iter(ipv6_entries),\n\n asn: ResourceLookup::from_iter(asn_entries),\n\n servers: final_servers,\n\n }\n\n }\n\n\n\n pub fn lookup(&self, value: &str) -> Option<&String> {\n\n match self.lookup_internal(value) {\n\n Some(server_index) => {\n\n self.servers.get(server_index as usize)\n\n },\n\n None => {\n\n None\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/context.rs", "rank": 61, "score": 4.85133578894413 }, { "content": " }\n\n }\n\n\n\n fn get_longest_match_value(&self, asrange: AsnRange)\n\n -> Option<u32> {\n\n match self.get_longest_match(asrange) {\n\n Some((_, value)) => Some(value),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_iter<I: IntoIterator<Item=(AsnRange, u32)>>(values: I)\n\n -> AsnIntervalTree {\n\n let interval_tree: IntervalTree<Asn, u32> =\n\n FromIterator::from_iter(\n\n values.into_iter()\n\n .map(|(r, v)| { (Range { start: r.start, end: r.end }, v)})\n\n );\n\n AsnIntervalTree {\n\n last_values:\n", "file_path": "src/lookup.rs", "rank": 62, "score": 4.808633119147251 }, { "content": " }\n\n }\n\n\n\n fn get_longest_match_value(&self, net: Ipv6Net)\n\n -> Option<u32> {\n\n match self.get_longest_match(net) {\n\n Some((_, value)) => Some(value),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_iter<I: IntoIterator<Item=(Ipv6Net, u32)>>(values: I)\n\n -> Ipv6IntervalTree {\n\n let interval_tree: 
IntervalTree<Ipv6Addr, u32> =\n\n FromIterator::from_iter(\n\n values.into_iter()\n\n .map(|(r, v)| {\n\n (Range { start: r.addr(),\n\n end: ipv6_increment(r.broadcast()) }, v)\n\n })\n", "file_path": "src/lookup.rs", "rank": 63, "score": 4.725534725931132 }, { "content": " && (i.range.end.value.wrapping_sub(1)\n\n >= asrange.end.value.wrapping_sub(1)) })\n\n .map(|i| { (AsnRange { start: i.range.start,\n\n end: i.range.end }, i.value) })\n\n .collect();\n\n let mut matching_last_values =\n\n self.last_values\n\n .iter()\n\n .filter(|i| { i.0.start <= asrange.start })\n\n .map(|i| { i.clone() })\n\n .collect();\n\n response.append(&mut matching_last_values);\n\n response.sort_by(|a, b| { let a_diff = a.0.end - a.0.start;\n\n let b_diff = b.0.end - b.0.start;\n\n a_diff.cmp(&b_diff) });\n\n\n\n match response.len() >= 1 {\n\n true => { Some((Some(response.get(0).unwrap().0),\n\n response.get(0).unwrap().1)) }\n\n false => None\n", "file_path": "src/lookup.rs", "rank": 64, "score": 4.509464191349982 }, { "content": " return self.ipv6.get_longest_match_value(ipv6_net);\n\n }\n\n\n\n let asn_regex = Regex::new(r\"^(?i)AS(\\d+)$\").unwrap();\n\n if let Some(captures) = asn_regex.captures(value) {\n\n if let Ok(asn_value) = u32::from_str(captures.get(1).unwrap().as_str()) {\n\n return self.asn.get_longest_match_value(\n\n AsnRange{ start: Asn { value: asn_value as u32 },\n\n end: Asn { value: (asn_value as u32) + 1 }}\n\n );\n\n }\n\n }\n\n\n\n let asn_range_regex = Regex::new(r\"^(?i)AS(\\d+)\\s*-AS(\\d+)$\").unwrap();\n\n if let Some(captures) = asn_range_regex.captures(value) {\n\n if let Ok(asn_value_start) = u32::from_str(captures.get(1).unwrap().as_str()) {\n\n if let Ok(asn_value_end) = u32::from_str(captures.get(2).unwrap().as_str()) {\n\n return self.asn.get_longest_match_value(\n\n AsnRange { start: Asn { value: asn_value_start as u32 },\n\n end: Asn { value: asn_value_end as u32 } }\n\n );\n\n }\n\n }\n\n }\n\n\n\n return None;\n\n }\n\n}\n", "file_path": 
"src/context.rs", "rank": 65, "score": 4.407640605603377 }, { "content": " -> Option<u32> {\n\n match self.get_longest_match(net) {\n\n Some((_, value)) => Some(value),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_iter<I: IntoIterator<Item=(Ipv4Net, u32)>>(values: I)\n\n -> Ipv4IntervalTree {\n\n let interval_tree: IntervalTree<Ipv4Addr, u32> =\n\n FromIterator::from_iter(\n\n values.into_iter()\n\n .map(|(r, v)| {\n\n (Range { start: r.addr(),\n\n end: ipv4_increment(r.broadcast()) }, v)})\n\n );\n\n Ipv4IntervalTree {\n\n last_values:\n\n interval_tree.iter()\n\n .filter(|i| { i.range.end == ipv4_min_addr() })\n\n .map(|i| { (Range { start: i.range.start,\n\n end: i.range.end }, i.value) })\n\n .collect(),\n\n interval_tree: interval_tree,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lookup.rs", "rank": 66, "score": 4.398414678090715 }, { "content": "use std::ops::Sub;\n\nuse std::str::FromStr;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse std::sync::mpsc::channel;\n\nuse std::thread;\n\nuse std::time::{Duration, Instant};\n\n\n\nconst RELOAD_DELAY: u64 = 15;\n\nconst POLL_PERIOD: u64 = 5;\n\n\n\nlazy_static! 
{\n\n static ref CONTEXT: Arc<Mutex<Context>> = {\n\n Arc::new(Mutex::new(Context::from_files(\"data/ipv4\",\n\n \"data/ipv6\",\n\n \"data/asn\")))\n\n };\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 67, "score": 3.3824352842710685 }, { "content": " let b_size = ipv4_range_size(b.0.start, b.0.end);\n\n a_size.cmp(&b_size) }\n\n );\n\n\n\n match response.len() >= 1 {\n\n true => {\n\n let entry = response.get(0).unwrap();\n\n let range = &entry.0;\n\n\n\n let host_count = ipv4_range_size(range.start, range.end);\n\n let prefix_length: u32 = 32 - ((host_count as f32).log2() as u32);\n\n\n\n Some((Some(Ipv4Net::new(range.start, prefix_length as u8).unwrap()),\n\n entry.1))\n\n },\n\n false => None\n\n }\n\n }\n\n\n\n fn get_longest_match_value(&self, net: Ipv4Net)\n", "file_path": "src/lookup.rs", "rank": 68, "score": 3.1027672860975675 }, { "content": " fn add(self, other: Asn) -> Asn {\n\n Asn { value: self.value + other.value }\n\n }\n\n}\n\n\n\nimpl Sub for Asn {\n\n type Output = Asn;\n\n fn sub(self, other: Asn) -> Asn {\n\n Asn { value: self.value - other.value }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]\n\npub struct AsnRange {\n\n pub start: Asn,\n\n pub end: Asn,\n\n}\n\n\n\npub struct AsnIntervalTree {\n\n interval_tree: IntervalTree<Asn, u32>,\n", "file_path": "src/lookup.rs", "rank": 69, "score": 3.051457540678362 }, { "content": " end: Asn { value: end + 1 } },\n\n index));\n\n },\n\n false => {\n\n let num = u32::from_str(range).unwrap();\n\n entries.push((AsnRange { start: Asn { value: num },\n\n end: Asn { value: num + 1 } },\n\n index));\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/data/iana.rs", "rank": 70, "score": 2.9882767414893925 }, { "content": " entries.push((AsnRange { start: Asn { value: start },\n\n end: Asn { value: end + 1 } },\n\n index));\n\n }\n\n },\n\n false => {\n\n let num = u32::from_str(range).unwrap();\n\n if num >= 65536 {\n\n entries.push((AsnRange { start: Asn { value: 
num },\n\n end: Asn { value: num.wrapping_add(1) } },\n\n index));\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n\nimpl Processor for Iana {\n\n fn new() -> Iana { Iana {} }\n\n fn run(&self,\n", "file_path": "src/data/iana.rs", "rank": 71, "score": 2.895919650315514 }, { "content": " pub fn lookup_internal(&self, value: &str) -> Option<u32> {\n\n let ipv4_address_result = Ipv4Addr::from_str(value);\n\n if let Ok(ipv4_address) = ipv4_address_result {\n\n let ipv4_net = Ipv4Net::new(ipv4_address, 32).unwrap();\n\n return self.ipv4.get_longest_match_value(ipv4_net);\n\n }\n\n\n\n let ipv4_net_result = Ipv4Net::from_str(value);\n\n if let Ok(ipv4_net) = ipv4_net_result {\n\n return self.ipv4.get_longest_match_value(ipv4_net);\n\n }\n\n\n\n let ipv6_address_result = Ipv6Addr::from_str(value);\n\n if let Ok(ipv6_address) = ipv6_address_result {\n\n let ipv6_net = Ipv6Net::new(ipv6_address, 128).unwrap();\n\n return self.ipv6.get_longest_match_value(ipv6_net);\n\n }\n\n\n\n let ipv6_net_result = Ipv6Net::from_str(value);\n\n if let Ok(ipv6_net) = ipv6_net_result {\n", "file_path": "src/context.rs", "rank": 72, "score": 2.860891425213397 }, { "content": "use std::iter::FromIterator;\n\nuse std::net::Ipv4Addr;\n\nuse std::net::Ipv6Addr;\n\nuse std::str::FromStr;\n\n\n\npub struct Context {\n\n pub ipv4: Ipv4ResourceLookup,\n\n pub ipv6: Ipv6ResourceLookup,\n\n pub asn: AsnResourceLookup,\n\n pub servers: Vec<String>,\n\n}\n\n\n\nimpl Context {\n\n pub fn from_files(ipv4_file: &str,\n\n ipv6_file: &str,\n\n asn_file: &str) -> Context {\n\n let files: Vec<&str> = vec![ipv4_file,ipv6_file,asn_file];\n\n let mut servers: HashSet<String> = HashSet::new();\n\n files.iter().for_each(|s| {\n\n let file = File::open(s).unwrap();\n", "file_path": "src/context.rs", "rank": 73, "score": 2.7136776756543197 }, { "content": " start: net.addr(),\n\n end: range_end\n\n })\n\n };\n\n\n\n let mut response: Vec<(Range<Ipv6Addr>, u32)> =\n\n iter.filter(|i| { (i.range.start <= net.addr())\n\n && 
(ipv6_decrement(i.range.end) >= end) })\n\n .map(|i| { (Range { start: i.range.start,\n\n end: i.range.end }, i.value) })\n\n .collect();\n\n let mut matching_last_values =\n\n self.last_values\n\n .iter()\n\n .filter(|i| { i.0.start <= net.addr() })\n\n .map(|i| { i.clone() })\n\n .collect();\n\n response.append(&mut matching_last_values);\n\n response.sort_by(\n\n |a, b| { let a_size = ipv6_range_size(a.0.start, a.0.end);\n", "file_path": "src/lookup.rs", "rank": 74, "score": 2.6880541503280524 }, { "content": " start: net.addr(),\n\n end: range_end\n\n })\n\n };\n\n\n\n let mut response: Vec<(Range<Ipv4Addr>, u32)> =\n\n iter.filter(|i| { (i.range.start <= net.addr())\n\n && (ipv4_decrement(i.range.end) >= end) })\n\n .map(|i| { (Range { start: i.range.start,\n\n end: i.range.end }, i.value) })\n\n .collect();\n\n let mut matching_last_values =\n\n self.last_values\n\n .iter()\n\n .filter(|i| { i.0.start <= net.addr() })\n\n .map(|i| { i.clone() })\n\n .collect();\n\n response.append(&mut matching_last_values);\n\n response.sort_by(\n\n |a, b| { let a_size = ipv4_range_size(a.0.start, a.0.end);\n", "file_path": "src/lookup.rs", "rank": 75, "score": 2.6880541503280524 }, { "content": " );\n\n Ipv6IntervalTree {\n\n last_values:\n\n interval_tree.iter()\n\n .filter(|i| { i.range.end == ipv6_min_addr() })\n\n .map(|i| { (Range { start: i.range.start,\n\n end: i.range.end }, i.value) })\n\n .collect(),\n\n interval_tree: interval_tree,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]\n\npub struct Asn {\n\n pub value: u32\n\n}\n\n\n\nimpl Add for Asn {\n\n type Output = Asn;\n", "file_path": "src/lookup.rs", "rank": 76, "score": 2.583982061933688 }, { "content": " interval_tree.iter()\n\n .filter(|i| { i.range.end == Asn { value: 0 } })\n\n .map(|i| { (AsnRange { start: i.range.start,\n\n end: i.range.end }, i.value) })\n\n .collect(),\n\n interval_tree: interval_tree,\n\n }\n\n }\n\n}\n\n\n\npub type Ipv4ResourceLookup = 
Ipv4IntervalTree;\n\npub type Ipv6ResourceLookup = Ipv6IntervalTree;\n\npub type AsnResourceLookup = AsnIntervalTree;\n", "file_path": "src/lookup.rs", "rank": 77, "score": 2.311210449006398 }, { "content": " last_values: Vec<(AsnRange, u32)>,\n\n}\n\n\n\nimpl ResourceLookup<AsnRange, u32>\n\n for AsnIntervalTree {\n\n fn get_longest_match(&self, asrange: AsnRange)\n\n -> Option<(Option<AsnRange>, u32)> {\n\n let tree = &self.interval_tree;\n\n let difference = (asrange.end.value - 1) - asrange.start.value;\n\n let iter =\n\n match difference == 0 {\n\n true => IntervalTree::query_point(tree, asrange.start),\n\n false => IntervalTree::query(tree, Range {\n\n start: asrange.start,\n\n end: asrange.end\n\n })\n\n };\n\n\n\n let mut response: Vec<(AsnRange, u32)> =\n\n iter.filter(|i| { (i.range.start <= asrange.start)\n", "file_path": "src/lookup.rs", "rank": 78, "score": 2.309068197038672 }, { "content": " .filter(|i| i.is_ok())\n\n .map(|i| i.unwrap())\n\n .for_each(|i| { let range = i.get(0).unwrap();\n\n let server = i.get(1).unwrap();\n\n if server.is_empty() {\n\n return;\n\n }\n\n let els: Vec<&str> = range.split(\"-\").collect();\n\n let start = u32::from_str(els.get(0).unwrap()).unwrap();\n\n let end = u32::from_str(els.get(1).unwrap()).unwrap();\n\n asn_entries.push(\n\n (AsnRange { start: Asn { value: start },\n\n end: Asn { value: end + 1 } },\n\n *(servers.get(server).unwrap()))\n\n ); });\n\n\n\n let mut final_servers = Vec::new();\n\n for i in reverse_servers.iter() {\n\n final_servers.push(i.clone());\n\n }\n", "file_path": "src/context.rs", "rank": 79, "score": 2.057276874845342 }, { "content": " .into_future()\n\n .map_err(|e| e.0).\n\n and_then(move |(line, _)| {\n\n let mut line_data = line.unwrap();\n\n let line_data_original = line_data.clone();\n\n let server;\n\n {\n\n let inner_context = CONTEXT.lock().unwrap();\n\n let inner_server = match inner_context.lookup(&line_data) {\n\n Some(server) => {\n\n info!(\"'{}' from {} redirecting to 
{}\",\n\n &line_data, client_addr, server);\n\n server\n\n },\n\n None => {\n\n info!(\"'{}' from {} not handled, redirecting to {}\",\n\n &line_data, client_addr, &default_server_);\n\n &default_server_\n\n }\n\n };\n", "file_path": "src/server.rs", "rank": 80, "score": 1.655334822101791 }, { "content": " let b_size = ipv6_range_size(b.0.start, b.0.end);\n\n a_size.cmp(&b_size) }\n\n );\n\n\n\n match response.len() >= 1 {\n\n true => {\n\n let entry = response.get(0).unwrap();\n\n let range = &entry.0;\n\n\n\n let mut host_count = ipv6_range_size(range.start, range.end);\n\n let mut prefix_length = 0;\n\n while host_count != Emu128::from(0) {\n\n host_count = host_count.shr(1);\n\n prefix_length += 1;\n\n }\n\n\n\n Some((Some(Ipv6Net::new(range.start, prefix_length as u8).unwrap()),\n\n entry.1))\n\n },\n\n false => None\n", "file_path": "src/lookup.rs", "rank": 81, "score": 1.6398168663714472 } ]
Rust
src/adjacency/graph/compressed.rs
colin-daniels/agnr-ml
fc936cb8b6a68c37dfaf64c74796e0cf795c1bb8
use itertools::Itertools; use rand::prelude::*; use std::iter::FromIterator; use std::ops::Range; use super::Edge; #[derive(Debug, Clone, Eq, PartialEq)] pub struct CompressedGraph<M = ()> { edges: Vec<Edge<M>>, edge_ranges: Vec<Range<usize>>, } impl<M> Default for CompressedGraph<M> { fn default() -> Self { Self { edges: Default::default(), edge_ranges: Default::default(), } } } impl<T, M> Extend<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) { let mut new_edges: Vec<Edge<M>> = iter.into_iter().map(|e| e.into()).collect(); if new_edges.is_empty() { return; } new_edges.sort_unstable_by_key(|e| (e.from, e.to)); let old_edges = self.edges.drain(..); self.edges = Itertools::merge_by(old_edges, new_edges.into_iter(), |a, b| { (a.from, a.to).lt(&(b.from, b.to)) }) .collect(); self.update_edge_ranges(); } } impl<T, M> FromIterator<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { Self::from_edges(iter) } } impl<M> CompressedGraph<M> { pub fn new<I, T>(n_vertices: usize, edges: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::from_edges(edges); ret.resize(n_vertices); ret } pub fn from_edges<I, T>(iter: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::default(); ret.extend(iter); ret } #[inline(always)] pub fn n_vertices(&self) -> usize { self.edge_ranges.len() } #[inline(always)] pub fn n_edges(&self) -> usize { self.edges.len() } pub fn clear(&mut self) { self.resize(0); } pub fn resize(&mut self, new_n_vertices: usize) { let n_vertices = self.n_vertices(); if n_vertices == 0 { self.edges.clear(); self.edge_ranges.clear(); } else if new_n_vertices < n_vertices { self.edge_ranges .resize_with(new_n_vertices, || unreachable!("should be shrinking")); if let Some(last) = self.edge_ranges.last() { let new_n_edges = last.end; self.edges .resize_with(new_n_edges, || unreachable!("should be 
shrinking edges")); } else { unreachable!("handled zero case earlier, should be unreachable") } } else { let n_edges = self.n_edges(); self.edge_ranges .resize_with(new_n_vertices, || n_edges..n_edges); } } #[inline(always)] pub fn vertices(&self) -> impl Iterator<Item = usize> { 0..self.n_vertices() } #[inline] pub fn edges(&self) -> &[Edge<M>] { &self.edges } #[inline(always)] pub fn edges_at(&self, vertex: usize) -> &[Edge<M>] { &self.edges[self.edge_ranges[vertex].clone()] } #[inline(always)] pub fn neighbors<'a>(&'a self, vertex: usize) -> impl Iterator<Item = usize> + 'a { self.neighbors_meta(vertex).map(|(to, _)| to) } #[inline(always)] pub fn neighbors_meta<'a>(&'a self, vertex: usize) -> impl Iterator<Item = (usize, &M)> + 'a { self.edges_at(vertex).iter().map(|e| (e.to, &e.meta)) } #[inline] pub fn random_walk<'a, R: Rng + 'a>( &'a self, start: usize, mut rng: R, ) -> impl Iterator<Item = usize> + 'a { std::iter::successors(Some(start), move |&last| { let range = self.edge_ranges[last].clone(); self.edges[range].choose(&mut rng).map(|e| e.to) }) } pub fn maximum_matching(&self) -> Option<()> { let (_left, _right) = self.bipartite_coloring()?; todo!() } pub fn bipartite_coloring(&self) -> Option<(Vec<usize>, Vec<usize>)> { let (start, _) = self .edge_ranges .iter() .find_position(|&r| r.end > r.start)?; #[derive(Copy, Clone, Eq, PartialEq)] enum Color { Red, Blue, None, } let mut colors = vec![Color::None; self.n_vertices()]; let mut to_visit = vec![self.edge_ranges[start].clone()]; colors[start] = Color::Red; while let Some(edge_ids) = to_visit.last_mut() { match edge_ids.next().map(|id| &self.edges[id]) { Some(edge) => { let next_color = match colors[edge.from] { Color::Red => Color::Blue, Color::Blue => Color::Red, Color::None => unreachable!("error"), }; if colors[edge.to] == Color::None { colors[edge.to] = next_color; to_visit.push(self.edge_ranges[edge.to].clone()); } else if colors[edge.to] != next_color { dbg!("wrong color, not bipartite"); return 
None; } } None => { to_visit.pop(); } } } let mut red = Vec::with_capacity(colors.len()); let mut blue = Vec::with_capacity(colors.len()); for (i, color) in colors.into_iter().enumerate() { match color { Color::Red => red.push(i), Color::Blue => blue.push(i), Color::None => { if !self.edges_at(i).is_empty() { dbg!("couldn't reach all vertices (multiple connected components)"); return None; } } } } Some((red, blue)) } #[doc(hidden)] fn update_edge_ranges(&mut self) { let old_num_vertices = self.n_vertices(); let num_vertices_from_edges = self.edges.last().map(|e| e.from + 1).unwrap_or(0); let mut n_vertices = usize::max(old_num_vertices, num_vertices_from_edges); let mut offsets = Vec::with_capacity(n_vertices + 1); for (i, &Edge { from, to, .. }) in self.edges.iter().enumerate() { n_vertices = usize::max(n_vertices, to + 1); if offsets.len() < from + 1 { offsets.resize(from + 1, i); } } offsets.resize(n_vertices + 1, self.edges.len()); self.edge_ranges = offsets .into_iter() .tuple_windows() .map(|(a, b)| a..b) .collect(); } }
use itertools::Itertools; use rand::prelude::*; use std::iter::FromIterator; use std::ops::Range; use super::Edge; #[derive(Debug, Clone, Eq, PartialEq)] pub struct CompressedGraph<M = ()> { edges: Vec<Edge<M>>, edge_ranges: Vec<Range<usize>>, } impl<M> Default for CompressedGraph<M> { fn default() -> Self { Self { edges: Default::default(), edge_ranges: Default::default(), } } } impl<T, M> Extend<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) { let mut new_edges: Vec<Edge<M>> = iter.into_iter().map(|e| e.into()).collect(); if new_edges.is_empty() { return; } new_edges.sort_unstable_by_key(|e| (e.from, e.to)); let old_edges = self.edges.drain(..); self.edges = Itertools::merge_by(old_edges, new_edges.into_iter(), |a, b| { (a.from, a.to).lt(&(b.from, b.to)) }) .collect(); self.update_edge_ranges(); } } impl<T, M> FromIterator<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { Self::from_edges(iter) } } impl<M> CompressedGraph<M> { pub fn new<I, T>(n_vertices: usize, edges: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::from_edges(edges); ret.resize(n_vertices); ret } pub fn from_edges<I, T>(iter: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::default(); ret.extend(iter); ret } #[inline(always)] pub fn n_vertices(&self) -> usize { self.edge_ranges.len() } #[inline(always)] pub fn n_edges(&self) -> usize { self.edges.len() } pub fn clear(&mut self) { self.resize(0); } pub fn resize(&mut self, new_n_vertices: usize) { let n_vertices = self.n_vertices(); if n_vertices == 0 { self.edges.clear(); self.edge_ranges.clear(); } else if new_n_vertices < n_vertices { self.edge_ranges .resize_with(new_n_vertices, || unreachable!("should be shrinking")); if let Some(last) = self.edge_ranges.last() { let new_n_edges = last.end; self.edges .resize_with(new_n_edges, || unreachable!("should be 
shrinking edges")); } else { unreachable!("handled zero case earlier, should be unreachable") } } else { let n_edges = self.n_edges(); self.edge_ranges .resize_with(new_n_vertices, || n_edges..n_edges); } } #[inline(always)] pub fn vertices(&self) -> impl Iterator<Item = usize> { 0..self.n_vertices() } #[inline] pub fn edges(&self) -> &[Edge<M>] { &self.edges } #[inline(always)] pub fn edges_at(&self, vertex: usize) -> &[Edge<M>] { &self.edges[self.edge_ranges[vertex].clone()] } #[inline(always)] pub fn neighbors<'a>(&'a self, vertex: usize) -> impl Iterator<Item = usize> + 'a { self.neighbors_meta(vertex).map(|(to, _)| to) } #[inline(always)] pub fn neighbors_meta<'a>(&'a self, vertex: usize) -> impl Iterator<Item = (usize, &M)> + 'a { self.edges_at(vertex).iter().map(|e| (e.to, &e.meta)) } #[inline] pub fn random_walk<'a, R: Rng + 'a>( &'a self, start: usize, mut rng: R, ) -> impl Iterator<Item = usize> + 'a { std::iter::successors(Some(start), move |&last| { let range = self.edge_ranges[last].clone(); self.edges[range].choose(&mut rng).map(|e| e.to) }) } pub fn maximum_matching(&self) -> Option<()> { let (_left, _right) = self.bipartite_coloring()?; todo!() } pub fn bipartite_coloring(&self) -> Option<(Vec<usize>, Vec<usize>)> { let (start, _) = self .edge_ranges .iter() .find_position(|&r| r.end > r.start)?; #[derive(Copy, Clone, Eq, PartialEq)] enum Color { Red, Blue, None, } let mut colors = vec![Color::None; self.n_vertices()]; let mut to_visit = vec![self.edge_ranges[start].clone()]; colors[start] = Color::Red; while let Some(edge_ids) = to_visit.last_mut() { match edge_ids.next().map(|id| &self.edges[id]) { Some(edge) => { let next_color = match colors[edge.from] { Color::Red => Color::Blue, Color::Blue => Color::Red, Color::None => unreachable!("error"), }; if colors[edge.to] == Color::None { colors[edge.to] = next_color; to_visit.push(self.edge_ranges[edge.to].clone()); } else if colors[edge.to] != next_color { dbg!("wrong color, not bipartite"); return 
None; } } None => { to_visit.pop(); } } } let mut red = Vec::with_capacity(colors.len()); let mut blue = Vec::with_capacity(colors.len()); for (i, color) in colors.into_iter().enumerate() { match color { Color::Red => red.push(i), Color::Blue => blue.push(i), Color::None => { if !self.edges_at(i).is_empty() { dbg!("couldn't reach all vertices (multiple connected components)"); return None; } } } } Some((red, blue)) } #[doc(hidden)] fn update_edge_ranges(&mut self) { let old_num_vertices = self.n_vertices(); let num_vertices_from_edges = self.edges.last().map(|e| e.from + 1).unwrap_or(0); let mut n_vertices = usize::max(old_num_vertices, num_vertices_from_edges); let mut offsets = Vec::with_capacity(n_vertices + 1); for (i, &Edge { from, to, .. }) in self.edges.iter().enumerate() { n_vertices = usize::max(n_vertices, to + 1);
} offsets.resize(n_vertices + 1, self.edges.len()); self.edge_ranges = offsets .into_iter() .tuple_windows() .map(|(a, b)| a..b) .collect(); } }
if offsets.len() < from + 1 { offsets.resize(from + 1, i); }
if_condition
[ { "content": "#[inline]\n\npub fn iota<I: Idx>(start: I) -> std::iter::Map<RangeFrom<usize>, impl Fn(usize) -> I> {\n\n (start.index()..).map(I::new)\n\n}\n\n\n\n// NOTE: I don't want this to take arbitrary RangeBounds because it would either have\n\n// to use dynamic polymorphism, or panic on RangeFrom.\n", "file_path": "rsp2/src/util/newtype-indices/indexed_vec.rs", "rank": 0, "score": 379569.1226557903 }, { "content": "#[inline]\n\npub fn range<I: Idx>(range: Range<I>) -> std::iter::Map<Range<usize>, impl Fn(usize) -> I> {\n\n (range.start.index()..range.end.index()).map(I::new)\n\n}\n\n\n\n/// # Construction\n\nimpl<I: Idx, T> IndexVec<I, T> {\n\n #[inline]\n\n pub fn from_raw(raw: Vec<T>) -> Self {\n\n IndexVec { raw, _marker: PhantomData }\n\n }\n\n\n\n #[inline]\n\n pub fn new() -> Self {\n\n IndexVec::from_raw(Vec::new())\n\n }\n\n\n\n #[inline]\n\n pub fn from_elem<S>(elem: T, universe: &Indexed<I, [S]>) -> Self\n\n where T: Clone,\n\n {\n", "file_path": "rsp2/src/util/newtype-indices/indexed_vec.rs", "rank": 1, "score": 343138.5700278904 }, { "content": "/// Helper to call `MPI_ABORT` if a panic occurs inside the continuation,\n\n/// *after* allowing the panic implementation to unwind back out.\n\n///\n\n/// This will be completely ineffective if the panic implementation does not unwind.\n\npub fn with_mpi_abort_on_unwind<R>(func: impl std::panic::UnwindSafe + FnOnce() -> R) -> R {\n\n use crate::mpi::{AsCommunicator, Communicator};\n\n\n\n with_default_root(|root| {\n\n let res = std::panic::catch_unwind(func);\n\n match res {\n\n Ok(r) => return r,\n\n Err(_payload) => {\n\n // we won't need to worry about printing a message, under the assumption\n\n // that the panic hook already did so before beginning to unwind.\n\n root.as_communicator().abort(1);\n\n },\n\n }\n\n })\n\n}\n\n\n", "file_path": "rsp2/src/io/lammps/low_level/mpi_helper.rs", "rank": 2, "score": 274464.7244534214 }, { "content": "// Provides the default `mpi::Root`.\n\n//\n\n// This 
exists because I had to give up on making the final product generic over Communicators.\n\n//\n\n// It is returned continuation-style because it is impossible to construct one that is `'static`.\n\nfn with_default_root<R>(continuation: impl FnOnce(mpi::Process<'_, mpi::SystemCommunicator>) -> R) -> R {\n\n use crate::mpi::Communicator;\n\n\n\n let world = mpi::SystemCommunicator::world();\n\n let root = world.process_at_rank(0);\n\n continuation(root)\n\n}\n\n\n\nimpl<D: DispatchMultiProcess> MpiOnDemandInner<D> {\n\n fn root_dispatch(&self, root: &impl mpi::Root, arg: D::Input) -> D::Output {\n\n assert!(this_process_is_root(root));\n\n assert_eq!(Broadcast::broadcast(root, Some(EventType::Dispatch)), EventType::Dispatch);\n\n\n\n let arg = Broadcast::broadcast(root, Some(arg));\n\n self.dispatch.dispatch(root, arg)\n\n }\n\n\n\n fn non_root_dispatch(&self, root: &impl mpi::Root) {\n\n assert!(!this_process_is_root(root));\n\n\n", "file_path": "rsp2/src/io/lammps/low_level/mpi_helper.rs", "rank": 3, "score": 273842.9325465836 }, { "content": "pub fn sparse_grad_from_bond_grad(bond_grads: impl IntoIterator<Item=BondGrad>) -> BTreeMap<usize, V3> {\n\n let mut map = BTreeMap::new();\n\n for item in bond_grads {\n\n *map.entry(item.plus_site).or_insert_with(V3::zero) += item.grad;\n\n *map.entry(item.minus_site).or_insert_with(V3::zero) -= item.grad;\n\n }\n\n map\n\n}\n", "file_path": "rsp2/src/tasks/potential/helper.rs", "rank": 4, "score": 270728.7345005398 }, { "content": "pub fn partial_max<T: PartialOrd>(it: impl IntoIterator<Item=T>) -> Option<T> {\n\n let mut it = it.into_iter();\n\n let first = it.next()?;\n\n Some(it.fold(first, |acc, b| {\n\n if acc < b {\n\n b\n\n } else {\n\n acc\n\n }\n\n }))\n\n}\n", "file_path": "rsp2/src/util/integration-test/util.rs", "rank": 5, "score": 269376.79133925494 }, { "content": "pub fn load(mut file: impl Read) -> FailResult<Assemble>\n\n{ _load(&mut file) }\n\n\n", "file_path": "rsp2/src/io/structure/layers_yaml.rs", 
"rank": 6, "score": 256436.324024386 }, { "content": "// FIXME this really doesn't belong here, but it's the easiest reuse of code\n\npub fn load_layer_sc_info(mut file: impl Read) -> FailResult<Vec<(M33<i32>, [u32; 3], usize)>>\n\n{ _load_layer_sc_info(&mut file) }\n\n\n", "file_path": "rsp2/src/io/structure/layers_yaml.rs", "rank": 7, "score": 248137.057951941 }, { "content": "pub fn zip_eq<As, Bs>(a: As, b: Bs) -> std::iter::Zip<As::IntoIter, Bs::IntoIter>\n\nwhere\n\n As: IntoIterator, As::IntoIter: ExactSizeIterator,\n\n Bs: IntoIterator, Bs::IntoIter: ExactSizeIterator,\n\n{\n\n let (a, b) = (a.into_iter(), b.into_iter());\n\n assert_eq!(a.len(), b.len());\n\n a.zip(b)\n\n}\n\n\n\n// do something only the first time the macro is encountered\n\n#[macro_export]\n\nmacro_rules! once {\n\n ($($tok:tt)*) => {{\n\n use std::sync::{Once};\n\n static ONCE: Once = Once::new();\n\n ONCE.call_once(|| { $($tok)* });\n\n }};\n\n}\n\n\n", "file_path": "rsp2/src/util/macros/lib.rs", "rank": 8, "score": 245326.4625430748 }, { "content": "pub fn zip_eq<As, Bs>(a: As, b: Bs) -> std::iter::Zip<As::IntoIter, Bs::IntoIter>\n\nwhere\n\n As: IntoIterator, As::IntoIter: ExactSizeIterator,\n\n Bs: IntoIterator, Bs::IntoIter: ExactSizeIterator,\n\n{\n\n let (a, b) = (a.into_iter(), b.into_iter());\n\n assert_eq!(a.len(), b.len());\n\n a.zip(b)\n\n}\n\n\n\nmacro_rules! 
impl_newtype_debug {\n\n ($Type:ident) => {\n\n impl std::fmt::Debug for $Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n std::fmt::Debug::fmt(&self.0, f)\n\n }\n\n }\n\n };\n\n}\n\n\n", "file_path": "rsp2/src/util/integration-test/util.rs", "rank": 9, "score": 242361.27719995967 }, { "content": "fn load_frame_or_eof(r: &mut Lines<&mut dyn BufRead>) -> FailResult<Option<Xyz>>\n\n{\n\n let count = match r.next() {\n\n None => return Ok(None), // eof\n\n Some(line) => line?.trim().parse::<usize>()?,\n\n };\n\n let title = r.next().ok_or_else(|| format_err!(\"unexpected EOF!\"))??;\n\n\n\n let mut elements = Vec::with_capacity(count);\n\n let mut carts = Vec::with_capacity(count);\n\n for read_so_far in 0..count {\n\n let line = r.next().ok_or_else(|| {\n\n format_err!(\"unexpected EOF! (expected {} atoms, found {})\", count, read_so_far)\n\n })??;\n\n\n\n let mut words = line.split_whitespace();\n\n elements.push(Element::from_symbol(words.next().ok_or_else(|| {\n\n format_err!(\"unexpected empty line when reading atom from XYZ file\")\n\n })?)?);\n\n\n", "file_path": "rsp2/src/io/structure/xyz.rs", "rank": 10, "score": 226261.44529549888 }, { "content": "/// Make an empty summary.\n\npub fn no_summary() -> Value { Value::Mapping(Default::default()) }\n\n\n", "file_path": "rsp2/src/tasks/ui/cfg_merging.rs", "rank": 11, "score": 226097.55239668227 }, { "content": "// Color range used by most columns that contain probabilities in [0, 1]\n\nfn default_prob_color_range() -> ColorByRange<f64> {\n\n use ansi_term::Colour::*;\n\n ColorByRange::new(vec![\n\n (0.999, Cyan.bold()),\n\n (0.9, Cyan.normal()),\n\n (0.1, Yellow.normal()),\n\n (1e-4, Red.bold()),\n\n (1e-10, Red.normal()),\n\n ], Black.normal()) // make zeros \"disappear\"\n\n}\n\n\n\n/// Simple Display impl for probabilities (i.e. 
from 0 to 1).\n\n///\n\n/// Shows a float at dynamically-chosen fixed precision.\n\n#[derive(Debug, Copy, Clone)]\n\npub struct FixedProb(f64, usize);\n\nimpl fmt::Display for FixedProb {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result\n\n { write!(f, \"{:width$.prec$}\", self.0, prec = self.1, width = self.1 + 2) }\n\n}\n", "file_path": "rsp2/src/tasks/cmd/ev_analyses.rs", "rank": 12, "score": 222294.5171848607 }, { "content": "pub fn dot<A, B>(a: &A, b: &B) -> <A as ndarray::linalg::Dot<B>>::Output\n\nwhere A: ndarray::linalg::Dot<B>\n\n{ a.dot(b) }\n\n\n\npub use self::c_matrix::CMatrix;\n\nmod c_matrix {\n\n use super::*;\n\n use slice_of_array::IsSliceomorphic;\n\n use slice_of_array::prelude::*;\n\n\n\n /// Owned, contiguous, C-order matrix data.\n\n ///\n\n /// Convenient for interfacing with LAPACKe, which tends to assume that\n\n /// one of the strides is equal to 1.\n\n #[derive(Debug, Clone)]\n\n pub struct CMatrix<A = f64>(\n\n // invariant: .strides[1] == 1\n\n // invariant: .strides[0] == .cols()\n\n // invariant: .len() == product of dims\n\n Array2<A>\n", "file_path": "rsp2/src/linalg/lib.rs", "rank": 13, "score": 217620.44005236356 }, { "content": "#[allow(unused)]\n\npub fn ensure_only_carts(coords: &mut Coords) {\n\n // The signature of `carts_mut()` guarantees that it drops all fractional data;\n\n // it could not be correct otherwise.\n\n let _ = coords.carts_mut();\n\n}\n\n\n", "file_path": "rsp2/src/tasks/potential/mod.rs", "rank": 14, "score": 217382.5190108313 }, { "content": "/// Get a map of each value to all of its indices.\n\nfn get_lookup_table<T: Hash + Eq + Clone>(slice: &[T]) -> HashMap<T, Vec<usize>> {\n\n let mut map = HashMap::new();\n\n for (index, key) in slice.iter().enumerate() {\n\n map.entry(key.clone()).or_insert_with(Vec::new).push(index);\n\n }\n\n map\n\n}\n\n//=================================================================\n\n\n", "file_path": "rsp2/src/structure/algo/bonds.rs", "rank": 15, "score": 
208575.21233765458 }, { "content": "// This initializes MPI so it must be done at the very beginning.\n\n//\n\n// The closure runs on only one process.\n\nfn wrap_main_with_lammps_on_demand(continuation: impl UnwindSafe + FnOnce(Option<LammpsOnDemand>)) -> ! {\n\n #[cfg(feature = \"mpi\")] {\n\n let required = mpi::Threading::Serialized;\n\n let (_universe, actual) = {\n\n mpi::initialize_with_threading(required).expect(\"Could not initialize MPI!\")\n\n };\n\n\n\n // 'actual >= required' would be nicer, but I don't think MPI specifies comparison ordering\n\n assert_eq!(actual, required);\n\n\n\n LammpsOnDemand::with_mpi_abort_on_unwind(|| {\n\n LammpsOnDemand::install(|on_demand| continuation(Some(on_demand)));\n\n });\n\n\n\n // NOTE: drop of _universe here issues MPI_Finalize\n\n }\n\n #[cfg(not(feature = \"mpi\"))] {\n\n continuation(None);\n\n }\n\n exit(0)\n\n}\n\n\n", "file_path": "rsp2/src/tasks/entry_points.rs", "rank": 16, "score": 205408.44167820277 }, { "content": "fn fix_version(it: &mut Option<u32>) -> Result<(), Error> {\n\n match *it {\n\n Some(x) if x == 0 || x > MAX_VERSION => {\n\n bail!(\"`version: {}` is invalid. (1 <= version <= {})\", x, MAX_VERSION);\n\n },\n\n None => {\n\n warn!(\"\\\n\n Settings file has no `version` field! Assuming `version: 1`. 
\\\n\n (the latest is version {})\\\n\n \", MAX_VERSION);\n\n *it = Some(1);\n\n },\n\n _ => {},\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rsp2/src/tasks/config/validation.rs", "rank": 17, "score": 204975.65233472374 }, { "content": "// hack for type inference issues\n\npub fn paint<T>(\n\n style: ansi_term::Style,\n\n value: T,\n\n) -> Wrapper<T, T>\n\n{ gpaint(style, value) }\n\n\n", "file_path": "rsp2/src/tasks/ui/color.rs", "rank": 18, "score": 204910.44340781064 }, { "content": "/// Wrapper around `std::fs::copy` that adds context.\n\npub fn copy(src: impl AsRef<Path>, dest: impl AsRef<Path>) -> FailResult<()>\n\n{\n\n let (src, dest) = (src.as_ref(), dest.as_ref());\n\n fs::copy(src, dest)\n\n .map(|_| ()) // number of bytes; don't care\n\n .with_context(|e|\n\n format!(\"could not copy file '{}' to '{}': {}\",\n\n src.display(), dest.display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 19, "score": 200571.9543446931 }, { "content": "/// Wrapper around `std::fs::write` that adds context.\n\npub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> FailResult<()>\n\n{\n\n std::fs::write(path.as_ref(), contents)\n\n .with_context(|e| format!(\"{}: could not write file: {}\", path.as_ref().display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 20, "score": 200571.9543446931 }, { "content": "// Error-chaining wrapper around `hard_link`\n\npub fn hard_link(src: impl AsRef<Path>, dest: impl AsRef<Path>) -> FailResult<()>\n\n{\n\n let (src, dest) = (src.as_ref(), dest.as_ref());\n\n fs::hard_link(src, dest)\n\n .with_context(|e|\n\n format!(\"could not hard-link '{}' to '{}': {}\",\n\n src.display(), dest.display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 21, "score": 198375.88198153366 }, { "content": "/// Wrapper around `std::fs::remove_file` that adds context.\n\npub fn remove_file(dir: impl AsRef<Path>) 
-> FailResult<()>\n\n{\n\n fs::remove_file(dir.as_ref())\n\n .with_context(|e| format!(\"{}: could not remove file: {}\", dir.as_ref().display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 22, "score": 198269.93789615505 }, { "content": "/// Simulates `rm -rf`.\n\n///\n\n/// Properties:\n\n/// * Deletes files and folders alike.\n\n/// * Does not require the path or its ancestors to exist.\n\n/// * **Does** fail if other problems occur (e.g. insufficient permissions).\n\n/// * Does **not** follow symbolic links.\n\npub fn rm_rf(path: impl AsRef<Path>) -> FailResult<()>\n\n{\n\n use std::io::ErrorKind;\n\n\n\n let path = path.as_ref();\n\n\n\n // directoryness is only checked *after* failed deletion, to reduce race conditions\n\n match fs::remove_file(path) {\n\n Ok(()) => { return Ok(()); },\n\n Err(e) => {\n\n match (e.kind(), path.is_dir()) {\n\n (ErrorKind::NotFound, _) => { return Ok(()); },\n\n (ErrorKind::Other, true) => {},\n\n _ => bail!(\"{}: could not delete: {}\", path.display(), e),\n\n }\n\n }\n\n }\n\n\n\n match fs::remove_dir_all(path) {\n\n Ok(()) => { return Ok(()); },\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 23, "score": 198269.93789615505 }, { "content": "/// Wrapper around `std::fs::remove_file` that adds context.\n\npub fn remove_dir(dir: impl AsRef<Path>) -> FailResult<()>\n\n{\n\n fs::remove_dir(dir.as_ref())\n\n .with_context(|e| format!(\"{}: could not remove directory: {}\", dir.as_ref().display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 24, "score": 198269.93789615505 }, { "content": "/// Wrapper around `std::fs::create_dir` that adds context.\n\npub fn create_dir(dir: impl AsRef<Path>) -> FailResult<()>\n\n{\n\n fs::create_dir(dir.as_ref())\n\n .with_context(|e| format!(\"{}: could not create directory: {}\", dir.as_ref().display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 25, 
"score": 198269.93789615505 }, { "content": "pub fn gpaint<U, T>(\n\n style: ansi_term::Style,\n\n value: U,\n\n) -> Wrapper<U, T>\n\n{ Wrapper { style, value, _target: Default::default() } }\n\n\n\n/// A wrapper for colorizing all formatting traits like `Display`.\n\n///\n\n/// It has two parameters so that it can `borrow()` `U` as `T` when it wants to.\n\n/// (otherwise, it would have to store `&T`, making it virtually impossible to\n\n/// return one of these from a function)\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct Wrapper<U, T=U> {\n\n style: ansi_term::Style,\n\n value: U,\n\n _target: std::marker::PhantomData<T>,\n\n}\n\n\n\nmacro_rules! derive_fmt_impl {\n\n ($Trait:path)\n", "file_path": "rsp2/src/tasks/ui/color.rs", "rank": 26, "score": 196641.88222456299 }, { "content": "fn non_empty_env(key: impl AsRef<OsStr>) -> Option<OsString> {\n\n match std::env::var_os(key) {\n\n None => None,\n\n Some(s) => match s.is_empty() {\n\n true => None,\n\n false => Some(s),\n\n }\n\n }\n\n}\n", "file_path": "rsp2/src/util/fs/tempdir.rs", "rank": 27, "score": 194442.32005407946 }, { "content": "/// Wrapper around `File::create` that adds context.\n\npub fn create(path: impl AsRef<Path>) -> FailResult<File>\n\n{\n\n File::create(path.as_ref())\n\n .with_context(|e| format!(\"{}: could not create file: {}\", path.as_ref().display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 28, "score": 193828.53142390796 }, { "content": "/// Wrapper around `File::open` that adds context.\n\npub fn open(path: impl AsRef<Path>) -> FailResult<File>\n\n{\n\n File::open(path.as_ref())\n\n .with_context(|e| format!(\"{}: could not open file: {}\", path.as_ref().display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 29, "score": 193828.53142390796 }, { "content": "pub fn this_process_is_root(root: &impl mpi::Root) -> bool\n\n{ mpi::Communicator::rank(root.as_communicator()) == 
root.root_rank() }\n", "file_path": "rsp2/src/io/lammps/low_level/mpi_helper.rs", "rank": 30, "score": 193280.32720462553 }, { "content": "/// Wrapper around `std::fs::canonicalize` that adds context.\n\npub fn canonicalize(dir: impl AsRef<Path>) -> FailResult<PathBuf>\n\n{\n\n fs::canonicalize(dir.as_ref())\n\n .with_context(|e| format!(\"{}: could not normalize: {}\", dir.as_ref().display(), e))\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 31, "score": 191280.265078182 }, { "content": "// Reverse of `broadcast_via_mut_ref`.\n\nfn broadcast_into_via_option<T, R>(\n\n root: &R,\n\n buf: &mut T,\n\n broadcast: impl FnOnce(&R, Option<T>) -> T,\n\n)\n\nwhere\n\n R: mpi::AsCommunicator + mpi::Root,\n\n T: Default,\n\n{\n\n let value = std::mem::replace(buf, Default::default());\n\n let value = broadcast(root, Some(value));\n\n *buf = value;\n\n}\n\n\n", "file_path": "rsp2/src/io/lammps/low_level/mpi_helper.rs", "rank": 32, "score": 191020.64173740524 }, { "content": "/// Trait for producing fresh instances of an `AlgorithmStateFn`.\n\n///\n\n/// Don't worry too much about this; this trait exists simply so that a single Builder can be cloned\n\n/// or used for multiple `acgsd` calls.\n\n///\n\n/// As a convenience, this is implemented for all cloneable `AlgorithmStateFn`s, so in general any\n\n/// closure that takes `AlgorithmState` will do. (as long as calls to a clone of the closure do not\n\n/// affect the \"freshness\" of the original closure; i.e. 
don't track prior inputs in an\n\n/// `Rc<RefCell<_>>`, or at least don't use them in a manner which affects the output!)\n\npub trait BuildAlgorithmStateFn: objekt::Clone {\n\n type Output;\n\n\n\n /// Produce a fresh instance of the AlgorithmStateFn, with no history of calls made to it yet.\n\n fn build(&self) -> Box<dyn FnMut(AlgorithmState<'_>) -> Self::Output>;\n\n}\n\n\n\nimpl<F, B> BuildAlgorithmStateFn for F\n\nwhere\n\n F: FnMut(AlgorithmState<'_>) -> B,\n\n F: Clone + 'static,\n\n{\n\n type Output = B;\n\n\n\n fn build(&self) -> Box<dyn FnMut(AlgorithmState<'_>) -> B> { Box::new(self.clone()) }\n\n}\n\n\n", "file_path": "rsp2/src/minimize/src/cg.rs", "rank": 33, "score": 190578.9083312994 }, { "content": "/// Solves `output = square * rhs` using LAPACKe's dgesv.\n\npub fn lapacke_linear_solve(mut square: CMatrix, mut rhs: CMatrix) -> Result<CMatrix, DegenerateMatrixError> {\n\n assert!(square.is_square());\n\n assert_eq!(square.cols(), rhs.rows());\n\n\n\n let layout = lapacke::Layout::RowMajor;\n\n\n\n let n = rhs.rows() as i32;\n\n let nrhs = rhs.cols() as i32;\n\n let lda = square.stride() as i32;\n\n let ldb = rhs.stride() as i32;\n\n\n\n {\n\n // lapacke hates size-zero arrays.\n\n let a = match square.len() {\n\n 0 => return Ok(rhs), // rhs must also have zero size; trivial solution\n\n _ => square.c_order_data_mut(),\n\n };\n\n let b = match rhs.len() {\n\n 0 => return Ok(rhs), // trivial solution\n\n _ => rhs.c_order_data_mut(),\n", "file_path": "rsp2/src/linalg/lib.rs", "rank": 34, "score": 190419.36902883268 }, { "content": "/// Helper function which may be used by some impls of `Permute`.\n\n///\n\n/// Partitions each element of a Vec (producing many `Unlabeled<T>`s), then zips them\n\n/// up into a single `Unlabeled<Vec<T>>`.\n\n///\n\n/// This is *not* the same thing as `Vec::into_unlabeled_partitions`.\n\npub fn partition_each_item<'iter, L, T>(part: &'iter Part<L>, items: Vec<T>) -> Unlabeled<'iter, Vec<T>>\n\nwhere T: 
Partition<'iter>,\n\n{Box::new({\n\n items.into_iter()\n\n // (over each item)\n\n .map(|x| x.into_unlabeled_partitions(part))\n\n .fold(\n\n std::iter::repeat_with(|| vec![])\n\n .take(part.region_keys().len())\n\n .collect(),\n\n |mut output: Vec<Vec<T>>, item_partitions| {\n\n // (over each partition)\n\n assert_eq!(output.len(), item_partitions.len());\n\n for (out_partition, item_partition) in output.iter_mut().zip(item_partitions) {\n\n out_partition.push(item_partition);\n\n }\n\n output\n\n }\n\n ).into_iter()\n\n})}\n", "file_path": "rsp2/src/util/soa-ops/part.rs", "rank": 35, "score": 189787.22577388067 }, { "content": "#[allow(bad_style)]\n\npub fn default_CH_pol_constants() -> PolConstants {\n\n enum_map!{\n\n BondType::CC => Some(PolConstant {\n\n c1: 0.32, c2: 2.60, c3: 7.55,\n\n max_len: 1.6,\n\n }),\n\n BondType::CH => Some(PolConstant {\n\n c1: 0.32, c2: 2.60, c3: 7.55,\n\n max_len: 1.3,\n\n }),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/tasks/math/bond_polarizability.rs", "rank": 36, "score": 189594.9679889131 }, { "content": "/// Minimizes 2-norm of `matrix * x - rhs` using LAPACKe's dgelss.\n\npub fn lapacke_least_squares_svd(mut matrix: CMatrix, mut rhs: CMatrix) -> Result<Array2<f64>, Error> {\n\n assert!(matrix.cols() <= rhs.rows());\n\n\n\n let layout = lapacke::Layout::RowMajor;\n\n\n\n let m = matrix.rows() as i32;\n\n let n = matrix.cols() as i32;\n\n let nrhs = rhs.cols() as i32;\n\n let lda = matrix.stride() as i32;\n\n let ldb = rhs.stride() as i32;\n\n\n\n let rcond = -1f64; // use machine precision\n\n\n\n // lapacke hates size-zero arrays.\n\n assert_ne!(ldb, 0, \"TODO\"); // I think this is trivial? 
(return rhs)\n\n assert_ne!(lda, 0, \"TODO\"); // I think this is underdetermined/singular when ldb != 0?\n\n {\n\n let a = matrix.c_order_data_mut();\n\n let b = rhs.c_order_data_mut();\n\n\n", "file_path": "rsp2/src/linalg/lib.rs", "rank": 37, "score": 189240.89980183868 }, { "content": "/// Canonicalizes a path where the final component need not exist.\n\n///\n\n/// NOTE: will behave strangely for paths that end in ..\n\n/// due to how Path::parent is defined\n\npub fn canonicalize_parent(path: impl AsRef<Path>) -> FailResult<PathBuf>\n\n{\n\n let path = path.as_ref();\n\n if path.exists() {\n\n return canonicalize(path);\n\n }\n\n match split(path) {\n\n None => Ok(path.into()),\n\n Some((parent, name)) => {\n\n canonicalize(parent).map(|p| p.join(name))\n\n },\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 38, "score": 188844.10550369247 }, { "content": "// Helper that adapts existing functions with a signature like `Root::broadcast_into`\n\n// into a Broadcast impl, given the existence of a `Default` impl.\n\nfn broadcast_via_mut_ref<T, R>(\n\n root: &R,\n\n value: Option<T>,\n\n broadcast_into: impl FnOnce(&R, &mut T),\n\n) -> T\n\nwhere\n\n R: mpi::AsCommunicator + mpi::Root,\n\n T: Default,\n\n{\n\n if this_process_is_root(root) && value.is_none() {\n\n panic!(\"root did not provide value to broadcast\");\n\n }\n\n let mut buf = value.unwrap_or_else(Default::default);\n\n broadcast_into(root, &mut buf);\n\n buf\n\n}\n\n\n", "file_path": "rsp2/src/io/lammps/low_level/mpi_helper.rs", "rank": 39, "score": 188337.3529931245 }, { "content": "/// Constructs a Yaml like `{a: {b: {c: value}}}`\n\npub fn make_nested_mapping(path: &[impl AsRef<str>], value: Value) -> Value {\n\n path.iter().rev().fold(value, |value, key| make_singleton(key, value))\n\n}\n\n\n", "file_path": "rsp2/src/tasks/ui/cfg_merging.rs", "rank": 40, "score": 182527.32298654478 }, { "content": "/// Wrapper around `File::open` that adds context and makes a `BufReader`.\n\npub 
fn open_text(path: impl AsRef<Path>) -> FailResult<BufReader<File>>\n\n{ open(path).map(BufReader::new) }\n\n\n", "file_path": "rsp2/src/util/fs/lib.rs", "rank": 41, "score": 182527.32298654478 }, { "content": "pub fn load_json<T>(path: impl AsRef<Path>) -> Result<T, Error>\n\nwhere T: serde::de::DeserializeOwned,\n\n{\n\n let file = FileRead::open(path.as_ref())?;\n\n\n\n // HACK; I'd really like to have something more akin\n\n // to the dwim module in rsp2's python parts...\n\n if path.as_ref().extension() == Some(\"xz\".as_ref()) {\n\n Ok(::serde_json::from_reader(xz2::read::XzDecoder::new(file))?)\n\n } else {\n\n Ok(::serde_json::from_reader(file)?)\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/util/integration-test/filetypes.rs", "rank": 42, "score": 178956.4634556088 }, { "content": "fn argsort_floats(xs: impl IntoIterator<Item=f64>) -> Perm {\n\n Perm::argsort(&xs.into_iter().map(|x| NotNan::new(x).unwrap()).collect_vec())\n\n}\n\n\n\n// -------------------------------------------------------------\n\n\n", "file_path": "rsp2/src/structure/algo/layer.rs", "rank": 43, "score": 174510.58311988792 }, { "content": "pub fn save_json<T>(path: impl AsRef<Path>, obj: &T) -> Result<(), Error>\n\nwhere T: serde::Serialize,\n\n{\n\n let mut file = FileWrite::create(path)?;\n\n serde_json::to_writer(&mut file, obj)?;\n\n writeln!(file)?;\n\n Ok(())\n\n}\n", "file_path": "rsp2/src/util/integration-test/filetypes.rs", "rank": 44, "score": 173481.11694059736 }, { "content": "/// Helper for producing a column with the change in value between succesive iterations,\n\n/// just like the one produced by `basic_output_fn`.\n\npub fn output_fn_dv_formatter(\n\n prefix: &str,\n\n precision: usize,\n\n) -> impl Clone + FnMut(f64) -> String {\n\n let mut last_value = None;\n\n\n\n // sign leading_digit decimal (precision digits) \"e-10\"\n\n let float_width = 1 + 1 + 1 + precision + 4;\n\n let width = prefix.len() + 1 + float_width;\n\n\n\n let prefix = prefix.to_string();\n\n move 
|value| {\n\n let out = match last_value {\n\n None => format!(\"{:width$}\", \"\", width=width),\n\n Some(last) => format!(\n\n \"{} {:+f$.prec$e}\", prefix, value - last,\n\n f=float_width, prec=precision,\n\n ),\n\n };\n\n last_value = Some(value);\n\n out\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/minimize/src/cg.rs", "rank": 45, "score": 172783.97083941766 }, { "content": "/// Helper for producing a column with the cosines between succesive directions, just like\n\n/// the one produced by `basic_output_fn`.\n\n///\n\n/// The word \"cos: \" is included in the output, since it is hidden on the first iteration.\n\npub fn output_fn_cosine_formatter(\n\n prefix: &str,\n\n num_cosines: usize,\n\n precision: usize,\n\n) -> impl Clone + FnMut(Option<&[f64]>) -> String {\n\n let mut past_directions = VecDeque::<Vec<_>>::new();\n\n\n\n let word_len = precision + 3; // fit sign, leading 0/1, decimal point.\n\n let total_len = prefix.len() + (1 + word_len) * num_cosines; // fit words and spaces\n\n let prefix = prefix.to_string();\n\n\n\n move |direction| {\n\n\n\n let mut s = String::new();\n\n if !past_directions.is_empty() {\n\n write!(&mut s, \"{}\", prefix).unwrap();\n\n let latest = direction.expect(\"called with None after first call!?\");\n\n for other in &past_directions {\n\n write!(\n\n &mut s, \" {:>+word$.prec$}\",\n", "file_path": "rsp2/src/minimize/src/cg.rs", "rank": 46, "score": 172783.37788920396 }, { "content": "pub fn get_basic_output_fn(\n\n mut emit: impl Clone + FnMut(fmt::Arguments<'_>),\n\n) -> impl Clone + FnMut(AlgorithmState<'_>) {\n\n let mut dv_formatter = output_fn_dv_formatter(\"dv:\", 1);\n\n let mut cos_formatter = output_fn_cosine_formatter(\"cos:\", 3, 2);\n\n\n\n move |state: AlgorithmState<'_>| {\n\n emit(format_args!(\n\n \" i: {i:>6} v: {v:7.3} {dv} g: {g:>6.1e} max: {gm:>6.1e} a: {a:>6.1e} {cos}\",\n\n i = state.iterations,\n\n v = state.value,\n\n dv = dv_formatter(state.value),\n\n g = vnorm(&state.gradient),\n\n a = 
state.alpha,\n\n gm = state.gradient.iter().cloned().map(f64::abs).fold(0.0, f64::max),\n\n cos = cos_formatter(state.direction),\n\n ));\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/minimize/src/cg.rs", "rank": 47, "score": 172773.62982623745 }, { "content": "/// Trait for applying a `Part` to a `Vec` (or similar type), breaking it into pieces.\n\n///\n\n/// By making this a trait, it can be implemented on types like rsp2's own\n\n/// `Coords` or anything else that contains data per-atom (such as eigenvectors).\n\n///\n\n/// The lifetime argument ensures that the iterator returned by `into_unlabeled_partitions`\n\n/// does not outlive either Self or the partition; this allows the iterator to capture self\n\n/// by value, and the partition by reference. Ultimately, we would need both GATs and\n\n/// impl-Trait-on-Trait-impls to get rid of it.\n\npub trait Partition<'iter>: Sized + 'iter {\n\n /// Variant of `into_partitions` which composes more easily, and is\n\n /// therefore the one you need to implement.\n\n ///\n\n /// It returns an iterator over the partitions of `self`.\n\n ///\n\n /// See `into_partitions` for more info.\n\n fn into_unlabeled_partitions<L>(self, part: &'iter Part<L>) -> Unlabeled<'iter, Self>;\n\n\n\n /// Consume self to produce partitions.\n\n ///\n\n /// The ordering within each partition is specified, in order to allow\n\n /// composite types to reliably be able to defer to the implementations\n\n /// defined on each of their members. This is not usually a concern\n\n /// since virtually all implementations will ultimately defer to `Vec<_>`\n\n /// for their implementation... 
but in case you must know:\n\n ///\n\n /// The ordering within each partition of the output must reflect the\n\n /// original order of those elements relative to each other in the\n\n /// input vec, rather than the order of the indices in `part`.\n", "file_path": "rsp2/src/util/soa-ops/part.rs", "rank": 48, "score": 171624.07998288103 }, { "content": "pub fn get_param_opt_output_fn(\n\n opt_helper: Rc<RelaxationOptimizationHelper>,\n\n mut emit: impl Clone + FnMut(std::fmt::Arguments<'_>) + 'static,\n\n) -> impl Clone + FnMut(cg::AlgorithmState<'_>) + 'static {\n\n use rsp2_slice_math::vnorm;\n\n use std::fmt::Write;\n\n\n\n let mut dv_formatter = cg::output_fn_dv_formatter(\"dv:\", 1);\n\n let mut cos_formatter = cg::output_fn_cosine_formatter(\"cos:\", 3, 2);\n\n let opt_helper = opt_helper.clone();\n\n\n\n move |state: cg::AlgorithmState<'_>| {\n\n // Use cartesian gradient instead of actual gradient\n\n let (cart_grad, d_param) = opt_helper.unflatten_grad(state.position, state.gradient);\n\n let num_atoms = cart_grad.len();\n\n emit(format_args!(\n\n \" i: {i:>6} v: {v:7.3} {dv} g: {g:>6.1e} max: {gm:>6.1e} {fpar} α: {a:>6.1e} {cos}\",\n\n i = state.iterations,\n\n v = state.value,\n\n dv = dv_formatter(state.value),\n", "file_path": "rsp2/src/tasks/cmd/relaxation.rs", "rank": 49, "score": 170905.11213570225 }, { "content": "pub fn calc_bonds(\n\n // columns are lattice vectors\n\n lattice: &Matrix3<f64>,\n\n coords: &CoordMat<f64>,\n\n cutoff_distance: f64,\n\n) -> Vec<Vec<Bond>> {\n\n assert!(cutoff_distance > 0.0);\n\n\n\n // number of necessary periodic images in each lattice direction\n\n let (ia, ib, ic) = (0..3)\n\n .map(|idx| {\n\n let direction = lattice.column(idx);\n\n // determine how many images of the unit cell we need for each periodic direction by first\n\n // getting the distance between the two planes in the periodic cell that go in this lattice\n\n // direction\n\n let plane_vec_a = lattice.column((idx + 1) % 3);\n\n let plane_vec_b = 
lattice.column((idx + 2) % 3);\n\n let plane_normal = plane_vec_a.cross(&plane_vec_b);\n\n let plane_distance = f64::abs(plane_normal.dot(&direction) / plane_normal.norm());\n\n // and then just comparing that distance to the cutoff\n", "file_path": "src/adjacency/mod.rs", "rank": 50, "score": 169392.40765209243 }, { "content": "pub fn calc_graph(\n\n // columns are lattice vectors\n\n lattice: &Matrix3<f64>,\n\n coords: &CoordMat<f64>,\n\n cutoff_distance: f64,\n\n) -> CompressedGraph {\n\n let bond_list = calc_bonds(lattice, coords, cutoff_distance);\n\n bond_list.into_iter().flat_map(|b| b.into_iter()).collect()\n\n}\n\n\n", "file_path": "src/adjacency/mod.rs", "rank": 51, "score": 169392.40765209243 }, { "content": "fn load(mut r: Lines<&mut dyn BufRead>) -> FailResult<Vec<Xyz>> {\n\n let mut out = vec![];\n\n while let Some(frame) = load_frame_or_eof(&mut r)? {\n\n out.push(frame);\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "rsp2/src/io/structure/xyz.rs", "rank": 52, "score": 168978.12755316956 }, { "content": "fn invert_each(perms: impl IntoIterator<Item=Perm>) -> Vec<Perm>\n\n{ perms.into_iter().map(|p| p.inverted()).collect() }\n\n\n\npub(crate) fn brute_force_with_sort_trick<M: Ord>(\n\n lattice: &Lattice,\n\n from_meta: &[M],\n\n from: CoordsKind<impl AsRef<[V3]>>,\n\n to_meta: &[M],\n\n to: CoordsKind<impl AsRef<[V3]>>,\n\n tol: f64,\n\n) -> Result<Perm, PositionMatchError>\n\n{Ok({\n\n let (perm_from, sorted_from) = fracs_sorted_by_lattice_distance(lattice, from, from_meta);\n\n let (perm_to, sorted_to) = fracs_sorted_by_lattice_distance(lattice, to, to_meta);\n\n\n\n let perm_between = brute_force_near_identity(\n\n lattice, &sorted_from[..], &sorted_to[..], tol,\n\n )?;\n\n\n\n // Compose all of the permutations for the full permutation.\n", "file_path": "rsp2/src/structure/algo/find_perm.rs", "rank": 53, "score": 168896.587132859 }, { "content": "/// Compute a numerical derivative using finite differences.\n\npub fn slope(\n\n interval_width: 
f64,\n\n kind: Option<DerivativeKind>,\n\n point: f64,\n\n mut value_fn: impl FnMut(f64) -> f64,\n\n) -> f64 {\n\n try_slope::<Never, _>(interval_width, kind, point, |x| Ok(value_fn(x)))\n\n .unwrap_or_else(|e| match e {})\n\n}\n\n\n", "file_path": "rsp2/src/minimize/src/numerical.rs", "rank": 54, "score": 166922.5445599981 }, { "content": "/// Compute a numerical second derivative using finite differences.\n\npub fn diff_2(\n\n interval_width: f64,\n\n kind: Option<DerivativeKind>,\n\n point: f64,\n\n mut value_fn: impl FnMut(f64) -> f64,\n\n) -> f64 {\n\n try_diff_2::<Never, _>(interval_width, kind, point, |x| Ok(value_fn(x)))\n\n .unwrap_or_else(|e| match e {})\n\n}\n\n\n", "file_path": "rsp2/src/minimize/src/numerical.rs", "rank": 55, "score": 166922.49460528354 }, { "content": "/// Numerically compute a gradient.\n\n///\n\n/// This independently performs a slope check along each individual\n\n/// axis of the input. The number of function calls it makes will\n\n/// be linearly proportional to the input size. 
This might be\n\n/// prohibitively expensive!!\n\npub fn gradient(\n\n interval_width: f64,\n\n kind: Option<DerivativeKind>,\n\n point: &[f64],\n\n mut value_fn: impl FnMut(&[f64]) -> f64,\n\n) -> Vec<f64> {\n\n try_gradient::<Never, _>(interval_width, kind, point, |x| Ok(value_fn(x)))\n\n .unwrap_or_else(|e| match e {})\n\n}\n\n\n", "file_path": "rsp2/src/minimize/src/numerical.rs", "rank": 56, "score": 166917.14547718578 }, { "content": "pub fn compute(\n\n params: &Params,\n\n interactions: &Interactions,\n\n coords: &Coords,\n\n use_rayon: bool,\n\n) -> FailResult<(f64, Vec<V3>)> {\n\n let bond_deltas = interactions.compute_bond_deltas(coords, use_rayon);\n\n let (value, d_deltas) = compute_rebo_bonds(params, &interactions, &bond_deltas, use_rayon)?;\n\n\n\n let mut d_positions = IndexVec::from_elem_n(V3::zero(), interactions.num_sites());\n\n for site_i in interactions.sites() {\n\n for bond_ij in interactions.bonds(site_i) {\n\n let site_j = interactions.bond_target(bond_ij);\n\n\n\n // delta_ij = (-pos_i) + pos_j\n\n d_positions[site_i] -= d_deltas[bond_ij];\n\n d_positions[site_j] += d_deltas[bond_ij];\n\n }\n\n }\n\n Ok((value, d_positions.raw))\n\n}\n\n\n", "file_path": "rsp2/src/potentials/rebo/nonreactive.rs", "rank": 57, "score": 166917.14547718578 }, { "content": "/// Produce a variety of data describing the displacements in terms of rsp2's conventions\n\n/// (whereas most other methods on `DirWithDisps` use phonopy's conventions).\n\npub fn phonopy_displacements(\n\n settings: &cfg::Phonons,\n\n prim_coords: &Coords,\n\n prim_meta: HList2<\n\n meta::SiteElements,\n\n meta::SiteMasses,\n\n >,\n\n sc: &SupercellToken,\n\n // supercell coordinates in rsp2's ordering convention, as created by `sc`\n\n our_super_coords: &Coords,\n\n) -> FailResult<PhonopyDisplacements> {\n\n let displacement_distance = settings.displacement_distance.expect(\"(bug) missing displacement-distance should have been caught earlier\");\n\n let symmetry_tolerance = 
settings.symmetry_tolerance.expect(\"(bug) missing symmetry-tolerance should have been caught earlier\");\n\n let dir = {\n\n let mut builder = {\n\n builder::Builder::new()\n\n // HACK: Give phonopy a slightly smaller symprec to ensure that, in case\n\n // rsp2 and phonopy find different spacegroups, phonopy should find the\n\n // smaller one.\n\n .symmetry_tolerance(symmetry_tolerance * 0.99)\n", "file_path": "rsp2/src/tasks/cmd/phonopy.rs", "rank": 58, "score": 164571.66934442392 }, { "content": "pub fn compute_displacements(\n\n cfg: &cfg::PhononDispFinderRsp2Directions,\n\n int_rots: impl IntoIterator<Item=IntRot>,\n\n stars: &Stars,\n\n coords: &Coords,\n\n amplitude: f64,\n\n) -> Vec<(usize, V3)> {\n\n let int_rots = int_rots.into_iter().collect::<IndexVec<usize, _>>();\n\n\n\n let go = |choices: &[_]| {\n\n _compute_displacements::<usize, _, _, _>(\n\n choices, &int_rots[..], stars, coords.lattice(), amplitude,\n\n ).raw\n\n };\n\n\n\n match cfg {\n\n cfg::PhononDispFinderRsp2Directions::Axial => go(&DIRECTIONS_AXIAL),\n\n cfg::PhononDispFinderRsp2Directions::Diag => go(&DIRECTIONS_DIAG_1),\n\n cfg::PhononDispFinderRsp2Directions::Diag2 => go(&DIRECTIONS_DIAG_2),\n\n cfg::PhononDispFinderRsp2Directions::Survey => {\n\n debug!(\"Surveying displacement implementations:\");\n\n debug!(\" axial: Produces {}\", go(&DIRECTIONS_AXIAL).len());\n\n debug!(\" diag: Produces {}\", go(&DIRECTIONS_DIAG_1).len());\n\n debug!(\" diag-2: Produces {}\", go(&DIRECTIONS_DIAG_2).len());\n\n go(&DIRECTIONS_DIAG_2)\n\n },\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/tasks/math/displacements.rs", "rank": 59, "score": 164566.6888689152 }, { "content": "pub fn find_all_interactions(\n\n params: &Params,\n\n coords: &Coords,\n\n elements: &[Element],\n\n) -> FailResult<Interactions> {\n\n let ref types = elements.iter().cloned().map(AtomType::from_element).collect::<FailResult<Vec<_>>>()?;\n\n let ref graph = {\n\n rsp2_structure::bonds::FracBonds::compute_with_meta(\n\n coords, 
types.iter().cloned(),\n\n // FIXME should return None for other elements\n\n |&a, &b| Some(params.by_type[a][b].cutoff_region.1),\n\n )?.to_periodic_graph()\n\n };\n\n\n\n let potential = InteractionsPotential { params: params.clone() };\n\n Interactions::compute(potential, coords, types, graph)\n\n}\n\n\n\n//---------------------------------------------------------------------------------\n\n\n\npub struct BondGrad {\n\n pub plus_site: usize,\n\n pub minus_site: usize,\n\n pub cart_vector: V3,\n\n pub grad: V3,\n\n}\n\n\n", "file_path": "rsp2/src/potentials/rebo/nonreactive.rs", "rank": 60, "score": 164566.6888689152 }, { "content": "pub fn compute_by_bond(\n\n params: &Params,\n\n interactions: &Interactions,\n\n coords: &Coords,\n\n use_rayon: bool,\n\n) -> FailResult<(f64, Vec<BondGrad>)> {\n\n let bond_deltas = interactions.compute_bond_deltas(coords, use_rayon);\n\n let (value, d_deltas) = compute_rebo_bonds(params, &interactions, &bond_deltas, use_rayon)?;\n\n\n\n let mut grad_items = Vec::with_capacity(interactions.num_bonds());\n\n for site_i in interactions.sites() {\n\n for bond_ij in interactions.bonds(site_i) {\n\n let site_j = interactions.bond_target(bond_ij);\n\n\n\n // delta_ij = (-pos_i) + pos_j\n\n grad_items.push(BondGrad {\n\n plus_site: site_j.0,\n\n minus_site: site_i.0,\n\n cart_vector: bond_deltas[bond_ij],\n\n grad: d_deltas[bond_ij],\n\n });\n\n }\n\n }\n\n Ok((value, grad_items))\n\n}\n\n\n", "file_path": "rsp2/src/potentials/rebo/nonreactive.rs", "rank": 61, "score": 164566.6888689152 }, { "content": "// FIXME:\n\n// Workaround for there currently being no way to write\n\n// `-> impl IntoIterator<IntoIter=impl Clone + ExactSizeIterator>`\n\npub trait IntoExactSizeCloneIterator {\n\n type Item;\n\n type IntoIter: Clone + ExactSizeIterator<Item=Self::Item>;\n\n\n\n fn into_exact_size_clone_iterator(self) -> Self::IntoIter;\n\n}\n\n\n\nimpl<I> IntoExactSizeCloneIterator for I\n\nwhere\n\n I: IntoIterator,\n\n <I as 
IntoIterator>::IntoIter: Clone + ExactSizeIterator<Item = <I as IntoIterator>::Item>,\n\n{\n\n type Item = <I as IntoIterator>::Item;\n\n type IntoIter = <I as IntoIterator>::IntoIter;\n\n\n\n fn into_exact_size_clone_iterator(self) -> Self::IntoIter { self.into_iter() }\n\n}\n", "file_path": "rsp2/src/tasks/math/frac_bonds_with_skin.rs", "rank": 62, "score": 163258.16287383166 }, { "content": " pub trait Differentiable1d: Clone {\n\n type Derivative: Differentiable1d;\n\n fn evaluate(&self, x: f64) -> f64;\n\n fn derivative(&self) -> Self::Derivative;\n\n\n\n fn scale_x(&self, scale: f64) -> ScaleX<Self> { ScaleX(scale, self.clone()) }\n\n fn scale_y(&self, scale: f64) -> ScaleY<Self> { ScaleY(scale, self.clone()) }\n\n fn recenter(&self, center: f64) -> Recenter<Self> { Recenter(center, self.clone()) }\n\n }\n\n\n\n /// Computes `f(center + x)`\n\n #[derive(Debug, Copy, Clone)] pub struct Recenter<F>(pub f64, pub F);\n\n /// Computes `f(scale * x)`\n\n #[derive(Debug, Copy, Clone)] pub struct ScaleX<F>(pub f64, pub F);\n\n /// Computes `scale * f(x)`\n\n #[derive(Debug, Copy, Clone)] pub struct ScaleY<F>(pub f64, pub F);\n\n\n\n impl<F:Differentiable1d> Differentiable1d for Recenter<F> {\n\n type Derivative = Recenter<F::Derivative>;\n\n fn evaluate(&self, x: f64) -> f64 { self.1.evaluate(self.0 + x) }\n", "file_path": "rsp2/src/minimize/src/test.rs", "rank": 63, "score": 163069.51577090297 }, { "content": "/// Compute depermutations for all operators in a spacegroup.\n\n///\n\n/// Apologies for the invented terminology; see `conventions.md` about the difference\n\n/// between copermutations and depermutations.\n\n///\n\n/// This method can be called on superstructures so long as pure translations\n\n/// are not included in the list of operators. 
Be aware that if the superlattice\n\n/// breaks symmetries of the primitive structure, those symmetries might not have\n\n/// a valid representation as a permutation (and the method will fail).\n\n///\n\n/// `tol = 0` is explicitly supported as long as you only seek the identity.\n\n//\n\n// (NOTE: currently, it actually fails even earlier, when trying to construct IntRot\n\n// (which fails if and only if the superlattice breaks the symmetry).\n\n// I don't know / have not yet proven whether there may exist symmetry-broken\n\n// supercells which DO have a valid permutation representation)\n\npub fn spacegroup_deperms(\n\n // Arbitrary superstructure (the thing we want to permute)\n\n coords: &Coords,\n\n\n\n // Spacegroup operators.\n\n //\n\n // * Must be closed under composition.\n\n // * Must not include pure translations. (this limitation is because the\n\n // the method used to equate two operators only considers the rotations)\n\n ops: &[CartOp],\n\n\n\n tol: f64,\n\n) -> Result<Vec<Perm>, Error>\n\n{ spacegroup_coperms(coords, ops, tol).map(invert_each) }\n\n\n", "file_path": "rsp2/src/structure/algo/find_perm.rs", "rank": 64, "score": 162335.37910598353 }, { "content": "/// Compute copermutations for all operators in a spacegroup.\n\n///\n\n/// Apologies for the invented terminology; see `conventions.md` about the difference\n\n/// between copermutations and depermutations.\n\n///\n\n/// This method can be called on superstructures so long as pure translations\n\n/// are not included in the list of operators. 
Be aware that if the superlattice\n\n/// breaks symmetries of the primitive structure, those symmetries might not have\n\n/// a valid representation as a permutation (and the method will fail).\n\n///\n\n/// `tol = 0` is explicitly supported as long as you only seek the identity.\n\n//\n\n// (NOTE: currently, it actually fails even earlier, when trying to construct IntRot\n\n// (which fails if and only if the superlattice breaks the symmetry).\n\n// I don't know / have not yet proven whether there may exist symmetry-broken\n\n// supercells which DO have a valid permutation representation)\n\npub fn spacegroup_coperms(\n\n // Arbitrary superstructure (the thing we want to permute)\n\n coords: &Coords,\n\n\n\n // Spacegroup operators.\n\n //\n\n // * Must be closed under composition.\n\n // * Must not include pure translations. (this limitation is because the\n\n // the method used to equate two operators only considers the rotations)\n\n ops: &[CartOp],\n\n\n\n tol: f64,\n\n) -> Result<Vec<Perm>, Error>\n\n{\n\n let dummy_meta = vec![(); coords.num_atoms()];\n\n spacegroup_coperms_with_meta(coords, &dummy_meta, ops, tol)\n\n}\n\n\n", "file_path": "rsp2/src/structure/algo/find_perm.rs", "rank": 65, "score": 162335.37910598353 }, { "content": "#[cfg(test)]\n\npub fn init_test_logger() {\n\n let _ = init_global_logger();\n\n}\n\n\n\n/// Returned by `init_global_logger` to remind you to set the logfile once possible,\n\n/// or to disable its logging. 
(which can be done by calling the `start` or `disable` method)\n\npub struct SetGlobalLogfile(());\n\nimpl SetGlobalLogfile {\n\n pub fn start(self, path: PathFile) -> FailResult<()> {\n\n let result = GLOBAL_LOGFILE.start(path).map_err(Into::into);\n\n std::mem::forget(self);\n\n result\n\n }\n\n\n\n pub fn disable(self) {\n\n GLOBAL_LOGFILE.disable();\n\n std::mem::forget(self);\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/tasks/ui/logging.rs", "rank": 66, "score": 162331.8306842654 }, { "content": "pub fn read_dynmat(\n\n input_path: impl AsPath,\n\n) -> FailResult<DynamicalMatrix> {\n\n let cereal = call_script_and_communicate_with_args(\n\n PY_CALL_READ_DYNMAT,\n\n &(),\n\n |cmd| { cmd.arg(input_path.as_path()); },\n\n )?;\n\n\n\n DynamicalMatrix::from_cereal(cereal)\n\n}\n\n\n", "file_path": "rsp2/src/tasks/cmd/python/convert.rs", "rank": 67, "score": 162331.8306842654 }, { "content": "pub fn write_dynmat(\n\n output_path: impl AsPath,\n\n dynmat: &DynamicalMatrix,\n\n) -> FailResult<()> {\n\n call_script_and_communicate_with_args(\n\n PY_CALL_WRITE_DYNMAT,\n\n &dynmat.cereal(),\n\n |cmd| { cmd.arg(output_path.as_path()); },\n\n )\n\n}\n", "file_path": "rsp2/src/tasks/cmd/python/convert.rs", "rank": 68, "score": 162331.8306842654 }, { "content": "pub fn run_make_supercell(\n\n structure: StoredStructure,\n\n dims_str: &str,\n\n layer_sc_mode: LayerScMode,\n\n output: impl AsPath,\n\n) -> FailResult<()> {\n\n let StoredStructure {\n\n title, mut coords, mut elements, mut layers, mut masses,\n\n mut layer_sc_matrices, frac_bonds,\n\n } = structure;\n\n\n\n if let Some(_) = frac_bonds {\n\n // TODO: support this properly.\n\n warn!(\"\\\n\n Supercells of bond graphs are not yet implemented, so the created supercell will be \\\n\n missing a bond graph. 
(don't worry too much about this; rsp2 will typically \\\n\n generate a new bond graph when run on the output).\\\n\n \");\n\n };\n\n\n", "file_path": "rsp2/src/tasks/cmd/mod.rs", "rank": 69, "score": 162331.8306842654 }, { "content": "#[allow(unused)]\n\npub fn unfold_gamma_phonon(\n\n config: &Config,\n\n threading: Threading,\n\n // Takes CoordStructure because I think there might be a case for\n\n // supporting <M: Eq + Hash>, with the semantics that atoms with\n\n // non-equal metadata are \"distinct\" and contributions between\n\n // them to a projection cannot cancel.\n\n superstructure: &Coords,\n\n // eigenvector_q: &SuperFracQ, // NOTE: only gamma now\n\n eigenvector: KetRef<'_>,\n\n supercell_matrix: &ScMatrix,\n\n) -> Vec<([u32; 3], f64)>\n\n{\n\n let unfolder = GammaUnfolder::from_config(config, threading, superstructure, supercell_matrix);\n\n let indices = unfolder.q_indices().iter().cloned();\n\n let probs = unfolder.unfold_phonon(threading, eigenvector);\n\n izip!(indices, probs).collect()\n\n}\n\n\n\n/// Contains precomputed information derived from the\n", "file_path": "rsp2/src/tasks/math/bands.rs", "rank": 70, "score": 162331.8306842654 }, { "content": "/// Perform conjugate gradient using the default configuration for ACGSD, and with a stop condition\n\n/// that can be deserialized from JSON.\n\n///\n\n/// See [`Builder::new_acgsd`] for more information.\n\npub fn acgsd<F: DiffFn>(\n\n stop_condition: &StopCondition,\n\n initial_position: &[f64],\n\n compute: F,\n\n) -> Result<Output, Failure<F::Error>> {\n\n Builder::new_acgsd()\n\n .stop_condition(stop_condition.to_function())\n\n .run(initial_position, compute)\n\n}\n\n\n\n//==================================================================================================\n\n\n\n// Types used inside the implementation of acgsd.\n\npub(crate) mod internal_types {\n\n #[derive(Debug, Clone)]\n\n pub(crate) struct Point {\n\n pub(crate) position: Vec<f64>,\n\n pub(crate) gradient: 
Vec<f64>,\n\n pub(crate) value: f64,\n\n }\n", "file_path": "rsp2/src/minimize/src/cg.rs", "rank": 71, "score": 162156.06540856685 }, { "content": "// FIXME: These functions for summary.yaml probably don't belong here,\n\n// but `merge_summaries` is here to use a private function.\n\n/// Merges summary.yaml files for output.\n\npub fn merge_summaries(a: Value, b: Value) -> Value {\n\n // Reuse the dumb algorithm because is actually perfect for this use case.\n\n // Summaries should continue to use the dumb algorithm even if config files\n\n // get a redesigned algorithm at some point.\n\n dumb_config_merge(a, b)\n\n}\n\n\n", "file_path": "rsp2/src/tasks/ui/cfg_merging.rs", "rank": 72, "score": 161959.61381662736 }, { "content": "#[inline(always)] // elide large stack-to-stack copies\n\nfn axpy_mut<T: Copy>(a: &mut [T], alpha: f64, b: &[T])\n\nwhere\n\n f64: ops::Mul<T, Output=T>,\n\n T: ops::AddAssign<T>,\n\n{\n\n for (a, b) in zip_eq!(a, b) {\n\n *a += alpha * *b;\n\n }\n\n}\n\n\n", "file_path": "rsp2/src/potentials/rebo/nonreactive.rs", "rank": 73, "score": 160688.04898244585 }, { "content": "/// Perform conjugate gradient using the default configuration for CG-DESCENT, and with a\n\n/// stop condition that can be deserialized from JSON.\n\n///\n\n/// See [`Builder::new_hager`] for more information.\n\npub fn cg_descent<F: DiffFn>(\n\n stop_condition: &StopCondition,\n\n initial_position: &[f64],\n\n compute: F,\n\n) -> Result<Output, Failure<F::Error>> {\n\n Builder::new_hager()\n\n .stop_condition(stop_condition.to_function())\n\n .run(initial_position, compute)\n\n}\n\n\n", "file_path": "rsp2/src/minimize/src/cg.rs", "rank": 74, "score": 160287.4635991066 }, { "content": "/// Implements sparse force sets in terms of dense force sets.\n\n///\n\n/// Assumes `compute_dense_force` produces values that only differ from the\n\n/// original forces in a neighborhood of the displacement. 
This can be true if\n\n/// the potential...\n\n///\n\n/// * is deterministic,\n\n/// * implements a cutoff radius, and\n\n/// * does not recklessly adjust coordinates\n\n///\n\n/// ...so that with the help of the \"ensure_only_carts\", even this\n\n/// exact equality check should be effective at sparsifying the data.\n\n///\n\n/// Which is good, because it's tough to define an approximate scale for comparison\n\n/// here, as the forces are the end-result of catastrophic cancellations.\n\npub fn sparse_deltas_from_dense_deterministic(\n\n original_force: &[V3],\n\n final_force: &[V3],\n\n) -> BTreeMap<usize, V3> {\n\n zip_eq!(original_force, final_force).enumerate()\n\n .map(|(atom, (old, new))| (atom, new - old))\n\n .filter(|&(_, v)| v != V3::zero())\n\n .collect()\n\n}\n\n\n\n//--------------------------------\n\n\n\npub use disp_fn_helper::DispFnHelper;\n\npub mod disp_fn_helper {\n\n use super::*;\n\n\n\n /// A type that can help simplify `DispFn` implementations by remembering the equilibrium\n\n /// coords and gradient.\n\n ///\n\n /// With this type, you only need to implement the `disp_fn_helper::`[`Callback`] trait\n", "file_path": "rsp2/src/tasks/potential/helper.rs", "rank": 75, "score": 160204.2476823464 }, { "content": "#[inline(always)] // elide large stack-to-stack copies\n\nfn sbvec_filled<T: Clone>(fill: T, len: usize) -> SiteBondVec<T>\n\n{ std::iter::repeat(fill).take(len).collect() }\n\n\n", "file_path": "rsp2/src/potentials/rebo/nonreactive.rs", "rank": 76, "score": 159083.18052390995 }, { "content": "// Call a function during unwind.\n\n//\n\n// It will be called after the panic handler (which is what usually prints the panic message and\n\n// backtrace), making this a great way to ensure that a message appears at the very end of a\n\n// test's captured STDERR.\n\nstruct DoAfterPanic<F: FnMut()>(F);\n\n\n\nimpl<F: FnMut()> Drop for DoAfterPanic<F> {\n\n fn drop(&mut self) {\n\n if std::thread::panicking() {\n\n (self.0)()\n\n }\n\n 
}\n\n}\n\n\n\n/// Proof of the global environment for a test case having been set up.\n\npub struct Environment(());\n\n\n\nstatic ENVIRONMENT_ONCE: std::sync::Once = std::sync::Once::new();\n\n\n\nimpl Environment {\n\n /// Set up the global environment for the test case.\n\n ///\n\n /// Most notably, this sets up a logger that prints to the captured stderr.\n\n ///\n", "file_path": "rsp2/src/util/integration-test/cli_test.rs", "rank": 77, "score": 158067.71837735732 }, { "content": "// Configuration YAML obtained from CLI args, for future runs.\n\n// (disables the requirement for having at least one)\n\nstruct ConfigOverrideArgs(Option<ConfigSources>);\n\n\n\nimpl CliDeserialize for ConfigArgs {\n\n fn _augment_clap_app<'a, 'b>(app: clap::App<'a, 'b>) -> clap::App<'a, 'b> {\n\n app.args(&[\n\n arg!(*config [-c][--config]=CONFIG... crate::ui::cfg_merging::CONFIG_HELP_STR),\n\n ])\n\n }\n\n\n\n fn _resolve_args(m: &clap::ArgMatches<'_>) -> FailResult<Self>\n\n { ConfigSources::resolve_from_args(m.expect_values_of(\"config\")).map(ConfigArgs) }\n\n}\n\n\n\nimpl CliDeserialize for ConfigOverrideArgs {\n\n fn _augment_clap_app<'a, 'b>(app: clap::App<'a, 'b>) -> clap::App<'a, 'b> {\n\n app.args(&[\n\n arg!(?config [-c][--config]=CONFIG... 
crate::ui::cfg_merging::CONFIG_OVERRIDE_HELP_STR),\n\n ])\n\n }\n\n\n", "file_path": "rsp2/src/tasks/entry_points.rs", "rank": 78, "score": 157785.43314631656 }, { "content": "#[cfg(feature = \"mpi\")]\n\npub fn num_mpi_processes() -> u32 {\n\n use mpi::traits::Communicator;\n\n\n\n let world = mpi::topology::SystemCommunicator::world();\n\n world.size() as _\n\n}\n", "file_path": "rsp2/src/tasks/env.rs", "rank": 79, "score": 156605.0488486871 }, { "content": "/// Intended to be used during relaxation.\n\n///\n\n/// *Attempts* to produce a set of eigenkets containing many or all of the non-acoustic modes of\n\n/// negative eigenvalue (possibly along with other modes that do not meet this condition);\n\n/// however, it may very well miss some.\n\n///\n\n/// If none of the modes produced are negative, then it is safe (-ish) to assume that the matrix\n\n/// has no such eigenmodes. (At least, that is the intent!)\n\npub fn compute_negative_eigensolutions_gamma(\n\n dynmat: &DynamicalMatrix,\n\n max_solutions: usize,\n\n shift_invert_attempts: u32,\n\n) -> FailResult<(Vec<f64>, GammaBasis3)> {\n\n trace!(\"Computing most negative eigensolutions.\");\n\n scripts::Negative {\n\n matrix: dynmat.cereal(),\n\n max_solutions,\n\n shift_invert_attempts,\n\n dense: false,\n\n }.invoke_gamma()\n\n}\n\n\n", "file_path": "rsp2/src/tasks/cmd/python/scipy_eigsh.rs", "rank": 80, "score": 156250.147871117 }, { "content": "fn scale_ranges__repeat_count() -> u32 { 1 }\n", "file_path": "rsp2/src/tasks/config/config.rs", "rank": 81, "score": 156007.14064871022 }, { "content": "/// Workaround to use metadata where thread safety is required.\n\n///\n\n/// Basically, metadata is not threadsafe due to heavy use of Rc.\n\n/// This makes a sendable function that produces a copy of Self\n\n/// each time it is called.\n\npub trait MetaSendable: Sized + Clone {\n\n fn sendable<'a>(&'a self) -> Box<dyn Fn() -> Self + Send + Sync + 'a>;\n\n}\n\n\n\nimpl<T: Clone + Sync> MetaSendable for 
std::rc::Rc<[T]> {\n\n fn sendable<'a>(&'a self) -> Box<dyn Fn() -> Self + Send + Sync + 'a> {\n\n let send = &self[..];\n\n Box::new(move || send.into())\n\n }\n\n}\n\n\n\nimpl MetaSendable for FracBonds {\n\n fn sendable<'a>(&'a self) -> Box<dyn Fn() -> Self + Send + Sync + 'a> {\n\n let send = &**self;\n\n Box::new(move || Rc::new(send.clone()))\n\n }\n\n}\n\n\n\nimpl<V: MetaSendable> MetaSendable for Option<V> {\n\n fn sendable<'a>(&'a self) -> Box<dyn Fn() -> Self + Send + Sync + 'a> {\n", "file_path": "rsp2/src/tasks/meta.rs", "rank": 82, "score": 155682.6801021451 }, { "content": "fn linspace(r: Range<f64>, n: usize, extend_borders: bool) -> (Vec<i32>, Vec<f64>)\n\n{\n\n assert!(n > 1, \"cannot perform linspace with n < 2\");\n\n\n\n let (mut indices, mut values): (Vec<i32>, Vec<f64>) = (0..n as i32)\n\n .map(|i| (i, i as f64 / (n as f64 - 1f64)))\n\n .map(|(i, a)| (i, (1.0 - a) * r.start + a * r.end))\n\n .unzip();\n\n\n\n assert_eq!(values[0], r.start);\n\n assert_eq!(*values.last().unwrap(), r.end);\n\n assert_eq!(values.len(), n);\n\n\n\n if extend_borders {\n\n let step = values[1] - values[0];\n\n values.push(r.end + step);\n\n indices.push(n as i32);\n\n values.insert(0, r.start - step);\n\n indices.insert(0, -1);\n\n }\n\n\n\n (indices, values)\n\n}\n\n\n", "file_path": "rsp2/src/tasks/cmd/integrate_2d.rs", "rank": 83, "score": 154914.60307836672 }, { "content": "/// Initialize LAMMPS, do nothing of particular value, and exit.\n\n///\n\n/// For debugging linker errors.\n\npub fn link_test() -> FailResult<()>\n\n{Ok({\n\n let _ = unsafe { LammpsOwner::new(&[\"lammps\", \"-log\", \"none\"])? 
};\n\n})}\n\n\n\n/// Initialize LAMMPS, do nothing of particular value, and exit.\n\n///\n\n/// For debugging linker errors.\n", "file_path": "rsp2/src/io/lammps/lib.rs", "rank": 84, "score": 154477.4658467681 }, { "content": "/// Generates a finite group from a non-empty set of generators.\n\n///\n\n/// The generators may contain duplicates or extraneous elements.\n\n///\n\n/// The order of the output is arbitrary, but consistent for\n\n/// inputs that are related by a group isomorphism.\n\npub fn generate_finite_group<G>(\n\n generators: &[G],\n\n mut g_fn: impl FnMut(&G, &G) -> G,\n\n) -> Vec<G>\n\nwhere G: Hash + Eq + Clone,\n\n{\n\n use std::collections::{HashSet, VecDeque};\n\n assert!(generators.len() > 0, \"empty groups do not exist!\");\n\n\n\n let mut seen = HashSet::new();\n\n let mut out = vec![];\n\n\n\n let mut queue: VecDeque<_> = generators.iter().cloned().collect();\n\n\n\n while let Some(g) = queue.pop_front() {\n\n if seen.insert(g.clone()) {\n\n queue.extend(generators.iter().map(|h| g_fn(&g, h)));\n\n out.push(g);\n\n }\n\n }\n\n out\n\n}\n", "file_path": "rsp2/src/structure/algo/group.rs", "rank": 85, "score": 154477.4658467681 }, { "content": "// Temporarily allocate a C string for the duration of a closure.\n\n//\n\n// The closure may make arbitrary modifications to the string's\n\n// content (including writes of interior NUL bytes), but must not\n\n// write beyond the `s.len() + 1` allocated bytes for the C string.\n\nfn with_temporary_c_str<B, F>(s: &str, f: F) -> B\n\n where F: FnOnce(*mut c_char) -> B\n\n{\n\n // It is not safe to use CString here; LAMMPS may write NUL bytes\n\n // that change the length of the string.\n\n let mut bytes = s.to_string().into_bytes();\n\n bytes.push(0);\n\n f(bytes.as_mut_ptr() as *mut c_char)\n\n}\n\n\n\nuse self::black_hole::BlackHole;\n\nmod black_hole {\n\n use std::fmt;\n\n\n\n /// Contains something that is dangerous to obtain references to.\n\n ///\n\n /// It will never be seen again (except to be 
dropped).\n\n pub struct BlackHole<T>(T);\n\n impl<T> BlackHole<T> {\n\n pub fn entrap(x: T) -> BlackHole<T> { BlackHole(x) }\n", "file_path": "rsp2/src/io/lammps/low_level/plain.rs", "rank": 86, "score": 153977.98913125234 }, { "content": "fn auto_adjust_lattice(diag: &mut [f64; 3], skews: &mut Skews) {\n\n fn do_element(d: &mut f64, s: &mut f64) {\n\n if 2.0 * s.abs() > d.abs() {\n\n // shrink skew by 1 ULP\n\n *s = s.signum() * next_after(s.abs(), 0.0);\n\n }\n\n if 2.0 * s.abs() > d.abs() {\n\n // that wasn't enough?\n\n // then increase diag by 1 ULP\n\n *d = d.signum() * next_after(d.abs(), std::f64::INFINITY);\n\n }\n\n }\n\n\n\n // (actually, xx might change by up to two ULPs)\n\n do_element(&mut diag[0], &mut skews.xy);\n\n do_element(&mut diag[0], &mut skews.xz);\n\n do_element(&mut diag[1], &mut skews.yz);\n\n}\n\n\n\nuse std::os::raw::c_double;\n\n#[link(name = \"m\")]\n\nextern {\n\n fn nextafter(from: c_double, to: c_double) -> c_double;\n\n}\n", "file_path": "rsp2/src/io/lammps/lib.rs", "rank": 87, "score": 153931.2509600703 }, { "content": "// Run a callback in eco mode without needing to create a PotentialBuilder.\n\nfn eco_mode_without_potential<B, F>(\n\n settings: &Settings,\n\n on_demand: Option<LammpsOnDemand>,\n\n continuation: F,\n\n) -> FailResult<B>\n\nwhere F: FnOnce(EcoModeProof<'_>) -> FailResult<B>,\n\n{\n\n // can't use rsp2_lammps_wrap::potential::None due to Meta type mismatch\n\n //\n\n // FIXME: This is dumb; creating a dummy potential just so we can make a builder\n\n // so we can call this method. 
LammpsOnDemand should expose an `eco_mode` method.\n\n #[derive(Debug, Copy, Clone, PartialEq, Eq, Default)]\n\n pub struct NoPotential;\n\n impl rsp2_lammps_wrap::Potential for NoPotential {\n\n type Meta = CommonMeta;\n\n\n\n fn atom_types(&self, _: &Coords, _: &Self::Meta) -> Vec<rsp2_lammps_wrap::AtomType>\n\n { unreachable!() }\n\n\n\n fn init_info(&self, _: &Coords, _: &Self::Meta) -> rsp2_lammps_wrap::InitInfo\n", "file_path": "rsp2/src/tasks/cmd/mod.rs", "rank": 88, "score": 153873.82228051266 }, { "content": "pub trait VeclikeIterator: Iterator + ExactSizeIterator + DoubleEndedIterator + std::iter::FusedIterator {}\n\nimpl<I> VeclikeIterator for I\n\n where I: Iterator + ExactSizeIterator + DoubleEndedIterator + std::iter::FusedIterator {}\n\n\n\n//--------------------------------------------------------\n\n\n\npub mod ext_traits {\n\n use path_abs::PathDir;\n\n use std::path::Path;\n\n use std::fmt;\n\n\n\n extension_trait!{\n\n <'a> pub ArgMatchesExt<'a> for clap::ArgMatches<'a> {\n\n // For when the value ought to exist because it was 'required(true)'\n\n // (and therefore clap would have panicked if it were missing)\n\n fn expect_value_of(&self, s: &str) -> String\n\n { self.value_of(s).unwrap_or_else(|| panic!(\"BUG! ({} was required)\", s)).into() }\n\n\n\n fn expect_values_of(&self, s: &str) -> Vec<String>\n\n { self.values_of(s).unwrap_or_else(|| panic!(\"BUG! 
({} was required)\", s)).map(Into::into).collect() }\n", "file_path": "rsp2/src/tasks/util/mod.rs", "rank": 89, "score": 153645.88044728036 }, { "content": "pub fn require_simple_axis_normal(normal: V3<i32>, lattice: &Lattice) -> Result<usize, Error> {\n\n let axis = {\n\n let mut sorted = normal;\n\n sorted.sort_unstable();\n\n ensure!(sorted == V3([0, 0, 1]),\n\n \"unsupported layer normal: {:?}\", normal);\n\n\n\n normal.iter().position(|&x| x == 1).unwrap()\n\n };\n\n\n\n let norms = lattice.norms();\n\n let vecs = lattice.vectors();\n\n for k in 0..3 {\n\n if k != axis {\n\n let cos = dot(&vecs[k], &vecs[axis]) / (norms[k] * norms[axis]);\n\n ensure!(cos.abs() < 1e-7,\n\n \"the normal must be perpendicular to the other two lattice vectors.\");\n\n }\n\n }\n\n Ok(axis)\n", "file_path": "rsp2/src/structure/algo/layer.rs", "rank": 90, "score": 153392.30026408253 }, { "content": "#[cfg(feature = \"mpi\")]\n\npub fn mpi_link_test() -> FailResult<()>\n\n{Ok({\n\n println!(\"{}\", crate::mpi::library_version().unwrap());\n\n LammpsOnDemandImpl::install(\n\n LammpsDispatch::new(),\n\n |on_demand| {\n\n unsafe { MpiLammpsOwner::new(\n\n on_demand,\n\n &[\"lammps\", \"-log\", \"none\"],\n\n )}.map(drop)\n\n },\n\n );\n\n})}\n\n\n", "file_path": "rsp2/src/io/lammps/lib.rs", "rank": 91, "score": 152449.61510465827 }, { "content": "pub fn check_availability() -> FailResult<()> {\n\n use self::scipy_eigsh::PY_CHECK_SCIPY_AVAILABILITY;\n\n use self::scipy_eigsh::ScipyAvailabilityError;\n\n\n\n use self::spglib::PY_CHECK_SPGLIB_AVAILABILITY;\n\n use self::spglib::SpglibAvailabilityError;\n\n\n\n call_script_and_check_success(PY_NOOP, PythonExecutionError)?;\n\n call_script_and_check_success(PY_CHECK_SCIPY_AVAILABILITY, ScipyAvailabilityError)?;\n\n call_script_and_check_success(PY_CHECK_SPGLIB_AVAILABILITY, SpglibAvailabilityError)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rsp2/src/tasks/cmd/python/mod.rs", "rank": 92, "score": 152449.61510465827 }, { "content": 
"#[inline(always)]\n\npub fn dot<V>(a: &V, b: &V) -> ScalarT<V>\n\nwhere V: Dot,\n\n{ Dot::dot(a, b) }\n\n\n\n/// Element type of the vector.\n\npub type ScalarT<V> = <V as IsV>::Scalar;\n", "file_path": "rsp2/src/util/array-types/methods_v.rs", "rank": 93, "score": 149928.1373310284 }, { "content": "pub fn linesearch<E, F>(\n\n params: &Settings,\n\n initial_alpha: f64,\n\n mut compute: F,\n\n) -> Result<f64, E>\n\nwhere F: FnMut(f64) -> Result<(f64, f64), E>,\n\n{\n\n let compute = |alpha| {\n\n let (value, slope) = compute(alpha)?;\n\n Ok(Bound { alpha, value, slope })\n\n };\n\n\n\n // I highly doubt that statically known function will help optimize\n\n // linesearches very much, and boxing helps us handle negative slope.\n\n let mut compute: Box<dyn FnMut(f64) -> Result<Bound, E>> = Box::new(compute);\n\n let mut initial = compute(0.0)?;\n\n\n\n if initial.slope > 0.0 {\n\n debug!(\"Positive initial slope, turning around. (slope = {:e})\", initial.slope);\n\n compute = Box::new(move |alpha| {\n", "file_path": "rsp2/src/minimize/src/hager_ls.rs", "rank": 94, "score": 149576.66642135594 }, { "content": "/// `diff_2` for functions that can fail.\n\npub fn try_diff_2<E, F>(\n\n step: f64,\n\n kind: Option<DerivativeKind>,\n\n point: f64,\n\n value_fn: F,\n\n) -> Result<f64, E>\n\nwhere\n\n F: FnMut(f64) -> Result<f64, E>,\n\n{\n\n // http://www.holoborodko.com/pavel/numerical-methods/numerical-derivative/central-differences/#comment-1719\n\n match kind.unwrap_or_default() {\n\n DerivativeKind::Stencil(3) => {\n\n let numer = stencil_sum!(value_fn, point, step, [\n\n (offset: -1.0, coeff: +1.0),\n\n (offset: -0.0, coeff: -2.0),\n\n (offset: +1.0, coeff: +1.0),\n\n ]);\n\n let denom = step * step;\n\n Ok(numer / denom)\n\n },\n", "file_path": "rsp2/src/minimize/src/numerical.rs", "rank": 95, "score": 149576.66642135594 }, { "content": "pub fn linesearch<E, F>(\n\n settings: &Settings,\n\n mut alpha: f64,\n\n mut compute: F,\n\n) -> Result<f64, 
Either<LinesearchError, E>>\n\nwhere F: FnMut(f64) -> Result<(f64, f64), E>,\n\n{\n\n let mut compute = |alpha| compute(alpha).map_err(Right);\n\n\n\n let (mut value, mut slope) = compute(0.0)?;\n\n\n\n assert!(alpha > 0.0, \"non-positive initial alpha: {}\", alpha);\n\n if slope > 0.0 {\n\n return Err(Left(ErrorKind::Uphill { slope }.into()));\n\n }\n\n\n\n let initial_value = value;\n\n\n\n // Right hand side quantities for the wolfe condition linesearch.\n\n // - sufficient decrease\n", "file_path": "rsp2/src/minimize/src/strong_ls.rs", "rank": 96, "score": 149576.66642135594 }, { "content": "pub fn linesearch<E, F>(\n\n from: f64,\n\n initial_step: f64,\n\n mut compute: F,\n\n) -> Result<Result<SlopeBound, E>, GoldenSearchError>\n\nwhere F: FnMut(f64) -> Result<Slope, E>\n\n{\n\n // early wrapping:\n\n // - SlopeBound for internal use\n\n // - Detect nonsensical slopes\n\n // - Result<Slope, Result<TheirError, OurError>> for easy short-circuiting\n\n let compute = move |alpha| {\n\n let slope = compute(alpha).map_err(Ok)?;\n\n if !slope.0.is_finite() {\n\n return Err(Err(ErrorKind::FunctionOutput(slope.0).into()));\n\n }\n\n trace!(\"LS-iter: a: {:<23e} s: {:<23e}\", alpha, slope.0);\n\n Ok(SlopeBound { alpha, slope: slope.0 })\n\n };\n\n\n", "file_path": "rsp2/src/minimize/src/exact_ls.rs", "rank": 97, "score": 149576.66642135594 }, { "content": "/// `slope` for functions that can fail.\n\npub fn try_slope<E, F>(\n\n step: f64,\n\n kind: Option<DerivativeKind>,\n\n point: f64,\n\n value_fn: F,\n\n) -> Result<f64, E>\n\nwhere\n\n F: FnMut(f64) -> Result<f64, E>,\n\n{\n\n // http://www.holoborodko.com/pavel/numerical-methods/numerical-derivative/central-differences/\n\n match kind.unwrap_or_default() {\n\n DerivativeKind::Stencil(3) => {\n\n let numer = stencil_sum!(value_fn, point, step, [\n\n (offset: -1.0, coeff: -1.0),\n\n (offset: +1.0, coeff: +1.0),\n\n ]);\n\n let denom = 2.0 * step;\n\n Ok(numer / denom)\n\n },\n\n\n", "file_path": 
"rsp2/src/minimize/src/numerical.rs", "rank": 98, "score": 149576.66642135594 }, { "content": "/// `gradient` for functions that can fail.\n\npub fn try_gradient<E, F>(\n\n interval_width: f64,\n\n kind: Option<DerivativeKind>,\n\n point: &[f64],\n\n mut value_fn: F,\n\n) -> Result<Vec<f64>, E>\n\nwhere\n\n F: FnMut(&[f64]) -> Result<f64, E>,\n\n{\n\n let kind = kind.unwrap_or_default();\n\n point.iter().enumerate()\n\n .map(|(i, &center)| {\n\n let mut point = point.to_vec(); // reset modifications\n\n try_slope(\n\n interval_width,\n\n Some(kind),\n\n center,\n\n |x| { point[i] = x; value_fn(&point) },\n\n )\n\n })\n", "file_path": "rsp2/src/minimize/src/numerical.rs", "rank": 99, "score": 149576.66642135594 } ]
Rust
src/packets.rs
gkbrk/RustOre-Classic
0ceb927ad06c5a6905811ba56137abff6b5ceb66
use std::io::MemReader; use std::io::net::tcp::TcpStream; use config::Configuration; use mc_string::MCString; pub struct Packet{ pub packet_id: u8, pub packet_len: uint, pub data: Vec<u8> } impl Packet{ pub fn receive(mut conn: TcpStream) -> Packet{ let packet_id = conn.read_byte().unwrap(); let packet_len = match packet_id{ 0x00 => 130, 0x05 => 8, 0x08 => 9, 0x0d => 65, _ => 0 }; let data = conn.read_exact(packet_len).unwrap(); return Packet{ packet_id: packet_id, packet_len: packet_len, data: data }; } pub fn parse_player_ident(&self) -> PlayerIdent{ let mut reader = MemReader::new(self.data.clone()); return PlayerIdent{ version: reader.read_u8().unwrap(), username: reader.read_mc_string(), verification_key: reader.read_mc_string(), unused: reader.read_u8().unwrap() }; } pub fn parse_set_block(&self) -> SetBlock{ let mut reader = MemReader::new(self.data.clone()); return SetBlock{ x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), destroyed: match reader.read_u8().unwrap(){ 0x00 => true, 0x01 => false, _ => false }, block_id: reader.read_u8().unwrap() }; } pub fn parse_position_and_orientation(&self) -> PositionAndOrientation{ let mut reader = MemReader::new(self.data.clone()); return PositionAndOrientation{ player_id: reader.read_u8().unwrap(), x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), yaw: reader.read_u8().unwrap(), pitch: reader.read_u8().unwrap() }; } pub fn parse_message(&self) -> Message{ let mut reader = MemReader::new(self.data.clone()); return Message{ unused: reader.read_u8().unwrap(), message: reader.read_mc_string() }; } } #[deriving(Clone)] struct PlayerIdent{ pub version: u8, pub username: String, pub verification_key: String, unused: u8 } #[deriving(Clone)] struct SetBlock{ pub x: i16, pub y: i16, pub z: i16, pub destroyed: bool, pub block_id: u8 } #[deriving(Clone)] struct PositionAndOrientation{ pub player_id: u8, pub x: i16, pub y: i16, 
pub z: i16, pub yaw: u8, pub pitch: u8 } #[deriving(Clone)] struct Message{ unused: u8, pub message: String } pub trait MCPackets{ fn send_server_ident(&mut self, config: Configuration); fn send_ping(&mut self); fn send_level_init(&mut self); fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8); fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16); fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_chat_message(&mut self, player_id: i8, message: String); } impl MCPackets for TcpStream{ fn send_server_ident(&mut self, config: Configuration){ self.write_u8(0x00); self.write_u8(0x07); self.write_mc_string(config.server_name); self.write_mc_string(config.server_motd); self.write_u8(0x00); } fn send_ping(&mut self){ self.write_u8(0x01); } fn send_level_init(&mut self){ self.write_u8(0x02); } fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8){ self.write_u8(0x03); self.write_be_i16(length); self.write(data); for i in range(0, 1024 - length){ self.write_u8(0x00); } self.write_u8(percentage); } fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16){ self.write_u8(0x04); self.write_be_i16(x_size); self.write_be_i16(y_size); self.write_be_i16(z_size); } fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x07); self.write_i8(-1); self.write_mc_string("gokberkdoga".to_string()); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x08); self.write_i8(-1); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_chat_message(&mut self, player_id: i8, message: String){ self.write_u8(0x0d); self.write_i8(player_id); self.write_mc_string(message); } }
use std::io::MemReader; use std::io::net::tcp::TcpStream; use config::Configuration; use mc_string::MCString; pub struct Packet{ pub packet_id: u8, pub packet_len: uint, pub data: Vec<u8> } impl Packet{ pub fn receive(mut conn: TcpStream) -> Packet{ let packet_id = conn.read_byte().unwrap(); let packet_len = match packet_id{ 0x00 => 130, 0x05 => 8, 0x08 => 9, 0x0d => 65, _ => 0 }; let data = conn.read_exact(packet_len).unwrap(); return Packet{ packet_id: packet_id, packet_len: packet_len, data: data }; } pub fn parse_player_ident(&self) -> PlayerIdent{ let mut reader = MemReader::new(self.data.clone()); return PlayerIdent{ version: reader.read_u8().unwrap(), username: reader.read_mc_string(), verification_key: reader.read_mc_string(), unused: reader.read_u8().unwrap() }; } pub fn parse_set_block(&self)
} fn send_ping(&mut self){ self.write_u8(0x01); } fn send_level_init(&mut self){ self.write_u8(0x02); } fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8){ self.write_u8(0x03); self.write_be_i16(length); self.write(data); for i in range(0, 1024 - length){ self.write_u8(0x00); } self.write_u8(percentage); } fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16){ self.write_u8(0x04); self.write_be_i16(x_size); self.write_be_i16(y_size); self.write_be_i16(z_size); } fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x07); self.write_i8(-1); self.write_mc_string("gokberkdoga".to_string()); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x08); self.write_i8(-1); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_chat_message(&mut self, player_id: i8, message: String){ self.write_u8(0x0d); self.write_i8(player_id); self.write_mc_string(message); } }
-> SetBlock{ let mut reader = MemReader::new(self.data.clone()); return SetBlock{ x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), destroyed: match reader.read_u8().unwrap(){ 0x00 => true, 0x01 => false, _ => false }, block_id: reader.read_u8().unwrap() }; } pub fn parse_position_and_orientation(&self) -> PositionAndOrientation{ let mut reader = MemReader::new(self.data.clone()); return PositionAndOrientation{ player_id: reader.read_u8().unwrap(), x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), yaw: reader.read_u8().unwrap(), pitch: reader.read_u8().unwrap() }; } pub fn parse_message(&self) -> Message{ let mut reader = MemReader::new(self.data.clone()); return Message{ unused: reader.read_u8().unwrap(), message: reader.read_mc_string() }; } } #[deriving(Clone)] struct PlayerIdent{ pub version: u8, pub username: String, pub verification_key: String, unused: u8 } #[deriving(Clone)] struct SetBlock{ pub x: i16, pub y: i16, pub z: i16, pub destroyed: bool, pub block_id: u8 } #[deriving(Clone)] struct PositionAndOrientation{ pub player_id: u8, pub x: i16, pub y: i16, pub z: i16, pub yaw: u8, pub pitch: u8 } #[deriving(Clone)] struct Message{ unused: u8, pub message: String } pub trait MCPackets{ fn send_server_ident(&mut self, config: Configuration); fn send_ping(&mut self); fn send_level_init(&mut self); fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8); fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16); fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_chat_message(&mut self, player_id: i8, message: String); } impl MCPackets for TcpStream{ fn send_server_ident(&mut self, config: Configuration){ self.write_u8(0x00); self.write_u8(0x07); self.write_mc_string(config.server_name); self.write_mc_string(config.server_motd); 
self.write_u8(0x00);
random
[ { "content": "fn handle_connection(config: Configuration, mut conn: TcpStream, mutex_world: Arc<Mutex<World>>){\n\n let ip = match conn.peer_name(){\n\n Ok(x) => x.ip,\n\n Err(x) => {return;}\n\n };\n\n println!(\"{} is connecting to us...\", ip);\n\n loop{\n\n let packet = Packet::receive(conn.clone());\n\n //println!(\"{}\", packet.packet_id);\n\n \n\n if packet.packet_id == 0x00{\n\n let parsed = packet.parse_player_ident();\n\n //if config.online_mode & !is_authenticated(config.clone().salt, parsed.clone().username, parsed.clone().verification_key){\n\n // println!(\"Player tried to join without auth!\");\n\n // conn.close_read();\n\n // return;\n\n //}\n\n println!(\"{}\", parsed.username);\n\n \n\n conn.send_server_ident(config.clone());\n", "file_path": "src/main.rs", "rank": 0, "score": 48514.47123177751 }, { "content": "fn main(){\n\n let config = Configuration::get_default_config();\n\n \n\n let mut mc_world = World::new(10, 10, 10);\n\n for i in range(0u, 10){\n\n for i1 in range(0u, 10){\n\n for i2 in range(0u, 10){\n\n mc_world.set_block(i, i1, i2, 0x01);\n\n }\n\n }\n\n }\n\n let mutex_world = Arc::new(Mutex::new(mc_world));\n\n \n\n let heartbeat_sender = Heartbeat::new(config.clone());\n\n heartbeat_sender.spawn_task();\n\n \n\n let mut acceptor = TcpListener::bind((config.address.as_slice(), config.port)).listen().unwrap();\n\n println!(\"RustOre is listening on {}:{}\", config.address, config.port);\n\n for connection in acceptor.incoming(){\n\n let config_clone = config.clone();\n\n let mutex_world_clone = mutex_world.clone();\n\n Thread::spawn(move || {\n\n handle_connection(config_clone, connection.unwrap(), mutex_world_clone);\n\n }).detach();\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 6, "score": 28793.515954192404 }, { "content": "pub trait MCString{\n\n fn read_mc_string(&mut self) -> String;\n\n fn write_mc_string(&mut self, mc_str: String);\n\n}\n\n\n\nimpl MCString for TcpStream{\n\n fn read_mc_string(&mut self) -> String{\n\n 
let mut bytes: Vec<u8> = self.read_exact(64).unwrap();\n\n let mut length: uint = 0;\n\n bytes.reverse();\n\n for i in range(0u, bytes.len()){\n\n if *bytes.get_mut(i).unwrap() != 0x20{\n\n length = i;\n\n break;\n\n }\n\n }\n\n let mut splitted = bytes.slice_from(length).to_vec();\n\n splitted.reverse();\n\n return String::from_utf8(splitted.slice_from(0).to_vec()).unwrap();\n\n }\n", "file_path": "src/mc_string.rs", "rank": 7, "score": 24296.54711331273 }, { "content": "extern crate flate2;\n\n\n\nuse flate2::writer::GzEncoder;\n\nuse std::io::MemWriter;\n\n\n\nuse std::io::net::tcp::TcpStream;\n\n\n\nuse packets::MCPackets;\n\n\n\n#[deriving(Clone)]\n\npub struct World{\n\n pub x_size: uint,\n\n pub y_size: uint,\n\n pub z_size: uint,\n\n pub blocks: Vec<u8> \n\n}\n\n\n\nimpl World{\n\n pub fn new(x_size: uint, y_size: uint, z_size: uint) -> World{\n\n let mut block_vec: Vec<u8> = Vec::new();\n", "file_path": "src/world.rs", "rank": 15, "score": 10.449233731143828 }, { "content": " \n\n \n\n //Send debug level data\n\n let mut level = mutex_world.lock().unwrap();\n\n level.send_world(conn.clone());\n\n \n\n //conn.send_spawn_player(5*32, 15*32, 5*32, 5, 5);\n\n conn.send_pos(5*32, 25*32, 5*32, 5, 5);\n\n }else if packet.packet_id == 0x08{\n\n //println!(\"Player moved\");\n\n }else if packet.packet_id == 0x05{\n\n let parsed = packet.parse_set_block();\n\n let mut level = mutex_world.lock().unwrap();\n\n if parsed.destroyed{\n\n level.set_block(parsed.x as uint, parsed.y as uint, parsed.z as uint, 0x00);\n\n }else{\n\n level.set_block(parsed.x as uint, parsed.y as uint, parsed.z as uint, parsed.block_id);\n\n }\n\n }else if packet.packet_id == 0x0d{\n\n let parsed = packet.parse_message();\n\n println!(\"{}\", parsed.message);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 7.616916047185012 }, { "content": " },\n\n None => {return;}\n\n }\n\n }\n\n \n\n pub fn get_block(&mut self, x: uint, y: uint, z: uint) -> u8{\n\n let block = 
self.calculate_block_from_coord(x, y, z);\n\n return *self.blocks.get_mut(block).unwrap();\n\n }\n\n \n\n pub fn gzip_world(&mut self) -> Vec<u8>{\n\n let mut gzipper = GzEncoder::new(MemWriter::new(), flate2::CompressionLevel::Default);\n\n gzipper.write_be_i32((self.x_size * self.y_size * self.z_size) as i32);\n\n for block in self.blocks.iter(){\n\n gzipper.write_u8(*block);\n\n }\n\n return gzipper.finish().unwrap().unwrap();\n\n }\n\n \n\n pub fn send_world(&mut self, mut conn: TcpStream){\n", "file_path": "src/world.rs", "rank": 17, "score": 7.404375136807257 }, { "content": " for i in range(0u, x_size * y_size * z_size){\n\n block_vec.push(0x00);\n\n }\n\n return World{\n\n x_size: x_size,\n\n y_size: y_size,\n\n z_size: z_size,\n\n blocks: block_vec\n\n };\n\n }\n\n \n\n pub fn calculate_block_from_coord(&mut self, x: uint, y: uint, z: uint) -> uint{\n\n return (y * self.z_size + z) * self.x_size + x;\n\n }\n\n \n\n pub fn set_block(&mut self, x: uint, y: uint, z: uint, block_id: u8){\n\n let block = self.calculate_block_from_coord(x, y, z);\n\n match self.blocks.get_mut(block){\n\n Some(x) => {\n\n *x = block_id;\n", "file_path": "src/world.rs", "rank": 18, "score": 7.186142563239249 }, { "content": " \n\n fn write_mc_string(&mut self, mc_str: String){\n\n self.write(mc_str.as_bytes());\n\n for i in range(0, 64 - mc_str.as_bytes().len()){\n\n self.write_u8(0x20);\n\n }\n\n }\n\n}\n\n\n\nimpl MCString for MemReader{\n\n fn read_mc_string(&mut self) -> String{\n\n let mut bytes: Vec<u8> = self.read_exact(64).unwrap();\n\n let mut length: uint = 0;\n\n bytes.reverse();\n\n for i in range(0u, bytes.len()){\n\n if *bytes.get_mut(i).unwrap() != 0x20{\n\n length = i;\n\n break;\n\n }\n\n }\n\n let mut splitted = bytes.slice_from(length).to_vec();\n\n splitted.reverse();\n\n return String::from_utf8(splitted.slice_from(0).to_vec()).unwrap();\n\n }\n\n \n\n fn write_mc_string(&mut self, mc_str: String){\n\n }\n\n}\n", "file_path": "src/mc_string.rs", "rank": 19, 
"score": 5.437765302575108 }, { "content": "//extern crate crypto;\n\n//use self::crypto::md5::Md5;\n\n//use self::crypto::digest::Digest;\n\n\n\n//pub fn is_authenticated(salt: String, username: String, verification_key: String) -> bool{\n\n// let mut md5_hasher = Md5::new();\n\n// md5_hasher.input_str((salt + username.as_slice()).as_slice());\n\n// return md5_hasher.result_str() == verification_key;\n\n//}\n", "file_path": "src/authentication_verifier.rs", "rank": 20, "score": 5.434759930269681 }, { "content": "use std::rand::{thread_rng, Rng};\n\n\n\n#[deriving(Clone)]\n\npub struct Configuration{\n\n pub address: String,\n\n pub port: u16,\n\n pub max_players: uint,\n\n pub server_name: String,\n\n pub server_motd: String,\n\n pub is_public: String,\n\n pub online_mode: bool,\n\n pub salt: String,\n\n pub heartbeat_interval: i64\n\n}\n\n\n\nimpl Configuration{\n\n pub fn get_default_config() -> Configuration{\n\n return Configuration{\n\n address: \"0.0.0.0\".to_string(),\n\n port: 25565,\n", "file_path": "src/config.rs", "rank": 21, "score": 5.233233945397328 }, { "content": "extern crate curl;\n\n\n\nuse curl::http;\n\nuse config::Configuration;\n\nuse std::io::timer;\n\nuse std::time::Duration;\n\nuse std::thread::Thread;\n\n\n\n#[deriving(Clone)]\n\npub struct Heartbeat{\n\n config: Configuration\n\n}\n\n\n\nimpl Heartbeat{\n\n pub fn new(config: Configuration) -> Heartbeat{\n\n return Heartbeat{\n\n config: config\n\n };\n\n }\n\n \n", "file_path": "src/heartbeat.rs", "rank": 22, "score": 5.053566589646119 }, { "content": " conn.send_level_init();\n\n \n\n let gb = self.gzip_world();\n\n let bytes = gb.as_slice();\n\n let total_bytes = bytes.len();\n\n let mut cur_byte: uint = 0;\n\n loop{\n\n if total_bytes - cur_byte > 1024{\n\n let bytes_vec = bytes.to_vec();\n\n let partial_bytes = bytes_vec.slice(cur_byte, cur_byte + 1024);\n\n conn.send_chunk_data(1024, partial_bytes, ((cur_byte / total_bytes * 100) as u8));\n\n cur_byte += 1024;\n\n }else if 
total_bytes - cur_byte > 0{\n\n let bytes_vec = bytes.to_vec();\n\n let partial_bytes = bytes_vec.slice(cur_byte, total_bytes);\n\n conn.send_chunk_data((total_bytes - cur_byte) as i16, partial_bytes, ((cur_byte / total_bytes * 100) as u8));\n\n cur_byte += total_bytes - cur_byte;\n\n }else{\n\n break;\n\n }\n\n }\n\n \n\n conn.send_level_finalize(self.x_size as i16, self.y_size as i16, self.z_size as i16);\n\n }\n\n}\n", "file_path": "src/world.rs", "rank": 23, "score": 4.846454385859708 }, { "content": "extern crate flate2;\n\nextern crate curl;\n\n\n\nuse std::io::{Listener, Acceptor};\n\nuse std::io::net::tcp::{TcpListener, TcpStream};\n\n\n\nuse std::sync::{Mutex, Arc};\n\n\n\nuse std::thread::Thread;\n\n\n\nuse config::Configuration;\n\nuse packets::{Packet, MCPackets};\n\nuse heartbeat::Heartbeat;\n\n//use authentication_verifier::is_authenticated;\n\nuse world::World;\n\n\n\nmod mc_string;\n\nmod packets;\n\nmod config;\n\nmod heartbeat;\n\nmod authentication_verifier;\n\nmod world;\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 4.337949438455682 }, { "content": "#[deriving(Clone)]\n\npub struct Player{\n\n pub name: String,\n\n pub x: u32,\n\n pub y: u32,\n\n pub z: u32,\n\n pub yaw: u8,\n\n pub pitch: u8,\n\n pub stream: TcpStream\n\n}", "file_path": "src/player.rs", "rank": 25, "score": 4.115553886941909 }, { "content": "use std::io::net::tcp::TcpStream;\n\nuse std::io::MemReader;\n\n\n", "file_path": "src/mc_string.rs", "rank": 26, "score": 4.015640886065889 }, { "content": " pub fn send_heartbeat(&self){\n\n let response = http::handle().get(format!(\"https://minecraft.net/heartbeat.jsp?port={}&max={}&name={}&public={}&version=7&salt={}&users=0\", self.config.port, self.config.max_players, self.config.server_name.as_slice(), self.config.is_public.as_slice(), self.config.salt.as_slice())).exec().unwrap();\n\n }\n\n \n\n pub fn loop_blocking(&self){\n\n loop{\n\n self.send_heartbeat();\n\n println!(\"Sent heartbeat!\");\n\n 
timer::sleep(Duration::seconds(self.config.heartbeat_interval));\n\n }\n\n }\n\n \n\n pub fn spawn_task(&self){\n\n let clone = self.clone();\n\n Thread::spawn(move || {\n\n clone.loop_blocking();\n\n }).detach();\n\n }\n\n}\n", "file_path": "src/heartbeat.rs", "rank": 27, "score": 2.438238202228863 } ]
Rust
src/db/users.rs
Follpvosten/swiki
7a5d216cd9776fa2958531bc8d357f0e695d3635
use std::{convert::TryFrom, result::Result as StdResult}; use rocket::{ outcome::try_outcome, request::{FromRequest, Outcome}, tokio::task::spawn_blocking, Request, }; use sqlx::PgPool; use uuid::Uuid; use zeroize::Zeroize; use crate::{Db, Error, Result}; #[derive(Debug, Clone, Copy)] pub struct UserSession { pub session_id: Uuid, pub user_id: Uuid, } #[rocket::async_trait] impl<'r> FromRequest<'r> for &'r UserSession { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use rocket::outcome::IntoOutcome; let result = request .local_cache_async(async { let session_id = request .cookies() .get("session_id") .and_then(|cookie| base64::decode(cookie.value()).ok()) .and_then(|vec| uuid::Bytes::try_from(vec.as_slice()).ok()) .map(Uuid::from_bytes)?; let db: &Db = request.rocket().state()?; let user_id = match db.get_session_user(session_id).await { Err(e) => { log::error!("Error getting session user: {}", e); None } Ok(user_id) => Some(user_id), }?; user_id.map(|user_id| UserSession { session_id, user_id, }) }) .await; result.as_ref().or_forward(()) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedUser { id: Uuid, name: String, is_admin: bool, } impl LoggedUser { pub fn is_admin(&self) -> bool { self.is_admin } } #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedUser { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use crate::error::IntoOutcomeHack; use rocket::outcome::IntoOutcome; let session: &UserSession = try_outcome!(request.guard().await); let db: &Db = try_outcome!(request.rocket().state().or_forward(())); async fn get_user_info(pool: &PgPool, id: Uuid) -> Result<(bool, String)> { Ok( sqlx::query!(r#"SELECT name, is_admin FROM "user" WHERE id = $1"#, id) .fetch_one(pool) .await .map(|r| (r.is_admin, r.name))?, ) } let (is_admin, name) = try_outcome!(get_user_info(db, session.user_id).await.into_outcome_hack()); Outcome::Success(LoggedUser { 
id: session.user_id, name, is_admin, }) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedAdmin(LoggedUser); #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedAdmin { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { let logged_user: LoggedUser = try_outcome!(request.guard().await); if logged_user.is_admin { Outcome::Success(LoggedAdmin(logged_user)) } else { Outcome::Forward(()) } } } fn hash_password(password: &str) -> StdResult<String, argon2::Error> { fn gen_salt() -> Vec<u8> { use rand::Rng; rand::thread_rng() .sample_iter(&rand::distributions::Alphanumeric) .take(32) .collect() } let config = argon2::Config { variant: argon2::Variant::Argon2i, ..Default::default() }; let salt = gen_salt(); argon2::hash_encoded(password.as_bytes(), &salt, &config) } fn verify_password(hash: &str, password: &str) -> StdResult<bool, argon2::Error> { argon2::verify_encoded(hash, password.as_bytes()) } pub async fn name_exists(pool: &PgPool, username: &str) -> Result<bool> { Ok(sqlx::query_scalar!( r#"SELECT EXISTS(SELECT 1 FROM "user" WHERE name = $1) AS "a!""#, username ) .fetch_one(pool) .await?) } pub async fn register(pool: &PgPool, username: &str, mut password: String) -> Result<Uuid> { if name_exists(pool, username).await? { return Err(Error::UserAlreadyExists(username.to_string())); } let id = Uuid::new_v4(); let pw_hash = spawn_blocking(move || { let res = hash_password(&password); password.zeroize(); res }) .await??; sqlx::query!( r#"INSERT INTO "user"(id, name, pw_hash, is_admin) VALUES($1, $2, $3, (SELECT COUNT(*) FROM "user") = 0)"#, id, username, pw_hash ) .execute(pool) .await?; Ok(id) } pub async fn try_login(pool: &PgPool, username: &str, mut password: String) -> Result<UserSession> { let (user_id, hash) = sqlx::query!( r#"SELECT id, pw_hash FROM "user" WHERE name = $1"#, username ) .fetch_optional(pool) .await? 
.map(|r| (r.id, r.pw_hash)) .ok_or_else(|| Error::UserNotFound(username.to_string()))?; let pw_valid = spawn_blocking(move || { let res = verify_password(&hash, &password); password.zeroize(); res }) .await??; if pw_valid { let session_id = create_session(pool, user_id).await?; Ok(UserSession { session_id, user_id, }) } else { Err(Error::WrongPassword) } } async fn create_session(pool: &PgPool, user_id: Uuid) -> Result<Uuid> { let session_id = Uuid::new_v4(); sqlx::query!( "INSERT INTO session(session_id, user_id) VALUES($1, $2)", session_id, user_id ) .execute(pool) .await?; Ok(session_id) } pub async fn destroy_session(pool: &PgPool, session_id: Uuid) -> Result<()> { sqlx::query!("DELETE FROM session WHERE session_id = $1", session_id) .execute(pool) .await?; Ok(()) } pub async fn get_session_user(pool: &PgPool, session_id: Uuid) -> Result<Option<Uuid>> { Ok(sqlx::query_scalar!( "SELECT user_id FROM session WHERE session_id = $1", session_id ) .fetch_optional(pool) .await?) } pub async fn is_admin(pool: &PgPool, user_id: Uuid) -> Result<bool> { Ok( sqlx::query_scalar!(r#"SELECT is_admin FROM "user" WHERE id = $1"#, user_id) .fetch_optional(pool) .await? .unwrap_or(false), ) }
use std::{convert::TryFrom, result::Result as StdResult}; use rocket::{ outcome::try_outcome, request::{FromRequest, Outcome}, tokio::task::spawn_blocking, Request, }; use sqlx::PgPool; use uuid::Uuid; use zeroize::Zeroize; use crate::{Db, Error, Result}; #[derive(Debug, Clone, Copy)] pub struct UserSession { pub session_id: Uuid, pub user_id: Uuid, } #[rocket::async_trait] impl<'r> FromRequest<'r> for &'r UserSession { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use rocket::outcome::IntoOutcome; let result = request .local_cache_async(async { let session_id = request .cookies() .get("session_id") .and_then(|cookie| base64::decode(cookie.value()).ok()) .and_then(|vec| uuid::Bytes::try_from(vec.as_slice()).ok()) .map(Uuid::from_bytes)?; let db: &Db = request.rocket().state()?;
user_id.map(|user_id| UserSession { session_id, user_id, }) }) .await; result.as_ref().or_forward(()) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedUser { id: Uuid, name: String, is_admin: bool, } impl LoggedUser { pub fn is_admin(&self) -> bool { self.is_admin } } #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedUser { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use crate::error::IntoOutcomeHack; use rocket::outcome::IntoOutcome; let session: &UserSession = try_outcome!(request.guard().await); let db: &Db = try_outcome!(request.rocket().state().or_forward(())); async fn get_user_info(pool: &PgPool, id: Uuid) -> Result<(bool, String)> { Ok( sqlx::query!(r#"SELECT name, is_admin FROM "user" WHERE id = $1"#, id) .fetch_one(pool) .await .map(|r| (r.is_admin, r.name))?, ) } let (is_admin, name) = try_outcome!(get_user_info(db, session.user_id).await.into_outcome_hack()); Outcome::Success(LoggedUser { id: session.user_id, name, is_admin, }) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedAdmin(LoggedUser); #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedAdmin { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { let logged_user: LoggedUser = try_outcome!(request.guard().await); if logged_user.is_admin { Outcome::Success(LoggedAdmin(logged_user)) } else { Outcome::Forward(()) } } } fn hash_password(password: &str) -> StdResult<String, argon2::Error> { fn gen_salt() -> Vec<u8> { use rand::Rng; rand::thread_rng() .sample_iter(&rand::distributions::Alphanumeric) .take(32) .collect() } let config = argon2::Config { variant: argon2::Variant::Argon2i, ..Default::default() }; let salt = gen_salt(); argon2::hash_encoded(password.as_bytes(), &salt, &config) } fn verify_password(hash: &str, password: &str) -> StdResult<bool, argon2::Error> { argon2::verify_encoded(hash, password.as_bytes()) } pub async fn name_exists(pool: 
&PgPool, username: &str) -> Result<bool> { Ok(sqlx::query_scalar!( r#"SELECT EXISTS(SELECT 1 FROM "user" WHERE name = $1) AS "a!""#, username ) .fetch_one(pool) .await?) } pub async fn register(pool: &PgPool, username: &str, mut password: String) -> Result<Uuid> { if name_exists(pool, username).await? { return Err(Error::UserAlreadyExists(username.to_string())); } let id = Uuid::new_v4(); let pw_hash = spawn_blocking(move || { let res = hash_password(&password); password.zeroize(); res }) .await??; sqlx::query!( r#"INSERT INTO "user"(id, name, pw_hash, is_admin) VALUES($1, $2, $3, (SELECT COUNT(*) FROM "user") = 0)"#, id, username, pw_hash ) .execute(pool) .await?; Ok(id) } pub async fn try_login(pool: &PgPool, username: &str, mut password: String) -> Result<UserSession> { let (user_id, hash) = sqlx::query!( r#"SELECT id, pw_hash FROM "user" WHERE name = $1"#, username ) .fetch_optional(pool) .await? .map(|r| (r.id, r.pw_hash)) .ok_or_else(|| Error::UserNotFound(username.to_string()))?; let pw_valid = spawn_blocking(move || { let res = verify_password(&hash, &password); password.zeroize(); res }) .await??; if pw_valid { let session_id = create_session(pool, user_id).await?; Ok(UserSession { session_id, user_id, }) } else { Err(Error::WrongPassword) } } async fn create_session(pool: &PgPool, user_id: Uuid) -> Result<Uuid> { let session_id = Uuid::new_v4(); sqlx::query!( "INSERT INTO session(session_id, user_id) VALUES($1, $2)", session_id, user_id ) .execute(pool) .await?; Ok(session_id) } pub async fn destroy_session(pool: &PgPool, session_id: Uuid) -> Result<()> { sqlx::query!("DELETE FROM session WHERE session_id = $1", session_id) .execute(pool) .await?; Ok(()) } pub async fn get_session_user(pool: &PgPool, session_id: Uuid) -> Result<Option<Uuid>> { Ok(sqlx::query_scalar!( "SELECT user_id FROM session WHERE session_id = $1", session_id ) .fetch_optional(pool) .await?) 
} pub async fn is_admin(pool: &PgPool, user_id: Uuid) -> Result<bool> { Ok( sqlx::query_scalar!(r#"SELECT is_admin FROM "user" WHERE id = $1"#, user_id) .fetch_optional(pool) .await? .unwrap_or(false), ) }
let user_id = match db.get_session_user(session_id).await { Err(e) => { log::error!("Error getting session user: {}", e); None } Ok(user_id) => Some(user_id), }?;
assignment_statement
[ { "content": "type Result<T> = std::result::Result<T, Error>;\n\n\n\n// Route modules\n\nmod articles;\n\nmod settings;\n\nmod users;\n\n\n", "file_path": "src/main.rs", "rank": 0, "score": 122101.93948178331 }, { "content": "pub fn routes() -> Vec<rocket::Route> {\n\n rocket::routes![\n\n profile,\n\n register_page,\n\n register_form,\n\n login_redirect,\n\n login_page,\n\n login_form,\n\n logout,\n\n ]\n\n}\n\n\n", "file_path": "src/users.rs", "rank": 1, "score": 104269.2253082417 }, { "content": "pub fn routes() -> Vec<rocket::Route> {\n\n rocket::routes![panel_page, panel_redirect, admin_settings, admin_redirect]\n\n}\n\n\n\n#[get(\"/\")]\n\nasync fn panel_page(db: &State<Db>, cfg: &State<Config>, user: LoggedUser) -> Result<Template> {\n\n let mut context = json! {{\n\n \"site_name\": &cfg.site_name,\n\n \"default_path\": &cfg.default_path,\n\n \"user\": user,\n\n }};\n\n if user.is_admin() {\n\n let registration_enabled = db.registration_enabled().await?;\n\n context.as_object_mut().unwrap().extend(vec![(\n\n \"registration_enabled\".into(),\n\n registration_enabled.into(),\n\n )]);\n\n }\n\n Ok(Template::render(\"settings_panel\", dbg!(context)))\n\n}\n\n\n", "file_path": "src/settings.rs", "rank": 2, "score": 104269.22530824169 }, { "content": "fn rocket() -> Rocket<Build> {\n\n rocket::build()\n\n .mount(\"/\", rocket::routes![index])\n\n .mount(\"/\", articles::routes())\n\n .mount(\"/u\", users::routes())\n\n .mount(\"/settings\", settings::routes())\n\n .mount(\"/res\", FileServer::from(\"static\"))\n\n .manage(Cache::default())\n\n .attach(AdHoc::try_on_ignite(\"Read config\", |rocket| async {\n\n let mut config: Config = match rocket.figment().extract() {\n\n Ok(c) => c,\n\n Err(e) => {\n\n log::error!(\"Failed to parse config: {}\", e);\n\n return Err(rocket);\n\n }\n\n };\n\n if config.default_path.is_empty() {\n\n config.default_path = \"/\".to_string() + &config.main_page;\n\n }\n\n Ok(rocket.manage(config))\n", "file_path": "src/main.rs", 
"rank": 5, "score": 92222.59571072596 }, { "content": "// Ouch: I can't implement IntoOutcome for crate::Result<S>.\n\n// I also can't just impl crate::Result<S> and add such a method.\n\n// So I'll have to use a helper trait...\n\npub trait IntoOutcomeHack<S> {\n\n fn into_outcome_hack(self) -> Outcome<S, (Status, Error), ()>;\n\n}\n\nimpl<S> IntoOutcomeHack<S> for crate::Result<S> {\n\n fn into_outcome_hack(self) -> Outcome<S, (Status, Error), ()> {\n\n match self {\n\n Ok(val) => Outcome::Success(val),\n\n Err(e) => Outcome::Failure((e.status(), e)),\n\n }\n\n }\n\n}\n\n\n\nimpl<'r> Responder<'r, 'static> for Error {\n\n fn respond_to(self, request: &'r Request<'_>) -> response::Result<'static> {\n\n // If this doesn't return Some, we're dead anyways because the whole\n\n // runtime was initialized in the wrong way\n\n let cfg: &crate::Config = request.rocket().state().unwrap();\n\n let status = self.status();\n\n let context = serde_json::json! {{\n\n \"site_name\": &cfg.site_name,\n\n \"default_path\": &cfg.default_path,\n\n \"status\": status.to_string(),\n\n \"error\": self.to_string(),\n\n }};\n\n response::status::Custom(status, Template::render(\"error\", context)).respond_to(request)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 6, "score": 85156.97341584362 }, { "content": "#[get(\"/<_username>\", rank = 4)]\n\nfn profile(_db: &State<Db>, _username: String, _user: Option<LoggedUser>) -> Result<Template> {\n\n todo!()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::generate_captcha;\n\n\n\n #[test]\n\n fn captcha_generation() {\n\n // Do it 5 times to be sure.\n\n for _ in 0..5 {\n\n let (solution, base64) = generate_captcha().expect(\"captcha generation failed\");\n\n // Check if it's valid base64\n\n assert!(base64::decode(&base64).is_ok());\n\n // We always call add_chars(5)\n\n assert_eq!(solution.len(), 5);\n\n // And I'm pretty sure it should only do alphanumerical characters\n\n assert!(solution.chars().all(|c| 
c.is_ascii_alphanumeric()));\n\n }\n\n }\n\n}\n", "file_path": "src/users.rs", "rank": 7, "score": 81349.49836222059 }, { "content": "fn block_on<F, R>(fut: F) -> R\n\nwhere\n\n F: std::future::Future<Output = R>,\n\n{\n\n rocket::tokio::runtime::Runtime::new()\n\n .unwrap()\n\n .block_on(fut)\n\n}\n", "file_path": "src/tests.rs", "rank": 8, "score": 79201.03647055263 }, { "content": "pub fn routes() -> Vec<Route> {\n\n rocket::routes![\n\n search,\n\n create,\n\n get,\n\n edit_page,\n\n edit_form,\n\n redirect_to_login_get,\n\n redirect_to_login_post,\n\n revs,\n\n rev\n\n ]\n\n}\n\n\n", "file_path": "src/articles.rs", "rank": 9, "score": 71865.85151006897 }, { "content": "/// Generate a captcha.\n\n/// Returns the captcha as base64 and the characters it contains.\n\nfn generate_captcha() -> Result<(String, String)> {\n\n use captcha::{\n\n filters::{Dots, Noise, Wave},\n\n Captcha,\n\n };\n\n use rand::Rng;\n\n\n\n let mut captcha = Captcha::new();\n\n let mut rng = rand::thread_rng();\n\n captcha\n\n .add_chars(5)\n\n .apply_filter(Noise::new(0.4))\n\n .apply_filter(Wave::new(rng.gen_range(1.0..3.0), rng.gen_range(10.0..30.0)).horizontal())\n\n .apply_filter(Wave::new(rng.gen_range(1.0..3.0), rng.gen_range(10.0..30.0)).vertical())\n\n .view(220, 120)\n\n .apply_filter(Dots::new(rng.gen_range(3..6)));\n\n let result = (\n\n captcha.chars_as_string(),\n\n captcha.as_base64().ok_or(Error::CaptchaPngError)?,\n\n );\n", "file_path": "src/users.rs", "rank": 10, "score": 69716.02389406344 }, { "content": "/// Helper method that returns a captcha id and its solution from a new challenge.\n\n/// Will panic if getting any of these fails.\n\nfn register_challenge(client: &Client) -> (Uuid, String) {\n\n let register_challenge_response = client.get(\"/u/register\").dispatch();\n\n // We need the html.\n\n let body = register_challenge_response.into_string().unwrap();\n\n // Parse it into a document we can use.\n\n let document = scraper::Html::parse_document(&body);\n\n // 
Select the element which gives us the captcha id\n\n let selector = Selector::parse(\"input[name='captcha_id']\").unwrap();\n\n let input = document.select(&selector).next().unwrap();\n\n // And extract it\n\n let value = input.value().attr(\"value\").unwrap();\n\n let captcha_id: Uuid = value.parse().unwrap();\n\n // Here we cheat and ask the cache for the solution\n\n let captcha_solution = client\n\n .rocket()\n\n .state::<Cache>()\n\n .unwrap()\n\n .get_solution(captcha_id)\n\n .unwrap();\n\n (captcha_id, captcha_solution)\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 11, "score": 65788.344273186 }, { "content": "fn content_type_form() -> ContentType {\n\n ContentType::new(\"application\", \"x-www-form-urlencoded\")\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 12, "score": 63767.24061641653 }, { "content": "#[derive(serde::Serialize)]\n\nstruct RevContext<'a> {\n\n site_name: &'a str,\n\n default_path: &'a str,\n\n article_name: String,\n\n user: Option<LoggedUser>,\n\n rev_id: i64,\n\n content: String,\n\n author: String,\n\n date: DateTime<Utc>,\n\n specific_rev: bool,\n\n}\n\n\n", "file_path": "src/articles.rs", "rank": 13, "score": 45516.86261775633 }, { "content": "#[derive(Debug, serde::Serialize)]\n\nstruct RegisterPageContext<'a> {\n\n site_name: &'a str,\n\n default_path: &'a str,\n\n page_name: &'static str,\n\n username: Option<String>,\n\n captcha_base64: String,\n\n captcha_uuid: String,\n\n pwds_dont_match: bool,\n\n username_taken: bool,\n\n no_username: bool,\n\n failed_captcha: bool,\n\n}\n\nimpl<'a> Default for RegisterPageContext<'a> {\n\n fn default() -> Self {\n\n Self {\n\n site_name: \"\",\n\n default_path: \"\",\n\n page_name: \"Register\",\n\n username: None,\n\n captcha_base64: Default::default(),\n", "file_path": "src/users.rs", "rank": 14, "score": 44118.508321416666 }, { "content": "#[derive(serde::Serialize)]\n\nstruct NewRevContext<'a> {\n\n site_name: &'a str,\n\n default_path: &'a str,\n\n article_name: String,\n\n 
user: LoggedUser,\n\n old_content: String,\n\n new_article: bool,\n\n invalid_name_change: bool,\n\n}\n\n#[get(\"/<article_name>/edit\")]\n\nasync fn edit_page(\n\n db: &State<Db>,\n\n cfg: &State<Config>,\n\n article_name: String,\n\n // This route will only be called when a user is logged in.\n\n user: LoggedUser,\n\n) -> Result<Template> {\n\n // For a new article, the only difference is the content being empty string.\n\n let (old_content, new_article) = sqlx::query_scalar!(\n\n \"SELECT content FROM revision r\n", "file_path": "src/articles.rs", "rank": 15, "score": 44118.508321416666 }, { "content": "#[test]\n\n#[serial]\n\nfn redirects() {\n\n let client = client();\n\n let assert_redirect = |uri: &str, location| {\n\n let response = client.get(dbg!(uri)).dispatch();\n\n assert_eq!(\n\n response.status(),\n\n Status::SeeOther,\n\n \"body: {:?}\",\n\n response.into_string()\n\n );\n\n assert_eq!(response.headers().get_one(\"Location\"), Some(location));\n\n };\n\n let assert_no_redirect = |uri: &str| {\n\n let response = client.get(uri).dispatch();\n\n assert_ne!(response.status(), Status::SeeOther);\n\n };\n\n // Always redirect / to main\n\n assert_redirect(\"/\", \"/Main\");\n\n // When not logged in, don't allow any edits\n\n assert_redirect(\"/Main/edit\", \"/u/login\");\n", "file_path": "src/tests.rs", "rank": 16, "score": 40687.12142878576 }, { "content": "fn render_404(\n\n cfg: &Config,\n\n article_name: &str,\n\n user: &Option<LoggedUser>,\n\n) -> status::Custom<Template> {\n\n let context = json! 
{{\n\n \"site_name\": cfg.site_name,\n\n \"default_path\": cfg.default_path,\n\n \"article_name\": article_name,\n\n \"user\": user,\n\n }};\n\n status::Custom(Status::NotFound, Template::render(\"article_404\", context))\n\n}\n\n\n", "file_path": "src/articles.rs", "rank": 17, "score": 40687.12142878576 }, { "content": "#[test]\n\nfn launch() {\n\n client();\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 18, "score": 40687.12142878576 }, { "content": "#[test]\n\n#[serial]\n\nfn search() {\n\n let client = client();\n\n // Helper to reload the search index\n\n let reload = || {\n\n client\n\n .rocket()\n\n .state::<ArticleIndex>()\n\n .unwrap()\n\n .reader\n\n .reload()\n\n .unwrap();\n\n };\n\n register_and_login(&client, \"search\");\n\n // To get some value to compare to, we just note down the length of the search page\n\n let first_body_length = client\n\n .get(\"/search?q=Baguette\")\n\n .dispatch()\n\n .into_bytes()\n\n .unwrap()\n\n .len();\n", "file_path": "src/tests.rs", "rank": 19, "score": 40687.12142878576 }, { "content": "#[get(\"/search?<q>\", rank = 0)]\n\nfn search(\n\n cfg: &State<Config>,\n\n index: &State<ArticleIndex>,\n\n user: Option<LoggedUser>,\n\n q: String,\n\n) -> Result<Template> {\n\n let results = index.search_by_text(&q)?;\n\n let exact_match = results.iter().any(|r| r.title == q);\n\n let context = json! 
{{\n\n \"site_name\": &cfg.site_name,\n\n \"default_path\": &cfg.default_path,\n\n \"exact_match\": exact_match,\n\n \"results\": index.search_by_text(&q)?,\n\n \"page_name\": \"Search\",\n\n \"user\": user,\n\n \"query\": q,\n\n }};\n\n Ok(Template::render(\"search\", context))\n\n}\n\n\n", "file_path": "src/articles.rs", "rank": 20, "score": 40687.12142878576 }, { "content": "#[test]\n\n#[serial]\n\nfn failed_register() {\n\n let client = client();\n\n // We'll test all of the ways registering can fail, oh boy\n\n // Helper function so we can check the output\n\n // This will also assert that the status is BadRequest\n\n let get_html = |request: &RegisterRequest| {\n\n let response = post_form(&client, \"/u/register\", request);\n\n assert_eq!(\n\n response.status(),\n\n Status::BadRequest,\n\n \"request: {:?}\\nresponse: {:?}\",\n\n request,\n\n response.into_string()\n\n );\n\n let text = response.into_string().unwrap();\n\n scraper::Html::parse_document(&text)\n\n };\n\n // Helper function to check if any of the p.help.is-danger elements on the\n\n // given Html has the given text as content\n\n let assert_help_text = |html: &scraper::Html, content: &str| {\n", "file_path": "src/tests.rs", "rank": 21, "score": 39193.158190017784 }, { "content": "#[test]\n\n#[serial]\n\nfn admin_permissions_and_settings() {\n\n let client = client();\n\n let db = client.rocket().state::<Db>().unwrap();\n\n async fn load_admin(db: &Db) -> Option<String> {\n\n sqlx::query_scalar!(r#\"SELECT name FROM \"user\" WHERE is_admin = TRUE\"#)\n\n .fetch_optional(&db.pool)\n\n .await\n\n .unwrap()\n\n }\n\n let admin = match block_on(load_admin(db)) {\n\n Some(name) => name,\n\n None => {\n\n register_account(&client, \"Admin\", PASSWORD);\n\n \"Admin\".into()\n\n }\n\n };\n\n // Only the first account should be an admin\n\n register_account(&client, \"User\", PASSWORD);\n\n // Now we check if the admin flag actually gets applied\n\n // Log in as admin and change settings\n", "file_path": 
"src/tests.rs", "rank": 22, "score": 37867.22516314059 }, { "content": "#[test]\n\n#[serial]\n\nfn creating_and_editing_articles() {\n\n let client = client();\n\n // We need to be logged in for this\n\n register_and_login(&client, \"creating and editing\");\n\n\n\n // Let's keep a reference to the db around, it will help\n\n let db = client.rocket().state::<Db>().unwrap();\n\n\n\n // Create an actual new article\n\n let response = post_form(\n\n &client,\n\n \"/MyNewArticle/edit\",\n\n AddRevRequest {\n\n title: None,\n\n content: \"Some content blah blah blah\".into(),\n\n },\n\n );\n\n assert_eq!(response.status(), Status::Ok);\n\n // We will want its id to check for the changes\n\n let article_id = block_on(db.article_id_by_name(\"MyNewArticle\"))\n", "file_path": "src/tests.rs", "rank": 23, "score": 37867.22516314059 }, { "content": "#[test]\n\n#[serial]\n\nfn register_login_logout() {\n\n let client = client();\n\n // There should be no cookies before logging in\n\n assert_eq!(client.cookies().iter().count(), 0);\n\n // There's one cookie, the session id, when you're logged in\n\n register_and_login(&client, \"login logout\");\n\n assert_eq!(client.cookies().iter().count(), 1);\n\n assert!(client.cookies().get(\"session_id\").is_some());\n\n // After logging out, no more cookies should be present\n\n logout(&client);\n\n assert_eq!(client.cookies().iter().count(), 0);\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 24, "score": 37867.22516314059 }, { "content": "#[test]\n\n#[serial]\n\nfn basic_article_routes() {\n\n let client = client();\n\n let assert_status = |uri: &str, status: Status| {\n\n let response = client.get(uri).dispatch();\n\n assert_eq!(response.status(), status, \"{}\", uri);\n\n };\n\n let ok = Status::Ok;\n\n let notfound = Status::NotFound;\n\n // At the start, the Main page doesn't exist, but it's a special case\n\n assert_status(\"/Main\", ok);\n\n // You cannot look at its revisions though, as there are none.\n\n 
assert_status(\"/Main/revs\", notfound);\n\n assert_status(\"/Main/rev/1\", notfound);\n\n // Search should always succeed\n\n assert_status(\"/search?q=blah\", ok);\n\n // Same for the \"create article\" helper\n\n assert_status(\"/create\", ok);\n\n // An unknown article should return 404\n\n assert_status(\"/Blahblub\", notfound);\n\n // Same for unknown revs\n\n assert_status(\"/Main/revs/1\", notfound);\n\n // And a combination of those\n\n assert_status(\"/Blahblub/revs/1\", notfound);\n\n // Login so we can see the edit page\n\n register_and_login(&client, \"basic article routes\");\n\n assert_status(\"/Main/edit\", ok);\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 25, "score": 37867.22516314059 }, { "content": "fn post_form<'a>(\n\n client: &'a Client,\n\n uri: &'static str,\n\n data: impl serde::Serialize,\n\n) -> LocalResponse<'a> {\n\n let request_body = serde_urlencoded::to_string(data).unwrap();\n\n client\n\n .post(uri)\n\n .header(content_type_form())\n\n .body(request_body)\n\n .dispatch()\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 26, "score": 37575.95862741107 }, { "content": "fn client() -> Client {\n\n Client::tracked(rocket()).expect(\"failed to create rocket client\")\n\n}\n", "file_path": "src/tests.rs", "rank": 27, "score": 37575.95862741107 }, { "content": "#[post(\"/admin\", rank = 2)]\n\nfn admin_redirect() -> Redirect {\n\n Redirect::to(\"/settings\")\n\n}\n", "file_path": "src/settings.rs", "rank": 28, "score": 36250.025600533874 }, { "content": "#[get(\"/\", rank = 2)]\n\nfn panel_redirect() -> Redirect {\n\n Redirect::to(\"/u/login\")\n\n}\n\n\n\n#[derive(FromForm)]\n\n#[cfg_attr(test, derive(serde::Serialize))]\n\npub struct AdminSettings {\n\n pub registration_enabled: bool,\n\n}\n\n\n\n#[post(\"/admin\", data = \"<form>\")]\n\nasync fn admin_settings(\n\n db: &State<Db>,\n\n cfg: &State<Config>,\n\n form: Form<AdminSettings>,\n\n // Only admins can call this\n\n // TODO: Mark down the admin's userid somewhere\n\n admin: 
LoggedAdmin,\n\n) -> Result<Template> {\n\n let AdminSettings {\n", "file_path": "src/settings.rs", "rank": 29, "score": 36250.025600533874 }, { "content": "fn logout(client: &Client) {\n\n let response = client.get(\"/u/logout\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 30, "score": 34936.36093845322 }, { "content": "fn serialize_snippet<S: serde::Serializer>(\n\n snippet: &SnippetOrFirstSentence,\n\n s: S,\n\n) -> std::result::Result<S::Ok, S::Error> {\n\n match snippet {\n\n SnippetOrFirstSentence::Snippet(snippet) => s.serialize_str(&snippet.to_html()),\n\n SnippetOrFirstSentence::FirstSentence(string) => s.serialize_str(string),\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum SnippetOrFirstSentence {\n\n Snippet(Snippet),\n\n FirstSentence(String),\n\n}\n\n\n\n#[derive(Debug, serde::Serialize)]\n\npub struct SearchResult {\n\n pub title: String,\n\n #[serde(serialize_with = \"serialize_snippet\")]\n\n pub snippet: SnippetOrFirstSentence,\n\n pub last_edited: DateTime<Utc>,\n\n}\n\n\n", "file_path": "src/search.rs", "rank": 31, "score": 31598.402232990884 }, { "content": "fn markdown_to_html(input: &str) -> String {\n\n let callback = &mut |broken_link: BrokenLink| {\n\n Some((\n\n (\"/\".to_string() + broken_link.reference).into(),\n\n broken_link.reference.to_owned().into(),\n\n ))\n\n };\n\n let parser =\n\n Parser::new_with_broken_link_callback(input, Options::all(), Some(callback)).map(|ev| {\n\n match ev {\n\n pulldown_cmark::Event::SoftBreak => pulldown_cmark::Event::HardBreak,\n\n _ => ev,\n\n }\n\n });\n\n let mut output = String::new();\n\n html::push_html(&mut output, parser);\n\n output\n\n}\n\n\n\n/// Context used to render an existing article revision.\n", "file_path": "src/articles.rs", "rank": 32, "score": 31598.402232990884 }, { "content": "fn markdown_to_text(input: &str) -> String {\n\n // TODO: This is pretty unnecessary since I actually just want to strip\n\n // the square 
brackets from broken links. Hm.\n\n let callback = &mut |broken_link: BrokenLink| {\n\n Some((\n\n (\"/\".to_string() + broken_link.reference).into(),\n\n broken_link.reference.to_owned().into(),\n\n ))\n\n };\n\n let parser = Parser::new_with_broken_link_callback(input, Options::all(), Some(callback))\n\n .filter_map(|event| match event {\n\n Event::Text(_) => Some(event),\n\n Event::Start(Tag::Link(_, _, _)) | Event::End(Tag::Link(_, _, _)) => None,\n\n _ => Some(Event::Text(CowStr::Borrowed(\" \"))),\n\n });\n\n // The output will very likely be shorter than the input, but never longer\n\n let mut output = String::with_capacity(input.len());\n\n html::push_html(&mut output, parser);\n\n output.trim().into()\n\n}\n", "file_path": "src/search.rs", "rank": 33, "score": 31598.402232990884 }, { "content": "#[rocket::get(\"/\")]\n\nfn index(cfg: &State<Config>) -> Redirect {\n\n Redirect::to(cfg.default_path.clone())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 30685.62120462705 }, { "content": "#[get(\"/<_article_name>/edit\", rank = 2)]\n\nfn redirect_to_login_get(_article_name: String) -> Redirect {\n\n Redirect::to(\"/u/login\")\n\n}\n", "file_path": "src/articles.rs", "rank": 35, "score": 29761.827443307084 }, { "content": "#[post(\"/<_article_name>/edit\", rank = 2)]\n\nfn redirect_to_login_post(_article_name: String) -> Redirect {\n\n Redirect::to(\"/u/login\")\n\n}\n\n\n\n#[get(\"/<article_name>/revs\")]\n\nasync fn revs(\n\n db: &State<Db>,\n\n cfg: &State<Config>,\n\n article_name: String,\n\n user: Option<LoggedUser>,\n\n) -> Result<status::Custom<Template>> {\n\n let revisions = db::articles::list_revisions(db, &article_name).await?;\n\n if revisions.is_empty() {\n\n return Ok(render_404(&*cfg, &article_name, &user));\n\n }\n\n let context = json! 
{{\n\n \"site_name\": &cfg.site_name,\n\n \"default_path\": &cfg.default_path,\n\n \"article_name\": article_name,\n\n \"user\": user,\n", "file_path": "src/articles.rs", "rank": 36, "score": 29761.827443307084 }, { "content": "/// Login with a default username and password.\n\n/// Useful if you don't care about the user and just need a session.\n\nfn register_and_login(client: &Client, username: &str) {\n\n // Register a default account\n\n register_account(client, username, PASSWORD);\n\n // Then we log in, which should give us the appropriate cookies\n\n login(client, username, PASSWORD);\n\n}\n", "file_path": "src/tests.rs", "rank": 37, "score": 29722.632534122506 }, { "content": "#[get(\"/login\", rank = 2)]\n\nfn login_page(cfg: &State<Config>) -> Template {\n\n let context = json! {{\n\n \"site_name\": &cfg.site_name,\n\n \"default_path\": &cfg.default_path,\n\n \"page_name\": \"Login\",\n\n }};\n\n Template::render(\"login\", context)\n\n}\n\n#[derive(Debug, FromForm)]\n\n#[cfg_attr(test, derive(serde::Serialize))]\n\npub(crate) struct LoginRequest {\n\n pub(crate) username: String,\n\n pub(crate) password: String,\n\n}\n\n#[post(\"/login\", data = \"<form>\")]\n\nasync fn login_form(\n\n cfg: &State<Config>,\n\n db: &State<Db>,\n\n form: Form<LoginRequest>,\n\n cookies: &CookieJar<'_>,\n", "file_path": "src/users.rs", "rank": 38, "score": 29719.651359676136 }, { "content": "use std::array::TryFromSliceError;\n\n\n\nuse rocket::{\n\n http::Status,\n\n outcome::Outcome,\n\n response::{self, Responder},\n\n Request,\n\n};\n\nuse rocket_dyn_templates::{tera, Template};\n\nuse tantivy::{query::QueryParserError, TantivyError};\n\nuse uuid::Uuid;\n\n\n\nuse crate::db::articles::RevId;\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum Error {\n\n #[error(\"Error reading config file: {0}\")]\n\n FigmentError(#[from] figment::Error),\n\n #[error(\"Error accessing database: {0}\")]\n\n SqlxError(#[from] sqlx::Error),\n", "file_path": "src/error.rs", "rank": 39, 
"score": 28060.49064639387 }, { "content": " use Error::*;\n\n match self {\n\n FigmentError(_)\n\n | SqlxError(_)\n\n | CaptchaPngError\n\n | DatabaseRequestGuardFailed\n\n | Argon2Error(_)\n\n | BincodeError(_)\n\n | InvalidIdData(_)\n\n | RevisionDataInconsistent(_)\n\n | ArticleDataInconsistent(_)\n\n | TemplateError(_)\n\n | TokioJoinError(_)\n\n | TantivyError(_)\n\n | QueryParserError(_) => Status::InternalServerError,\n\n UserAlreadyExists(_)\n\n | IdenticalNewRevision\n\n | DuplicateArticleName(_)\n\n | WrongPassword => Status::BadRequest,\n\n UserNotFound(_) | RevisionUnknown(_, _) | CaptchaNotFound => Status::NotFound,\n\n }\n\n }\n\n}\n\n\n\n// Ouch: I can't implement IntoOutcome for crate::Result<S>.\n\n// I also can't just impl crate::Result<S> and add such a method.\n\n// So I'll have to use a helper trait...\n", "file_path": "src/error.rs", "rank": 40, "score": 28052.078028968474 }, { "content": " #[error(\"Database returned inconsistent data: article id {0:?} not found\")]\n\n ArticleDataInconsistent(Uuid),\n\n #[error(\"Error rendering template: {0}\")]\n\n TemplateError(#[from] tera::Error),\n\n #[error(\"Captcha error; please retry!\")]\n\n CaptchaNotFound,\n\n #[error(\"An unexpected error occured when trying to generate a captcha\")]\n\n CaptchaPngError,\n\n #[error(\"Error trying to join a blocking task: {0}\")]\n\n TokioJoinError(#[from] rocket::tokio::task::JoinError),\n\n #[error(\"Internal rocket error: failed to get database\")]\n\n DatabaseRequestGuardFailed,\n\n #[error(\"Error updating search index: {0}\")]\n\n TantivyError(#[from] TantivyError),\n\n #[error(\"Error parsing search query: {0}\")]\n\n QueryParserError(#[from] QueryParserError),\n\n}\n\n\n\nimpl Error {\n\n pub fn status(&self) -> Status {\n", "file_path": "src/error.rs", "rank": 41, "score": 28050.868513170884 }, { "content": " #[error(\"Error hashing password: {0}\")]\n\n Argon2Error(#[from] argon2::Error),\n\n #[error(\"Data could not be (de)serialized: {0}\")]\n\n 
BincodeError(#[from] bincode::Error),\n\n #[error(\"Username already taken: {0}\")]\n\n UserAlreadyExists(String),\n\n #[error(\"Unknown user: {0}\")]\n\n UserNotFound(String),\n\n #[error(\"Wrong password\")]\n\n WrongPassword,\n\n #[error(\"Revision '{1:?}' on article {0:1} does not exist\")]\n\n RevisionUnknown(Uuid, i64),\n\n #[error(\"New content is identical to the previous revision\")]\n\n IdenticalNewRevision,\n\n #[error(\"Error changing article name: Article {0} already exists\")]\n\n DuplicateArticleName(String),\n\n #[error(\"Tried to read a byte slice with the wrong length\")]\n\n InvalidIdData(#[from] TryFromSliceError),\n\n #[error(\"Database is inconsistent: Revision {0:?} is missing fields\")]\n\n RevisionDataInconsistent(RevId),\n", "file_path": "src/error.rs", "rank": 42, "score": 28045.89217622967 }, { "content": "fn login(client: &Client, username: &str, password: &str) {\n\n // This is fairly straightforward compared to registering lol\n\n let response = post_form(\n\n client,\n\n \"/u/login\",\n\n LoginRequest {\n\n username: username.into(),\n\n password: password.into(),\n\n },\n\n );\n\n // If this request succeeds, we're logged in\n\n assert_eq!(\n\n response.status(),\n\n Status::Ok,\n\n \"Failed to log in: {:?}\",\n\n response.into_string()\n\n );\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 43, "score": 27388.11570744293 }, { "content": "fn register_account(client: &Client, username: &str, password: &str) {\n\n let (captcha_id, captcha_solution) = register_challenge(client);\n\n // Send off the request\n\n let response = post_form(\n\n client,\n\n \"/u/register\",\n\n RegisterRequest {\n\n username: username.into(),\n\n password: password.into(),\n\n pwd_confirm: password.into(),\n\n captcha_id,\n\n captcha_solution,\n\n },\n\n );\n\n // If it succeeds, we're registered\n\n assert_eq!(\n\n response.status(),\n\n Status::Ok,\n\n \"Failed to register: {:?}\",\n\n response.into_string()\n\n );\n\n}\n\n\n", "file_path": 
"src/tests.rs", "rank": 44, "score": 26590.75064819189 }, { "content": "#[get(\"/login\")]\n\nfn login_redirect(cfg: &State<Config>, _session: &UserSession) -> Redirect {\n\n Redirect::to(cfg.default_path.clone())\n\n}\n", "file_path": "src/users.rs", "rank": 45, "score": 25860.44154852943 }, { "content": "#[get(\"/create\", rank = 0)]\n\nfn create(cfg: &State<Config>, user: Option<LoggedUser>) -> Template {\n\n let context = json! {{\n\n \"site_name\": &cfg.site_name,\n\n \"default_path\": &cfg.default_path,\n\n \"page_name\": \"New Article\",\n\n \"user\": user,\n\n }};\n\n Template::render(\"article_create\", context)\n\n}\n\n\n\n#[get(\"/<article_name>\", rank = 3)]\n\nasync fn get(\n\n db: &State<Db>,\n\n cfg: &State<Config>,\n\n article_name: String,\n\n user: Option<LoggedUser>,\n\n) -> Result<status::Custom<Template>> {\n\n if let Some(rev) = db.get_current_rev(&article_name).await? {\n\n let DisplayRevision {\n\n rev_id,\n", "file_path": "src/articles.rs", "rank": 46, "score": 25271.36288692477 }, { "content": " fn deref(&self) -> &Self::Target {\n\n &self.pool\n\n }\n\n}\n\n\n\n/// Settings keys\n\nmod flags {\n\n pub const REGISTRATION_ENABLED: &str = \"global:registration_enabled\";\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct EnabledRegistration;\n\n#[rocket::async_trait]\n\nimpl<'r> FromRequest<'r> for EnabledRegistration {\n\n type Error = crate::Error;\n\n\n\n async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {\n\n use crate::error::IntoOutcomeHack;\n\n use rocket::outcome::IntoOutcome;\n\n let db: &Db = try_outcome!(request.rocket().state().or_forward(()));\n", "file_path": "src/db/mod.rs", "rank": 48, "score": 24824.377021314933 }, { "content": "use rocket::{\n\n outcome::try_outcome,\n\n request::{FromRequest, Outcome},\n\n Request,\n\n};\n\nuse sqlx::PgPool;\n\nuse uuid::Uuid;\n\n\n\nuse crate::Result;\n\n\n\npub mod articles;\n\nuse articles::{ArticleWithRevision, DisplayRevision};\n\npub mod users;\n\nuse 
users::UserSession;\n\n\n\npub struct Db {\n\n pub pool: PgPool,\n\n}\n\nimpl std::ops::Deref for Db {\n\n type Target = PgPool;\n", "file_path": "src/db/mod.rs", "rank": 51, "score": 24819.733678695793 }, { "content": " if try_outcome!(db.registration_enabled().await.into_outcome_hack()) {\n\n Outcome::Success(EnabledRegistration)\n\n } else {\n\n Outcome::Forward(())\n\n }\n\n }\n\n}\n\n\n\nimpl Db {\n\n pub async fn try_connect(uri: &str) -> Result<Self> {\n\n Ok(Self {\n\n pool: PgPool::connect(uri).await?,\n\n })\n\n }\n\n\n\n pub async fn user_name_exists(&self, username: &str) -> Result<bool> {\n\n users::name_exists(self, username).await\n\n }\n\n pub async fn user_id_by_name(&self, username: &str) -> Result<Option<Uuid>> {\n\n Ok(\n", "file_path": "src/db/mod.rs", "rank": 53, "score": 24813.80668063666 }, { "content": "use chrono::{DateTime, NaiveDateTime, Utc};\n\nuse serde::Serialize;\n\nuse sqlx::{PgConnection, PgPool, Postgres, Transaction};\n\nuse uuid::Uuid;\n\n\n\nuse crate::Result;\n\n\n\n/// A revision id.\n\n/// This type wraps an article id and a revision number (both u32).\n\n/// It is used to store an article's revision so it's easier to query\n\n/// e.g. 
the latest revision of an article.\n\n/// Values of this type can only ever be obtained from the database.\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub struct RevId(pub Uuid, pub i64);\n\n\n\n#[derive(Debug, PartialEq, Serialize)]\n\npub struct Revision {\n\n pub content: String,\n\n pub author_id: Uuid,\n\n pub date: DateTime<Utc>,\n", "file_path": "src/db/articles.rs", "rank": 54, "score": 24811.63081873825 }, { "content": " sqlx::query_scalar!(r#\"SELECT id FROM \"user\" WHERE name = $1\"#, username)\n\n .fetch_optional(&self.pool)\n\n .await?,\n\n )\n\n }\n\n pub async fn register_user(&self, username: &str, password: String) -> Result<()> {\n\n users::register(self, username, password).await?;\n\n Ok(())\n\n }\n\n pub async fn try_login(&self, username: &str, password: String) -> Result<UserSession> {\n\n users::try_login(self, username, password).await\n\n }\n\n pub async fn get_session_user(&self, session_id: Uuid) -> Result<Option<Uuid>> {\n\n users::get_session_user(self, session_id).await\n\n }\n\n pub async fn destroy_session(&self, session_id: Uuid) -> Result<()> {\n\n users::destroy_session(self, session_id).await\n\n }\n\n pub async fn user_is_admin(&self, user_id: Uuid) -> Result<bool> {\n\n users::is_admin(self, user_id).await\n", "file_path": "src/db/mod.rs", "rank": 55, "score": 24809.451676894187 }, { "content": " }\n\n\n\n pub async fn article_id_by_name(&self, article_name: &str) -> Result<Option<Uuid>> {\n\n let mut conn = self.acquire().await?;\n\n articles::id_by_name(&mut conn, article_name).await\n\n }\n\n pub async fn list_articles(&self) -> Result<Vec<ArticleWithRevision>> {\n\n articles::list_articles(self).await\n\n }\n\n pub async fn get_current_rev(&self, article_name: &str) -> Result<Option<DisplayRevision>> {\n\n articles::get_current_rev(self, article_name).await\n\n }\n\n\n\n async fn set_flag(&self, flag: &str, value: bool) -> Result<()> {\n\n sqlx::query!(\n\n \"INSERT INTO flags(name, value)\n\n VALUES($1, $2)\n\n ON 
CONFLICT(name) DO UPDATE SET value = $2\",\n\n flag,\n\n value\n", "file_path": "src/db/mod.rs", "rank": 57, "score": 24807.4526747071 }, { "content": "}\n\n/// Updates the name for the given article.\n\n/// This internally changes two sled trees, removing the old article name and\n\n/// adding the new one in the name_id tree, and updating it in the id_name tree.\n\npub async fn change_name(conn: &mut PgConnection, article_id: Uuid, new_name: &str) -> Result<()> {\n\n sqlx::query!(\n\n \"UPDATE article SET name = $1 WHERE id = $2\",\n\n new_name,\n\n article_id,\n\n )\n\n .execute(&mut *conn)\n\n .await?;\n\n Ok(())\n\n}\n\n/// Add a new revision. Uses the current date and time as the date.\n\n/// The core part of this type as it touches *all* of its trees.\n\npub async fn add_revision(\n\n conn: &mut PgConnection,\n\n article_id: Uuid,\n\n author_id: Uuid,\n", "file_path": "src/db/articles.rs", "rank": 59, "score": 24807.184142404156 }, { "content": " )\n\n .fetch_optional(pool)\n\n .await?)\n\n}\n\n/// Create an empty article with no revisions.\n\npub async fn create(\n\n txn: &mut Transaction<'_, Postgres>,\n\n name: &str,\n\n content: &str,\n\n author_id: Uuid,\n\n) -> Result<(RevId, RevisionMeta)> {\n\n let id = Uuid::new_v4();\n\n sqlx::query!(\n\n \"INSERT INTO article(id, name, creator_id)\n\n VALUES($1, $2, $3)\",\n\n id,\n\n name,\n\n author_id,\n\n )\n\n .execute(&mut *txn)\n", "file_path": "src/db/articles.rs", "rank": 62, "score": 24805.68343978601 }, { "content": " )\n\n .execute(&self.pool)\n\n .await?;\n\n Ok(())\n\n }\n\n pub async fn registration_enabled(&self) -> Result<bool> {\n\n Ok(sqlx::query_scalar!(\n\n \"SELECT value FROM flags WHERE name = $1\",\n\n flags::REGISTRATION_ENABLED\n\n )\n\n .fetch_optional(&**self)\n\n .await?\n\n .unwrap_or(true))\n\n }\n\n pub async fn set_registration_enabled(&self, value: bool) -> Result<()> {\n\n self.set_flag(flags::REGISTRATION_ENABLED, value).await\n\n }\n\n}\n\n\n\n// #[cfg(test)]\n", "file_path": 
"src/db/mod.rs", "rank": 63, "score": 24805.607890061696 }, { "content": "}\n\n\n\n/// Get the id for the given article name if it exists.\n\npub async fn id_by_name(conn: &mut PgConnection, name: &str) -> Result<Option<Uuid>> {\n\n Ok(\n\n sqlx::query_scalar!(\"SELECT id FROM article WHERE name = $1\", name)\n\n .fetch_optional(&mut *conn)\n\n .await?,\n\n )\n\n}\n\n/// Lists the articles from the database, returning the article name, id and\n\n/// the latest revision.\n\npub async fn list_articles(pool: &PgPool) -> Result<Vec<ArticleWithRevision>> {\n\n Ok(sqlx::query_as!(\n\n ArticleWithRevision,\n\n r#\"SELECT a.id AS \"id!\", a.name AS \"name!\", r.content AS \"content!\",\n\n r.created AS \"rev_created!\"\n\n FROM article a\n\n INNER JOIN revision r ON (a.id = r.article_id)\n\n WHERE r.num = (SELECT MAX(num) FROM revision WHERE article_id = a.id)\"#\n", "file_path": "src/db/articles.rs", "rank": 65, "score": 24805.126370502 }, { "content": "// mod tests {\n\n// use articles::{Revision, RevisionMeta};\n\n\n\n// use super::*;\n\n\n\n// fn db() -> Db {\n\n// let sled_db = sled::Config::default()\n\n// .temporary(true)\n\n// .open()\n\n// .expect(\"Failed to create sled db\");\n\n// Db::load_or_create(sled_db)\n\n// .and_then(crate::seed_db)\n\n// .expect(\"Failed to open database\")\n\n// }\n\n\n\n// #[test]\n\n// fn create_database() {\n\n// db();\n\n// }\n\n\n", "file_path": "src/db/mod.rs", "rank": 66, "score": 24804.17855552772 }, { "content": "}\n\n\n\npub struct DisplayRevision {\n\n pub rev_id: i64,\n\n pub author_name: String,\n\n pub content: String,\n\n pub created: NaiveDateTime,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize)]\n\npub struct RevisionMeta {\n\n pub author_id: Uuid,\n\n pub date: DateTime<Utc>,\n\n}\n\n\n\npub struct ArticleWithRevision {\n\n pub id: Uuid,\n\n pub name: String,\n\n pub content: String,\n\n pub rev_created: NaiveDateTime,\n", "file_path": "src/db/articles.rs", "rank": 67, "score": 24804.10928688044 }, { "content": " 
)\n\n .fetch_all(pool)\n\n .await?)\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct ListRevision {\n\n pub num: i64,\n\n pub author_name: String,\n\n pub date: NaiveDateTime,\n\n}\n\n/// Retrieves the list of revision ids for the given article id.\n\n/// Returns Ok(empty Vec) when the article doesn't exist.\n\n/// Returns RevisionMeta because loading the revision's content doesn't\n\n/// make sense for listing the revisions.\n\npub async fn list_revisions(pool: &PgPool, article_name: &str) -> Result<Vec<ListRevision>> {\n\n Ok(sqlx::query_as!(\n\n ListRevision,\n\n r#\"SELECT r.num, u.name AS author_name, r.created AS date\n\n FROM revision r\n", "file_path": "src/db/articles.rs", "rank": 68, "score": 24803.7111234593 }, { "content": "// }\n\n\n\n// #[test]\n\n// fn settings() {\n\n// let db = db();\n\n// assert!(db.registration_enabled().unwrap());\n\n// db.set_registration_enabled(false).unwrap();\n\n// assert!(!db.registration_enabled().unwrap());\n\n// }\n\n\n\n// #[test]\n\n// fn create_article_and_revision() -> crate::Result<()> {\n\n// let db = db();\n\n// let article_name = \"MainPage\";\n\n// let author_id = db.users.register(\"username\", \"password\")?;\n\n// let content = r#\"\n\n// This is a **fun** Article with some minimal *Markdown* in it.\n\n// [Link](Link)\"#;\n\n\n\n// // Create our article\n", "file_path": "src/db/mod.rs", "rank": 70, "score": 24803.05023939554 }, { "content": "// let article_id = db\n\n// .articles\n\n// .create(\"article\")\n\n// .expect(\"failed to create article\");\n\n// let (rev_id, _rev) = db\n\n// .articles\n\n// .add_revision(article_id, author_id, \"blah blah blah\")\n\n// .expect(\"failed to create revision\");\n\n// // Verify a valid article id + rev number\n\n// assert_eq!(\n\n// db.articles.verified_rev_id(rev_id.0, rev_id.1).unwrap(),\n\n// rev_id\n\n// );\n\n// // Verify an invalid rev number returns the appropriate error\n\n// assert!(matches!(\n\n// db.articles.verified_rev_id(article_id, rev_id.1.next()),\n\n// 
Err(crate::Error::RevisionUnknown(_, _))\n\n// ));\n\n// }\n\n// }\n", "file_path": "src/db/mod.rs", "rank": 71, "score": 24802.73494765292 }, { "content": " article_name,\n\n )\n\n .fetch_optional(pool)\n\n .await?)\n\n}\n\n/// Get all data for the given verified revision id\n\npub async fn get_revision(\n\n pool: &PgPool,\n\n article_name: &str,\n\n num: i64,\n\n) -> Result<Option<DisplayRevision>> {\n\n Ok(sqlx::query_as!(\n\n DisplayRevision,\n\n r#\"SELECT r.num AS rev_id, r.content, u.name AS author_name, r.created\n\n FROM revision r\n\n INNER JOIN \"user\" u ON u.id = r.author_id\n\n WHERE r.article_id = (SELECT id FROM article WHERE name = $1)\n\n AND r.num = $2\"#,\n\n article_name,\n\n num,\n", "file_path": "src/db/articles.rs", "rank": 72, "score": 24802.438312176473 }, { "content": "// // Verify that the latest revision is correct\n\n// assert_eq!(\n\n// rev3_id,\n\n// db.articles.get_current_revision(article_id)?.unwrap().0\n\n// );\n\n\n\n// Ok(())\n\n// }\n\n\n\n// #[test]\n\n// fn query_specific_revisions() -> crate::Result<()> {\n\n// // Basic setup\n\n// let db = db();\n\n// let article_name = \"MainPage\";\n\n// let article_id = db.articles.create(article_name)?;\n\n// let user_id = db.users.register(\"user1\", \"password123\")?;\n\n\n\n// // Store some revisions\n\n// let (rev1_id, rev1_meta) = db.articles.add_revision(article_id, user_id, \"abc\")?;\n\n// let (rev2_id, _) = db.articles.add_revision(article_id, user_id, \"123\")?;\n", "file_path": "src/db/mod.rs", "rank": 73, "score": 24802.399737846878 }, { "content": "// );\n\n// // Destroy the session again\n\n// assert!(db.users.destroy_session(session.session_id).is_ok());\n\n// // Verifying a wrong password returns false\n\n// assert!(db.users.try_login(user_id, \"password123\")?.is_none());\n\n// // Verifying the wrong user returns false\n\n// // Note that it should not be possible to trigger a PasswordNotFound with\n\n// // normal code anymore, so the verification will just fail.\n\n// 
assert!(db.users.try_login(user2_id, password)?.is_none());\n\n// Ok(())\n\n// }\n\n\n\n// #[test]\n\n// fn first_user_is_admin() -> crate::Result<()> {\n\n// let db = db();\n\n// let user_id = db.users.register(\"username\", \"password\")?;\n\n// assert!(db.users.is_admin(user_id)?);\n\n// let user_id = db.users.register(\"user2\", \"password123\")?;\n\n// assert!(!db.users.is_admin(user_id)?);\n\n// Ok(())\n", "file_path": "src/db/mod.rs", "rank": 74, "score": 24802.338751919608 }, { "content": "// #[test]\n\n// fn register_and_login() -> crate::Result<()> {\n\n// let db = db();\n\n// let username = \"someone\";\n\n// let password = \"hunter2\";\n\n// let user_id = db.users.register(&username, &password)?;\n\n// let user2_id = db.users.register(\"username\", \"password\")?;\n\n// // Make sure the user exists now\n\n// assert!(db.users.name_exists(username)?);\n\n// assert_eq!(db.users.id_by_name(username)?, Some(user_id));\n\n// assert_eq!(db.users.name_by_id(user_id)?.as_str(), username);\n\n// // Verifying a correct password creates a session\n\n// let session = db\n\n// .users\n\n// .try_login(user_id, password)?\n\n// .expect(\"Correct user_id and password should yield a session\");\n\n// // The session's id should be enough to get back the user id\n\n// assert_eq!(\n\n// db.users.get_session_user(session.session_id)?,\n\n// Some(session.user_id)\n", "file_path": "src/db/mod.rs", "rank": 75, "score": 24802.15724786425 }, { "content": "// let user2_id = db.users.register(\"user2\", \"password123\")?;\n\n// let user3_id = db.users.register(\"user3\", \"password123\")?;\n\n\n\n// let (rev1_id, _) = db.articles.add_revision(article_id, user1_id, \"abc\")?;\n\n// let (rev2_id, _) = db.articles.add_revision(article_id, user2_id, \"123\")?;\n\n// let (rev3_id, _) = db.articles.add_revision(article_id, user3_id, \"abc123\")?;\n\n\n\n// // Retrieve the revisions from the db again\n\n// let revisions = db.articles.list_revisions(article_id)?;\n\n\n\n// // First, 
compare the ids to make sure they're the same\n\n// let revision_ids = revisions\n\n// .iter()\n\n// .map(|(id, _)| id)\n\n// .copied()\n\n// .collect::<Vec<_>>();\n\n// assert_eq!(revision_ids, vec![rev1_id, rev2_id, rev3_id]);\n\n\n\n// // Extract the other available information\n\n// let mut iter = revisions.into_iter();\n", "file_path": "src/db/mod.rs", "rank": 76, "score": 24802.15687090303 }, { "content": "\n\n// // Verify the new rev id is different\n\n// assert_ne!(rev_id, new_rev_id);\n\n// // Verify the new revision is different\n\n// let RevisionMeta { author_id, date } = new_rev;\n\n// let new_rev = Revision {\n\n// author_id,\n\n// date,\n\n// content: new_content.to_string(),\n\n// };\n\n// assert_ne!(rev, new_rev);\n\n// Ok(())\n\n// }\n\n\n\n// #[test]\n\n// fn add_and_list_revisions() -> crate::Result<()> {\n\n// let db = db();\n\n// let article_name = \"MainPage\";\n\n// let article_id = db.articles.create(article_name)?;\n\n// let user1_id = db.users.register(\"user1\", \"password123\")?;\n", "file_path": "src/db/mod.rs", "rank": 77, "score": 24802.025368487135 }, { "content": " INNER JOIN \"user\" u ON u.id = r.author_id\n\n WHERE article_id = (SELECT id FROM article WHERE name = $1)\n\n ORDER BY r.num ASC\"#,\n\n article_name\n\n )\n\n .fetch_all(pool)\n\n .await?)\n\n}\n\n\n\n/// Get the current revision for the given article id if it exists.\n\n/// Will return None if the article doesn't exist.\n\npub async fn get_current_rev(pool: &PgPool, article_name: &str) -> Result<Option<DisplayRevision>> {\n\n Ok(sqlx::query_as!(\n\n DisplayRevision,\n\n r#\"SELECT r.num AS rev_id, u.name AS author_name, r.content, r.created\n\n FROM article a\n\n INNER JOIN revision r ON (a.id = r.article_id)\n\n INNER JOIN \"user\" u ON (u.id = r.author_id)\n\n WHERE a.name = $1\n\n AND r.num = (SELECT MAX(num) FROM revision WHERE article_id = a.id)\"#,\n", "file_path": "src/db/articles.rs", "rank": 78, "score": 24801.56330249008 }, { "content": "// let db = 
db();\n\n// let article_id = db\n\n// .articles\n\n// .create(\"name1\")\n\n// .expect(\"failed to create article\");\n\n// assert!(db.articles.name_exists(\"name1\").unwrap());\n\n// db.articles\n\n// .change_name(article_id, \"name2\")\n\n// .expect(\"failed to rename article\");\n\n// assert!(!db.articles.name_exists(\"name1\").unwrap());\n\n// assert_eq!(db.articles.id_by_name(\"name2\").unwrap(), Some(article_id));\n\n// }\n\n\n\n// #[test]\n\n// fn verified_rev_id() {\n\n// let db = db();\n\n// let author_id = db\n\n// .users\n\n// .register(\"user1\", \"password123\")\n\n// .expect(\"failed to register user\");\n", "file_path": "src/db/mod.rs", "rank": 79, "score": 24801.243295302134 }, { "content": "// let article_id = db.articles.create(article_name)?;\n\n// // Verify it exists now\n\n// assert!(db.articles.name_exists(article_name)?);\n\n// assert_eq!(db.articles.id_by_name(article_name)?, Some(article_id));\n\n// assert_eq!(db.articles.name_by_id(article_id)?.as_str(), article_name);\n\n// // ...but it doesn't have any revisions yet\n\n// assert_eq!(db.articles.list_revisions(article_id)?.len(), 0);\n\n// // meaning trying to get the current content or revision doesn't return anything\n\n// assert_eq!(db.articles.get_current_content(article_id)?, None);\n\n// assert_eq!(db.articles.get_current_revision(article_id)?, None);\n\n// // After checking for all of that, we add our first revision\n\n// let (rev_id, rev) = db.articles.add_revision(article_id, author_id, content)?;\n\n// // Verify it's now also the current revision\n\n// assert_eq!(\n\n// rev_id,\n\n// db.articles.get_current_revision(article_id)?.unwrap().0\n\n// );\n\n// // Retrieve it manually, just to be sure\n\n// let rev_from_db = db.articles.get_revision(rev_id)?;\n\n// let RevisionMeta { author_id, date } = rev;\n", "file_path": "src/db/mod.rs", "rank": 81, "score": 24800.875591009764 }, { "content": "// .articles\n\n// .get_current_revision(article_id)?\n\n// .expect(\"article should have 
revisions\");\n\n// assert_eq!(curr_rev_id, rev3_id);\n\n// assert_eq!(curr_rev.content.as_str(), \"abc123\");\n\n// assert_eq!(curr_rev.author_id, rev3_meta.author_id);\n\n// assert_eq!(curr_rev.author_id, user_id);\n\n// assert_eq!(curr_rev.date, rev3_meta.date);\n\n// // The current content can also be queried separately.\n\n// // This is currently used on the edit page.\n\n// assert_eq!(\n\n// db.articles.get_current_content(article_id)?,\n\n// Some(curr_rev.content)\n\n// );\n\n\n\n// Ok(())\n\n// }\n\n\n\n// #[test]\n\n// fn rename_article() {\n", "file_path": "src/db/mod.rs", "rank": 82, "score": 24800.39239979669 }, { "content": "// let (rev3_id, rev3_meta) = db.articles.add_revision(article_id, user_id, \"abc123\")?;\n\n\n\n// // We now query them and then check if they match with what we know\n\n// let rev1 = db.articles.get_revision(rev1_id)?;\n\n// assert_eq!(rev1.content.as_str(), \"abc\");\n\n// assert_eq!(rev1.author_id, rev1_meta.author_id);\n\n// assert_eq!(rev1.author_id, user_id);\n\n// assert_eq!(rev1.date, rev1_meta.date);\n\n\n\n// // Maybe we don't need the whole info about the revision, possibly we\n\n// // already know the author_id; query only the missing information.\n\n// let rev2_content = db.articles.get_rev_content(rev2_id)?;\n\n// assert_eq!(rev2_content.as_str(), \"123\");\n\n// // We can't compare this to anything, but it should be there, right?\n\n// db.articles\n\n// .get_rev_date(rev2_id)\n\n// .expect(\"Date should be there\");\n\n\n\n// // We may also just not care about specific revisions, we may just want the current one.\n\n// let (curr_rev_id, curr_rev) = db\n", "file_path": "src/db/mod.rs", "rank": 83, "score": 24800.26287957882 }, { "content": "// let rev = Revision {\n\n// author_id,\n\n// date,\n\n// content: content.into(),\n\n// };\n\n// assert_eq!(rev, rev_from_db);\n\n\n\n// // Add another revision\n\n// let new_content = r#\"\n\n// This is a **fun** Article with some minimal *Markdown* in it.\n\n// Something 
[Link](Links) to something else. New content. Ha ha ha.\"#;\n\n// let (new_rev_id, new_rev) = db\n\n// .articles\n\n// .add_revision(article_id, author_id, new_content)?;\n\n\n\n// // Verify it's now also the current revision\n\n// assert_eq!(\n\n// new_rev_id,\n\n// db.articles.get_current_revision(article_id)?.unwrap().0\n\n// );\n", "file_path": "src/db/mod.rs", "rank": 84, "score": 24800.25973098823 }, { "content": "// let rev1 = iter.next().unwrap().1;\n\n// let rev2 = iter.next().unwrap().1;\n\n// let rev3 = iter.next().unwrap().1;\n\n// assert_eq!(iter.next(), None);\n\n\n\n// // And compare the author's names\n\n// assert_eq!(rev1.author_id, user1_id);\n\n// assert_eq!(rev2.author_id, user2_id);\n\n// assert_eq!(rev3.author_id, user3_id);\n\n\n\n// // Retrieve the contents for the verified revision ids\n\n// let content1 = db.articles.get_rev_content(rev1_id)?;\n\n// let content2 = db.articles.get_rev_content(rev2_id)?;\n\n// let content3 = db.articles.get_rev_content(rev3_id)?;\n\n\n\n// // Compare them to what we passed to add_revision\n\n// assert_eq!(content1.as_str(), \"abc\");\n\n// assert_eq!(content2.as_str(), \"123\");\n\n// assert_eq!(content3.as_str(), \"abc123\");\n\n\n", "file_path": "src/db/mod.rs", "rank": 85, "score": 24799.935379739563 }, { "content": " content: &str,\n\n) -> Result<(RevId, RevisionMeta)> {\n\n let (rev_num, date) = sqlx::query!(\n\n \"INSERT INTO revision(article_id, num, content, author_id)\n\n VALUES ($1, (SELECT MAX(num) + 1 FROM revision WHERE article_id = $1), $2, $3)\n\n RETURNING num, created\",\n\n article_id,\n\n content,\n\n author_id,\n\n )\n\n .fetch_one(&mut *conn)\n\n .await\n\n .map(|r| (r.num, DateTime::from_utc(r.created, Utc)))?;\n\n\n\n let id = RevId(article_id, rev_num);\n\n let revision = RevisionMeta { author_id, date };\n\n Ok((id, revision))\n\n}\n", "file_path": "src/db/articles.rs", "rank": 86, "score": 24799.048488706303 }, { "content": " .await?;\n\n let rev_num = 1;\n\n let date = 
sqlx::query_scalar!(\n\n \"INSERT INTO revision(article_id, num, content, author_id)\n\n VALUES($1, $2, $3, $4)\n\n RETURNING created\",\n\n id,\n\n rev_num,\n\n content,\n\n author_id\n\n )\n\n .fetch_one(&mut *txn)\n\n .await?;\n\n Ok((\n\n RevId(id, rev_num),\n\n RevisionMeta {\n\n author_id,\n\n date: DateTime::from_utc(date, Utc),\n\n },\n\n ))\n", "file_path": "src/db/articles.rs", "rank": 87, "score": 24797.5075385864 }, { "content": "#[derive(Responder)]\n\n#[allow(clippy::large_enum_variant)]\n\nenum TemplateResult {\n\n Template(Template),\n\n #[response(status = 400)]\n\n Error(Template),\n\n Redirect(Redirect),\n\n}\n\n\n\n#[get(\"/register\")]\n\nasync fn register_page(\n\n cfg: &State<Config>,\n\n cache: &State<Cache>,\n\n er: Option<EnabledRegistration>,\n\n session: Option<&UserSession>,\n\n) -> Result<TemplateResult> {\n\n // If er is None, registration is disabled.\n\n // If session is Some, we're already logged in.\n\n if er.is_none() || session.is_some() {\n\n return Ok(TemplateResult::Redirect(Redirect::to(\n\n cfg.default_path.clone(),\n\n )));\n", "file_path": "src/users.rs", "rank": 88, "score": 24765.70080952519 }, { "content": "use rocket::{\n\n form::Form,\n\n get,\n\n http::{Cookie, CookieJar},\n\n post,\n\n response::{Redirect, Responder},\n\n FromForm, State,\n\n};\n\nuse rocket_dyn_templates::Template;\n\nuse serde_json::json;\n\nuse uuid::Uuid;\n\n\n\nuse crate::{\n\n db::{\n\n users::{LoggedUser, UserSession},\n\n EnabledRegistration,\n\n },\n\n Cache, Config, Db, Error, Result,\n\n};\n\n\n", "file_path": "src/users.rs", "rank": 89, "score": 18.83989689128789 }, { "content": "use rocket::{\n\n http::{ContentType, Status},\n\n local::blocking::{Client, LocalResponse},\n\n};\n\nuse scraper::Selector;\n\nuse serial_test::serial;\n\nuse uuid::Uuid;\n\n\n\nuse super::rocket;\n\nuse crate::{\n\n articles::AddRevRequest,\n\n settings::AdminSettings,\n\n users::{LoginRequest, RegisterRequest},\n\n ArticleIndex, Cache, Db,\n\n};\n\n\n\nconst 
PASSWORD: &str = \"abc123\";\n\n\n", "file_path": "src/tests.rs", "rank": 90, "score": 16.232145293974042 }, { "content": "#![recursion_limit = \"512\"]\n\n\n\nuse rocket::{fairing::AdHoc, fs::FileServer, response::Redirect, Build, Rocket, State};\n\nuse rocket_dyn_templates::Template;\n\nuse serde::Deserialize;\n\n\n\nmod cache;\n\npub use cache::Cache;\n\nmod db;\n\npub use db::Db;\n\nmod search;\n\npub use search::ArticleIndex;\n\n\n\n#[derive(serde::Serialize, Deserialize)]\n\npub struct Config {\n\n pub site_name: String,\n\n pub main_page: String,\n\n #[serde(default)]\n\n pub default_path: String,\n\n}\n\n\n\nmod error;\n\npub use error::Error;\n", "file_path": "src/main.rs", "rank": 91, "score": 15.645129307208666 }, { "content": " pub(crate) captcha_solution: String,\n\n}\n\n\n\n#[post(\"/register\", data = \"<form>\")]\n\nasync fn register_form(\n\n cfg: &State<Config>,\n\n db: &State<Db>,\n\n cache: &State<Cache>,\n\n form: Form<RegisterRequest>,\n\n er: Option<EnabledRegistration>,\n\n session: Option<&UserSession>,\n\n) -> Result<TemplateResult> {\n\n // If er is None, registration is disabled.\n\n // If session is Some, we're already logged in.\n\n if er.is_none() || session.is_some() {\n\n return Ok(TemplateResult::Redirect(Redirect::to(\n\n cfg.default_path.clone(),\n\n )));\n\n }\n\n let RegisterRequest {\n", "file_path": "src/users.rs", "rank": 92, "score": 12.94830064466211 }, { "content": " };\n\n Ok(TemplateResult::Error(Template::render(\"login\", context)))\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n#[get(\"/logout\")]\n\nasync fn logout(\n\n cfg: &State<Config>,\n\n db: &State<Db>,\n\n cookies: &CookieJar<'_>,\n\n session: Option<&UserSession>,\n\n) -> Result<TemplateResult> {\n\n // Remove the session from the user's cookies in any case\n\n cookies.remove(Cookie::named(\"session_id\"));\n\n if let Some(session) = session {\n\n // And if it's still in the database, remove it from there as well\n\n 
db.destroy_session(session.session_id).await?;\n\n Ok(TemplateResult::Template(Template::render(\n", "file_path": "src/users.rs", "rank": 93, "score": 12.73650129321203 }, { "content": "use chrono::{DateTime, Utc};\n\nuse pulldown_cmark::{html, BrokenLink, Options, Parser};\n\nuse rocket::{\n\n form::Form,\n\n get,\n\n http::Status,\n\n post,\n\n response::{status, Redirect},\n\n FromForm, Route, State,\n\n};\n\nuse rocket_dyn_templates::Template;\n\nuse serde_json::json;\n\n\n\nuse crate::{\n\n db::{\n\n self,\n\n articles::{DisplayRevision, RevId},\n\n users::{LoggedUser, UserSession},\n\n Db,\n\n },\n\n ArticleIndex, Config, Error, Result,\n\n};\n\n\n", "file_path": "src/articles.rs", "rank": 94, "score": 12.1503594855771 }, { "content": "use rocket::{form::Form, get, post, response::Redirect, FromForm, State};\n\nuse rocket_dyn_templates::Template;\n\nuse serde_json::json;\n\n\n\nuse crate::{\n\n db::users::{LoggedAdmin, LoggedUser},\n\n Config, Db, Result,\n\n};\n\n\n", "file_path": "src/settings.rs", "rank": 95, "score": 11.98683549950578 }, { "content": " }))\n\n .attach(AdHoc::try_on_ignite(\"Connect to db\", |rocket| async {\n\n #[derive(Deserialize)]\n\n struct DbConfig {\n\n database_url: String,\n\n }\n\n let config: DbConfig = match rocket.figment().extract() {\n\n Ok(c) => c,\n\n Err(e) => {\n\n log::error!(\"Failed to read database url: {}\", e);\n\n return Err(rocket);\n\n }\n\n };\n\n let db = match Db::try_connect(&config.database_url).await {\n\n Ok(db) => db,\n\n Err(e) => {\n\n log::error!(\"Failed to connect to database: {}\", e);\n\n return Err(rocket);\n\n }\n\n };\n", "file_path": "src/main.rs", "rank": 96, "score": 11.441201616485037 }, { "content": "#[derive(FromForm)]\n\n#[cfg_attr(test, derive(serde::Serialize))]\n\npub struct AddRevRequest {\n\n pub title: Option<String>,\n\n pub content: String,\n\n}\n\n#[post(\"/<article_name>/edit\", data = \"<form>\")]\n\nasync fn edit_form(\n\n db: &State<Db>,\n\n cfg: &State<Config>,\n\n 
search_index: &State<ArticleIndex>,\n\n article_name: String,\n\n form: Form<AddRevRequest>,\n\n session: &UserSession,\n\n user: LoggedUser,\n\n) -> Result<status::Custom<Template>> {\n\n // Get the article's id if it already exists.\n\n let article_id = db.article_id_by_name(&article_name).await?;\n\n\n\n let AddRevRequest {\n", "file_path": "src/articles.rs", "rank": 97, "score": 10.978851499249874 }, { "content": "use chrono::{DateTime, Utc};\n\nuse parking_lot::Mutex;\n\nuse pulldown_cmark::{html, BrokenLink, CowStr, Event, Options, Parser, Tag};\n\nuse tantivy::{\n\n collector::TopDocs,\n\n doc,\n\n query::QueryParser,\n\n schema::{Field, Schema, STORED, STRING, TEXT},\n\n IndexReader, IndexWriter, Snippet, SnippetGenerator, Term,\n\n};\n\nuse uuid::Uuid;\n\n\n\nuse crate::{db::articles::ArticleWithRevision, Result};\n\n\n\npub struct ArticleIndex {\n\n id_field: Field,\n\n name_field: Field,\n\n content_field: Field,\n\n date_field: Field,\n\n inner: tantivy::Index,\n\n pub(crate) reader: IndexReader,\n\n writer: Mutex<IndexWriter>,\n\n}\n\n\n", "file_path": "src/search.rs", "rank": 98, "score": 10.372151282899889 }, { "content": " Ok(rocket.manage(db))\n\n }))\n\n .attach(AdHoc::try_on_ignite(\n\n \"Create search index\",\n\n |rocket| async {\n\n // I think I can unwrap this because this fairing will only run if the first one succeeds.\n\n let db = rocket.state::<Db>().unwrap();\n\n let index = match ArticleIndex::new(db).await {\n\n Ok(index) => index,\n\n Err(e) => {\n\n log::error!(\"Failed to create article index: {}\", e);\n\n return Err(rocket);\n\n }\n\n };\n\n Ok(rocket.manage(index))\n\n },\n\n ))\n\n .attach(Template::fairing())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 99, "score": 10.281570910740852 } ]
Rust
src/kernel/memory.rs
IamTheCarl/kernel_emulator
790be670460830f0f62e02b3b214d2c566264473
use crate::kernel::{bytes::Bytes, Pointer, Value, ValueSize}; use segmap::SegmentMap; use std::{ cell::{Ref, RefCell, RefMut}, ops::{Deref, DerefMut, Range}, }; use thiserror::Error; #[derive(Error, Debug)] pub enum Error { #[error("Loaded overlapping memory.")] MemoryOverlapps { sections: Vec<(String, Range<Pointer>)>, }, #[error("Wrong memory type: {address:08x} Wanted read:{read_wanted} Wanted write:{write_wanted} Wanted execute:{execute_wanted}")] WrongMemoryType { address: Pointer, read_wanted: bool, write_wanted: bool, execute_wanted: bool, }, #[error("Attempt to access unmapped memory: 0x{0:016x}.")] UnmappedAddress(Pointer), #[error("Memory access operation is split between blocks.")] SectionAliacing, } pub type Result<T> = std::result::Result<T, Error>; pub struct ProcessMemory { segments: SegmentMap<Pointer, RefCell<MemoryBlock>>, } impl ProcessMemory { pub fn new() -> Self { Self { segments: SegmentMap::new(), } } pub fn segments(&self) -> impl Iterator<Item = (Range<Pointer>, bool, bool, bool)> + '_ { self.segments.iter().map(|(s, r)| { let block = r.borrow(); let is_read = block.is_read(); let is_write = block.is_write(); let is_executable = block.is_executable(); ( Range { start: *s.start_value().expect("Infinite memory region."), end: *s.end_value().expect("Infinite memory region."), }, is_read, is_write, is_executable, ) }) } pub fn get_memory_block(&self, address: &Pointer) -> Result<Ref<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow()) }) } pub fn get_memory_block_mut(&self, address: &Pointer) -> Result<RefMut<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow_mut()) }) } pub fn read_random_bytes(&self, address: Pointer, target: &mut [u8]) -> Result<()> { let length = target.len() as Pointer; let range = address..address + length; let block = self.get_memory_block(&range.start)?; if block.is_read() { let data = 
block.get_range(range)?; target.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address: range.start, read_wanted: true, write_wanted: false, execute_wanted: false, }) } } pub fn read_random(&self, address: Pointer, size: ValueSize) -> Result<Value> { let mut bytes = [0u8; 8]; self.read_random_bytes(address, &mut bytes[..size.len()])?; let value = Value::from_bytes(&bytes[..size.len()]); Ok(value) } pub fn write_random_bytes(&self, address: Pointer, data: &[u8]) -> Result<()> { let mut block = self.get_memory_block_mut(&address)?; let length = data.len(); if block.is_write() { let range = address..address + length as Pointer; let block_data = block.get_range_mut(range)?; block_data.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address, read_wanted: false, write_wanted: true, execute_wanted: false, }) } } pub fn write_random(&self, address: Pointer, value: Value) -> Result<()> { self.write_random_bytes(address, &value.to_bytes()) } pub fn new_block(&mut self, memory_block: MemoryBlock) -> Result<()> { match self .segments .insert_if_empty(memory_block.range(), RefCell::new(memory_block)) { None => Ok(()), Some(memory_block) => { let sections: Vec<(String, Range<Pointer>)> = self .segments .iter_in(memory_block.borrow().range()) .map(|(_segment, overlapping)| { let block = overlapping.borrow(); (block.name.clone(), block.range()) }) .collect(); let range = memory_block.borrow().range(); println!( "OVERLAPPING RANGE {} {:08x}-{:08x}:", memory_block.borrow().name, range.start, range.end ); for (name, range) in sections.iter() { println!("\t{}: {:08x}-{:08x}", name, range.start, range.end); } Err(Error::MemoryOverlapps { sections }) } } } pub fn new_blank_block(&mut self, block: BlankMemoryBlock) -> Result<()> { let range = block.range; let gaps: Vec<Range<Pointer>> = self .segments .iter_gaps() .map(|gap| Range { start: **gap.start_value().expect("Infinite memory secton."), end: **gap.end_value().expect("Infinite memory 
secton."), }) .filter(|gap| gap.end >= range.start && gap.start <= range.end) .map(|gap| Range { start: std::cmp::max(gap.start, range.start), end: std::cmp::min(gap.end, range.end), }) .collect(); println!("ADD BLANK SECTION {}: {:016x?}", block.name, gaps); let mut max = range.start; let mut index = 0; for gap in gaps { max = std::cmp::max(max, gap.end); let length = gap.end - gap.start; if length > 0 { let data = Bytes::Original(vec![0u8; length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), gap.start, data, block.read, block.write, block.execute, ); index += 1; println!( "BLANK BLOCK {}: {:08x}-{:08x}", block.name, gap.start, gap.end ); self.new_block(block)?; } } let end_block_length = range.end + 1 - max; if end_block_length > 0 { let data = Bytes::Original(vec![0u8; end_block_length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), max, data, block.read, block.write, block.execute, ); println!("BLANK BLOCK {}: {:08x}-{:08x}", block.name, max, range.end); self.new_block(block).ok(); } for i in range { debug_assert!( self.segments.get(&i).is_some(), "Missing address: {:016x}", i ); } Ok(()) } pub fn replace(&mut self, source: ProcessMemory) { self.segments = source.segments; } } #[derive(Clone, Debug)] pub struct MemoryBlock { name: String, read: bool, write: bool, execute: bool, base_address: Pointer, data: Bytes, } impl MemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, data: Bytes, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, read, write, execute, base_address, data, } } } impl MemoryBlock { pub fn is_executable(&self) -> bool { self.execute } pub fn is_read(&self) -> bool { self.read } pub fn is_write(&self) -> bool { self.write } pub fn range(&self) -> std::ops::Range<Pointer> { self.base_address..(self.base_address + self.data.len() as Pointer - 1) } pub fn get_range(&self, range: Range<Pointer>) -> Result<&[u8]> { let start = 
range.start - self.base_address; let end = range.end - self.base_address; self.data .get(start as usize..end as usize) .map_or(Err(Error::SectionAliacing), Ok) } pub fn get_range_mut(&mut self, range: Range<Pointer>) -> Result<&mut [u8]> { let start = range.start - self.base_address; let end = range.end - self.base_address; self.data .get_mut(start as usize..end as usize) .map_or(Err(Error::SectionAliacing), Ok) } } impl Deref for MemoryBlock { type Target = [u8]; fn deref(&self) -> &Self::Target { self.data.deref() } } impl DerefMut for MemoryBlock { fn deref_mut(&mut self) -> &mut Self::Target { self.data.deref_mut() } } impl std::cmp::PartialEq for MemoryBlock { fn eq(&self, _other: &Self) -> bool { false } } impl std::cmp::Eq for MemoryBlock {} #[derive(Clone)] pub struct BlankMemoryBlock { name: String, range: Range<Pointer>, read: bool, write: bool, execute: bool, } impl BlankMemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, length: Pointer, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, range: Range { start: base_address, end: base_address + length, }, read, write, execute, } } } #[test] fn overlapping_memory() { #[cfg(not(tarpaulin_include))] fn assert_overlap_failed(result: Result<()>) -> std::result::Result<(), &'static str> { match result { Err(error) => match error { Error::MemoryOverlapps { .. 
} => { Ok(()) } _ => Err("Overlapping produced wrong error type."), }, Ok(_) => Err("Overlapping did not fail."), } } let mut kernel = ProcessMemory::new(); kernel .new_block(MemoryBlock::new( "", 0, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); kernel .new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 256, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 1, Bytes::from_static(&[0u8; 1]), false, false, false, )); assert_overlap_failed(result).unwrap(); }
use crate::kernel::{bytes::Bytes, Pointer, Value, ValueSize}; use segmap::SegmentMap; use std::{ cell::{Ref, RefCell, RefMut}, ops::{Deref, DerefMut, Range}, }; use thiserror::Error; #[derive(Error, Debug)] pub enum Error { #[error("Loaded overlapping memory.")] MemoryOverlapps { sections: Vec<(String, Range<Pointer>)>, }, #[error("Wrong memory type: {address:08x} Wanted read:{read_wanted} Wanted write:{write_wanted} Wanted execute:{execute_wanted}")] WrongMemoryType { address: Pointer, read_wanted: bool, write_wanted: bool, execute_wanted: bool, }, #[error("Attempt to access unmapped memory: 0x{0:016x}.")] UnmappedAddress(Pointer), #[error("Memory access operation is split between blocks.")] SectionAliacing, } pub type Result<T> = std::result::Result<T, Error>; pub struct ProcessMemory { segments: SegmentMap<Pointer, RefCell<MemoryBlock>>, } impl ProcessMemory { pub fn new() -> Self { Self { segments: SegmentMap::new(), } } pub fn segments(&self) -> impl Iterator<Item = (Range<Pointer>, bool, bool, bool)> + '_ { self.segments.iter().map(|(s, r)| { let block = r.borrow(); let is_read = block.is_read(); let is_write = block.is_write(); let is_executable = block.is_executable(); ( Range { start: *s.start_value().expect("Infinite memory region."), end: *s.end_value().expect("Infinite memory region."), }, is_read, is_write, is_executable, ) }) } pub fn get_memory_block(&self, address: &Pointer) -> Result<Ref<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow()) }) } pub fn get_memory_block_mut(&self, address: &Pointer) -> Result<RefMut<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow_mut()) }) } pub fn read_random_bytes(&self, address: Pointer, target: &mut [u8]) -> Result<()> { let length = target.len() as Pointer; let range = address..address + length; let block = self.get_memory_block(&range.start)?; if block.is_read() { let data = 
block.get_range(range)?; target.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address: range.start, read_wanted: true, write_wanted: false, execute_wanted: false, }) } } pub fn read_random(&self, address: Pointer, size: ValueSize) -> Result<Value> { let mut bytes = [0u8; 8]; self.read_random_bytes(address, &mut bytes[..size.len()])?; let
base_address, end: base_address + length, }, read, write, execute, } } } #[test] fn overlapping_memory() { #[cfg(not(tarpaulin_include))] fn assert_overlap_failed(result: Result<()>) -> std::result::Result<(), &'static str> { match result { Err(error) => match error { Error::MemoryOverlapps { .. } => { Ok(()) } _ => Err("Overlapping produced wrong error type."), }, Ok(_) => Err("Overlapping did not fail."), } } let mut kernel = ProcessMemory::new(); kernel .new_block(MemoryBlock::new( "", 0, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); kernel .new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 256, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 1, Bytes::from_static(&[0u8; 1]), false, false, false, )); assert_overlap_failed(result).unwrap(); }
value = Value::from_bytes(&bytes[..size.len()]); Ok(value) } pub fn write_random_bytes(&self, address: Pointer, data: &[u8]) -> Result<()> { let mut block = self.get_memory_block_mut(&address)?; let length = data.len(); if block.is_write() { let range = address..address + length as Pointer; let block_data = block.get_range_mut(range)?; block_data.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address, read_wanted: false, write_wanted: true, execute_wanted: false, }) } } pub fn write_random(&self, address: Pointer, value: Value) -> Result<()> { self.write_random_bytes(address, &value.to_bytes()) } pub fn new_block(&mut self, memory_block: MemoryBlock) -> Result<()> { match self .segments .insert_if_empty(memory_block.range(), RefCell::new(memory_block)) { None => Ok(()), Some(memory_block) => { let sections: Vec<(String, Range<Pointer>)> = self .segments .iter_in(memory_block.borrow().range()) .map(|(_segment, overlapping)| { let block = overlapping.borrow(); (block.name.clone(), block.range()) }) .collect(); let range = memory_block.borrow().range(); println!( "OVERLAPPING RANGE {} {:08x}-{:08x}:", memory_block.borrow().name, range.start, range.end ); for (name, range) in sections.iter() { println!("\t{}: {:08x}-{:08x}", name, range.start, range.end); } Err(Error::MemoryOverlapps { sections }) } } } pub fn new_blank_block(&mut self, block: BlankMemoryBlock) -> Result<()> { let range = block.range; let gaps: Vec<Range<Pointer>> = self .segments .iter_gaps() .map(|gap| Range { start: **gap.start_value().expect("Infinite memory secton."), end: **gap.end_value().expect("Infinite memory secton."), }) .filter(|gap| gap.end >= range.start && gap.start <= range.end) .map(|gap| Range { start: std::cmp::max(gap.start, range.start), end: std::cmp::min(gap.end, range.end), }) .collect(); println!("ADD BLANK SECTION {}: {:016x?}", block.name, gaps); let mut max = range.start; let mut index = 0; for gap in gaps { max = std::cmp::max(max, gap.end); let length 
= gap.end - gap.start; if length > 0 { let data = Bytes::Original(vec![0u8; length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), gap.start, data, block.read, block.write, block.execute, ); index += 1; println!( "BLANK BLOCK {}: {:08x}-{:08x}", block.name, gap.start, gap.end ); self.new_block(block)?; } } let end_block_length = range.end + 1 - max; if end_block_length > 0 { let data = Bytes::Original(vec![0u8; end_block_length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), max, data, block.read, block.write, block.execute, ); println!("BLANK BLOCK {}: {:08x}-{:08x}", block.name, max, range.end); self.new_block(block).ok(); } for i in range { debug_assert!( self.segments.get(&i).is_some(), "Missing address: {:016x}", i ); } Ok(()) } pub fn replace(&mut self, source: ProcessMemory) { self.segments = source.segments; } } #[derive(Clone, Debug)] pub struct MemoryBlock { name: String, read: bool, write: bool, execute: bool, base_address: Pointer, data: Bytes, } impl MemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, data: Bytes, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, read, write, execute, base_address, data, } } } impl MemoryBlock { pub fn is_executable(&self) -> bool { self.execute } pub fn is_read(&self) -> bool { self.read } pub fn is_write(&self) -> bool { self.write } pub fn range(&self) -> std::ops::Range<Pointer> { self.base_address..(self.base_address + self.data.len() as Pointer - 1) } pub fn get_range(&self, range: Range<Pointer>) -> Result<&[u8]> { let start = range.start - self.base_address; let end = range.end - self.base_address; self.data .get(start as usize..end as usize) .map_or(Err(Error::SectionAliacing), Ok) } pub fn get_range_mut(&mut self, range: Range<Pointer>) -> Result<&mut [u8]> { let start = range.start - self.base_address; let end = range.end - self.base_address; self.data .get_mut(start as usize..end as 
usize) .map_or(Err(Error::SectionAliacing), Ok) } } impl Deref for MemoryBlock { type Target = [u8]; fn deref(&self) -> &Self::Target { self.data.deref() } } impl DerefMut for MemoryBlock { fn deref_mut(&mut self) -> &mut Self::Target { self.data.deref_mut() } } impl std::cmp::PartialEq for MemoryBlock { fn eq(&self, _other: &Self) -> bool { false } } impl std::cmp::Eq for MemoryBlock {} #[derive(Clone)] pub struct BlankMemoryBlock { name: String, range: Range<Pointer>, read: bool, write: bool, execute: bool, } impl BlankMemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, length: Pointer, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, range: Range { start:
random
[ { "content": "type Result<T> = std::result::Result<T, Error>;\n\n\n\npub struct SyscallRequest {\n\n process_id: ProcessId,\n\n call_code: Pointer,\n\n arguments: [Pointer; 6],\n\n}\n\n\n\npub enum SyscallResult {\n\n None,\n\n Some(Pointer),\n\n Exit,\n\n}\n\n\n\npub struct Executable {\n\n // Where execution should start.\n\n entry_point: Pointer,\n\n\n\n sections: Vec<MemoryBlock>,\n\n blank_sections: Vec<BlankMemoryBlock>,\n", "file_path": "src/kernel/mod.rs", "rank": 0, "score": 127029.05282158073 }, { "content": "pub trait IntoValue {\n\n fn into_value(self, size: ValueSize) -> Value;\n\n}\n\n\n\nimpl IntoValue for u64 {\n\n fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n\n ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as u16),\n\n ValueSize::Double => Value::Double(self as u32),\n\n ValueSize::Quad => Value::Quad(self as u64),\n\n }\n\n }\n\n}\n\n\n\nimpl IntoValue for i64 {\n\n fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n\n ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as u16),\n", "file_path": "src/kernel/mod.rs", "rank": 2, "score": 87929.63355799872 }, { "content": "trait GetValueSize {\n\n fn value_size(&self) -> ValueSize {\n\n self.try_value_size().expect(\"Value does not have width.\")\n\n }\n\n fn try_value_size(&self) -> Option<ValueSize>;\n\n}\n\n\n\nimpl GetValueSize for yaxpeax_x86::long_mode::RegSpec {\n\n fn try_value_size(&self) -> Option<ValueSize> {\n\n use yaxpeax_x86::long_mode::register_class;\n\n\n\n Some(match self.class() {\n\n register_class::Q => ValueSize::Quad,\n\n register_class::D => ValueSize::Double,\n\n register_class::W => ValueSize::Word,\n\n register_class::B => ValueSize::Byte,\n\n register_class::RB => unimplemented!(),\n\n register_class::CR => unimplemented!(),\n\n register_class::DR => unimplemented!(),\n\n register_class::S => unimplemented!(),\n", "file_path": "src/kernel/mod.rs", "rank": 3, 
"score": 73233.83331417046 }, { "content": "#[test]\n\nfn reference() {\n\n let data = b\"test\";\n\n let vec = data.to_vec();\n\n let bytes = Bytes::reference(Arc::new(vec));\n\n assert_eq!(&*bytes, data);\n\n\n\n let mut reference = Bytes::clone(&bytes);\n\n\n\n assert_eq!(*reference, *bytes);\n\n\n\n // They should be the same pointer.\n\n assert_eq!(reference.as_ptr(), bytes.as_ptr());\n\n\n\n reference.reverse();\n\n\n\n // Now they should be different.\n\n assert_ne!(reference.as_ptr(), bytes.as_ptr());\n\n}\n", "file_path": "src/kernel/bytes.rs", "rank": 4, "score": 65746.41833320334 }, { "content": "#[test]\n\nfn from_static() {\n\n let data = b\"test\";\n\n let mut bytes = Bytes::from_static(data);\n\n assert_eq!(&*bytes, data);\n\n\n\n let static_clone = Bytes::clone(&bytes);\n\n assert_eq!(&*static_clone, data);\n\n\n\n // Modifcation will result in a new vector.\n\n let test2 = b\"tset\";\n\n bytes.copy_from_slice(test2);\n\n assert_eq!(&*bytes, test2);\n\n assert_ne!(&*static_clone, test2);\n\n}\n\n\n", "file_path": "src/kernel/bytes.rs", "rank": 5, "score": 65746.41833320334 }, { "content": "#[test]\n\nfn from_vec() {\n\n let data = b\"test\";\n\n let vec = data.to_vec();\n\n let bytes = Bytes::from_vec(vec);\n\n assert_eq!(&*bytes, data);\n\n\n\n let vec_clone = Bytes::clone(&bytes);\n\n assert_eq!(&*vec_clone, data);\n\n}\n\n\n", "file_path": "src/kernel/bytes.rs", "rank": 6, "score": 65746.41833320334 }, { "content": "struct UnicornData {\n\n process_id: ProcessId,\n\n memory: ProcessMemory,\n\n syscall_request: Option<SyscallRequest>,\n\n error: Option<Error>,\n\n}\n\n\n\npub struct UnicornX86Process {\n\n unicorn: Unicorn<'static, UnicornData>,\n\n}\n\n\n\nimpl UnicornX86Process {\n\n pub fn new() -> Result<Box<Self>> {\n\n let memory = ProcessMemory::new();\n\n let mut unicorn = Unicorn::new_with_data(\n\n Arch::X86,\n\n Mode::MODE_64,\n\n UnicornData {\n\n process_id: 0,\n\n memory,\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 
7, "score": 58881.158450578296 }, { "content": "fn main() {\n\n let test_programs: &[&[u8]] = &[\n\n include_bytes!(\"../testing/hello_world_asm.elf\"),\n\n include_bytes!(\"../testing/hello_world_c.elf\"),\n\n ];\n\n\n\n let test_program_names = [\n\n \"../testing/hello_world_asm.elf\",\n\n \"../testing/hello_world_c.elf\",\n\n ];\n\n\n\n let mut pid_to_name = HashMap::new();\n\n\n\n let mut system = Kernel::new().unwrap();\n\n\n\n let mut processes = HashSet::new();\n\n\n\n for (program, name) in test_programs.iter().zip(test_program_names.iter()) {\n\n let executable = Kernel::load_elf(program).unwrap();\n\n let process_x86 = X86Process::new();\n", "file_path": "src/main.rs", "rank": 8, "score": 40331.616832433276 }, { "content": "struct RegisterFile {\n\n general_purpose_registers: [Pointer; 16],\n\n rip: Pointer,\n\n sf: bool,\n\n zf: bool,\n\n pf: bool,\n\n of: bool,\n\n cf: bool,\n\n af: bool,\n\n}\n\n\n\nimpl RegisterFile {\n\n fn new() -> Self {\n\n Self {\n\n general_purpose_registers: [0; 16],\n\n rip: 0,\n\n sf: false,\n\n zf: false,\n\n pf: false,\n\n of: false,\n", "file_path": "src/kernel/process/x86.rs", "rank": 9, "score": 36815.3680827407 }, { "content": "struct InstructionIterator {\n\n instruction_index: usize,\n\n instructions: Vec<Instruction>,\n\n instruction_addresses: HashMap<Pointer, usize>,\n\n}\n\n\n\nimpl InstructionIterator {\n\n fn new() -> Self {\n\n // Start in an unset state.\n\n Self {\n\n instruction_index: 0,\n\n instructions: Vec::new(),\n\n instruction_addresses: HashMap::new(),\n\n }\n\n }\n\n\n\n fn jump(&mut self, memory: &ProcessMemory, address: Pointer) -> Result<()> {\n\n if let Some(index) = self.instruction_addresses.get(&address) {\n\n // We're in the same block, so we'll just change our index.\n\n self.instruction_index = *index;\n", "file_path": "src/kernel/process/x86.rs", "rank": 10, "score": 36815.3680827407 }, { "content": "#[allow(unused)]\n\n#[repr(usize)]\n\nenum GeneralPurposeRegister {\n\n Rax = 0,\n\n Rcx 
= 1,\n\n Rdx = 2,\n\n Rbx = 3,\n\n Rsp = 4,\n\n Rbp = 5,\n\n Rsi = 6,\n\n Rdi = 7,\n\n R8 = 8,\n\n R9 = 9,\n\n R10 = 10,\n\n R11 = 11,\n\n R12 = 12,\n\n R13 = 13,\n\n R14 = 14,\n\n R15 = 15,\n\n}\n\n\n", "file_path": "src/kernel/process/x86.rs", "rank": 11, "score": 36425.0133311852 }, { "content": "pub trait Process {\n\n ///\n\n /// Initalize the process.\n\n ///\n\n /// entry_point - where execution should start.\n\n /// stack_pointer - where the top of the stack lives.\n\n /// at_exit_pointer - a function pointer to be registered with atexit(BA_OS).\n\n /// memory - the memory space visable to the process.\n\n ///\n\n /// Returns an instance of the process.\n\n ///\n\n fn initalize(\n\n &mut self,\n\n process_id: ProcessId,\n\n entry_point: Pointer,\n\n stack_pointer: Pointer,\n\n at_exit_pointer: Pointer,\n\n memory: ProcessMemory,\n\n ) -> Result<()>;\n\n\n", "file_path": "src/kernel/process/mod.rs", "rank": 12, "score": 35201.176834237274 }, { "content": " Self::Static(data) => Self::Static(data),\n\n Self::Reference(data) => Self::Reference(Arc::clone(data)),\n\n Self::Original(data) => Self::Original(data.clone()),\n\n }\n\n }\n\n}\n\n\n\nimpl Deref for Bytes {\n\n type Target = [u8];\n\n\n\n fn deref(&self) -> &Self::Target {\n\n match self {\n\n Bytes::Static(data) => data,\n\n Bytes::Reference(data) => data,\n\n Bytes::Original(data) => data,\n\n }\n\n }\n\n}\n\n\n\nimpl DerefMut for Bytes {\n", "file_path": "src/kernel/bytes.rs", "rank": 13, "score": 28685.461481390998 }, { "content": " // Used in unit tests.\n\n #[allow(unused)]\n\n pub fn from_static(data: &'static [u8]) -> Self {\n\n Self::Static(data)\n\n }\n\n\n\n // Used in unit tests.\n\n #[allow(unused)]\n\n pub fn from_vec(data: Vec<u8>) -> Self {\n\n Self::Original(data)\n\n }\n\n\n\n pub fn reference(data: Arc<Vec<u8>>) -> Self {\n\n Self::Reference(data)\n\n }\n\n}\n\n\n\nimpl Clone for Bytes {\n\n fn clone(&self) -> Self {\n\n match self {\n", "file_path": "src/kernel/bytes.rs", "rank": 
14, "score": 28685.322315513153 }, { "content": " fn deref_mut(&mut self) -> &mut Self::Target {\n\n match self {\n\n Bytes::Static(data) => {\n\n // Convert yourself into an original so we don't modify the static memory.\n\n *self = Self::Original(data.to_vec());\n\n self.deref_mut()\n\n }\n\n Bytes::Reference(data) => {\n\n // Convert yourself into an original so we don't modify the source memory.\n\n *self = Self::Original(Vec::clone(data));\n\n self.deref_mut()\n\n }\n\n Bytes::Original(data) => data.deref_mut(),\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/kernel/bytes.rs", "rank": 15, "score": 28682.86047231721 }, { "content": "// Copyright 2022 James Carl\n\n//\n\n// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or\n\n// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or\n\n// http://opensource.org/licenses/MIT>, at your option. This file may not be\n\n// copied, modified, or distributed except according to those terms.\n\n\n\nuse std::{\n\n ops::{Deref, DerefMut},\n\n sync::Arc,\n\n};\n\n\n\n#[derive(Debug)]\n\npub enum Bytes {\n\n Static(&'static [u8]),\n\n Reference(Arc<Vec<u8>>),\n\n Original(Vec<u8>),\n\n}\n\n\n\nimpl Bytes {\n", "file_path": "src/kernel/bytes.rs", "rank": 16, "score": 28679.91262966457 }, { "content": " data.syscall_request = None;\n\n data.error = Some(error);\n\n }\n\n })?;\n\n\n\n // Setup memory.\n\n let read_callback = move |uc: &mut Unicorn<'_, UnicornData>, address, size| {\n\n let size = ValueSize::new(size as u8);\n\n\n\n println!(\"READ: {:016x} len {}\", address, size.len());\n\n\n\n let memory = &uc.get_data().memory;\n\n match memory.read_random(address, size) {\n\n Ok(value) => value.as_pointer(),\n\n Err(error) => {\n\n // Report error.\n\n uc.get_data_mut().error = Some(error.into());\n\n\n\n // Attempt to stop the emulator.\n\n uc.emu_stop().ok();\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 38, "score": 34.016956895235545 }, { "content": " 
self.unicorn.reg_write(RegisterX86::RDX, at_exit_pointer)?;\n\n\n\n // Unmap old memory.\n\n let regions: Vec<(std::ops::Range<Pointer>, bool, bool, bool)> =\n\n self.memory().segments().collect();\n\n for (region, _is_read, _is_write, _is_executable) in regions {\n\n self.unicorn\n\n .mem_unmap(region.start, (region.end - region.start) as usize)?;\n\n }\n\n\n\n self.unicorn.get_data_mut().memory.replace(memory);\n\n\n\n Ok(())\n\n }\n\n\n\n fn step(\n\n &mut self,\n\n instruction_count: u64,\n\n syscall_result: Option<Pointer>,\n\n ) -> Result<(StepResult, i64)> {\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 39, "score": 34.00130539305437 }, { "content": " 0\n\n }\n\n }\n\n };\n\n let write_callback = move |uc: &mut Unicorn<'_, UnicornData>, address, size, value| {\n\n let size = ValueSize::new(size as u8);\n\n let memory = &uc.get_data().memory;\n\n\n\n let value = Value::Quad(value).dynamic_cast(ValueSize::new(size as u8));\n\n\n\n if let Err(error) = memory.write_random(address, value) {\n\n // Report error.\n\n uc.get_data_mut().error = Some(error.into());\n\n\n\n // Attempt to stop the emulator.\n\n uc.emu_stop().ok();\n\n }\n\n };\n\n\n\n unicorn.mmio_map(\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 40, "score": 33.966866326362826 }, { "content": "\n\n Ok(Arc::new(Executable {\n\n entry_point,\n\n sections,\n\n blank_sections,\n\n }))\n\n }\n\n\n\n fn build_stack(arguments: Vec<String>) -> Result<(Pointer, MemoryBlock)> {\n\n let memory_start = STACK_START - STACK_SIZE;\n\n\n\n let mut data = vec![0u8; STACK_SIZE as usize];\n\n let mut stack_pointer = STACK_SIZE;\n\n\n\n let mut push = |to_push: &[u8]| {\n\n let end = stack_pointer;\n\n stack_pointer -= to_push.len() as Pointer;\n\n data[stack_pointer as usize..end as usize].copy_from_slice(to_push);\n\n\n\n end\n", "file_path": "src/kernel/mod.rs", "rank": 41, "score": 31.17923618634906 }, { "content": " Value::Byte(v) => write!(f, \"{:02x}\", v),\n\n Value::Word(v) => 
write!(f, \"{:04x}\", v),\n\n Value::Double(v) => write!(f, \"{:08x}\", v),\n\n Value::Quad(v) => write!(f, \"{:016x}\", v),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum ValueSize {\n\n Byte,\n\n Word,\n\n Double,\n\n Quad,\n\n}\n\n\n\nimpl ValueSize {\n\n pub fn new(size: u8) -> Self {\n\n match size {\n\n 1 => ValueSize::Byte,\n", "file_path": "src/kernel/mod.rs", "rank": 42, "score": 30.71962442014051 }, { "content": " let message = memory_block\n\n .get_range(message_pointer..message_pointer + message_length)?;\n\n\n\n // FIXME: An IO error should set errno.\n\n let bytes_written = file.write(message)? as Pointer;\n\n\n\n Ok(SyscallResult::Some(bytes_written))\n\n } else {\n\n Err(Error::Memory(memory::Error::WrongMemoryType {\n\n address: message_pointer,\n\n read_wanted: true,\n\n write_wanted: false,\n\n execute_wanted: false,\n\n }))\n\n }\n\n }\n\n _ => panic!(\"Syscall `{}` is not yet supported.\", call_code),\n\n }\n\n } else {\n\n Err(Error::InvalidSyscall(call_code))\n\n }\n\n }\n\n}\n", "file_path": "src/kernel/mod.rs", "rank": 43, "score": 30.54876023658493 }, { "content": " match bytes_result {\n\n Ok(_) => {\n\n let decoder = InstDecoder::minimal();\n\n\n\n let mut block_reader = U8Reader::new(&bytes);\n\n let instruction = decoder\n\n .decode(&mut block_reader)\n\n .expect(\"Unicorn passed us an invalid instruction.\");\n\n\n\n println!(\n\n \"{:08x}: {:02x?} {:->40}\",\n\n address,\n\n &bytes[..size as usize],\n\n instruction\n\n );\n\n }\n\n Err(error) => {\n\n uc.get_data_mut().error = Some(Error::Memory(error));\n\n uc.emu_stop().ok();\n\n }\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 44, "score": 30.0128049883194 }, { "content": "\n\nimpl IntoValue for i8 {\n\n fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n\n ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as i16 as u16),\n\n ValueSize::Double => Value::Double(self as i32 as u32),\n\n 
ValueSize::Quad => Value::Quad(self as i64 as u64),\n\n }\n\n }\n\n}\n\n\n\npub const POINTER_WIDTH: Pointer = std::mem::size_of::<Pointer>() as Pointer;\n\npub const STACK_SIZE: Pointer = 4 * 1024; // 4 Kb\n\npub const STACK_START: Pointer = 0x7FFFFFFFFFFFFFFF;\n\n\n\npub struct Kernel {\n\n processes: HashMap<ProcessId, Box<dyn Process>>,\n\n next_pid: u64,\n\n time_aliace: i64,\n", "file_path": "src/kernel/mod.rs", "rank": 45, "score": 28.296209166868586 }, { "content": " Self::Byte(_) => 1,\n\n Self::Word(_) => 2,\n\n Self::Double(_) => 4,\n\n Self::Quad(_) => 8,\n\n }\n\n }\n\n\n\n pub fn dynamic_cast(&self, new_size: ValueSize) -> Self {\n\n match new_size {\n\n ValueSize::Byte => Value::Byte(self.as_byte()),\n\n ValueSize::Word => Value::Word(self.as_word()),\n\n ValueSize::Double => Value::Double(self.as_double()),\n\n ValueSize::Quad => Value::Quad(self.as_quad()),\n\n }\n\n }\n\n\n\n pub fn dynamic_signed_cast(&self, new_size: ValueSize) -> Self {\n\n match new_size {\n\n ValueSize::Byte => Value::Byte(self.as_signed_byte() as u8),\n\n ValueSize::Word => Value::Word(self.as_signed_word() as u16),\n", "file_path": "src/kernel/mod.rs", "rank": 46, "score": 27.958645315136955 }, { "content": "}\n\n\n\npub type Pointer = u64;\n\npub type ProcessId = u64;\n\n\n\n#[derive(Clone)]\n\npub enum Value {\n\n Byte(u8),\n\n Word(u16),\n\n Double(u32),\n\n Quad(u64),\n\n}\n\n\n\nimpl Value {\n\n pub fn from_bytes(bytes: &[u8]) -> Self {\n\n match bytes.len() {\n\n 1 => Self::Byte({\n\n let mut integer_bytes = [0u8; 1];\n\n integer_bytes.copy_from_slice(bytes);\n\n\n", "file_path": "src/kernel/mod.rs", "rank": 47, "score": 27.045085992459775 }, { "content": " ValueSize::Double => Value::Double(self.as_signed_double() as u32),\n\n ValueSize::Quad => Value::Quad(self.as_signed_quad() as u64),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Value {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Value::Byte(v) => write!(f, 
\"{:02x}\", v),\n\n Value::Word(v) => write!(f, \"{:04x}\", v),\n\n Value::Double(v) => write!(f, \"{:08x}\", v),\n\n Value::Quad(v) => write!(f, \"{:016x}\", v),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Value {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n", "file_path": "src/kernel/mod.rs", "rank": 48, "score": 26.91130570402318 }, { "content": "}\n\n\n\nimpl Kernel {\n\n pub fn new() -> Result<Self> {\n\n Ok(Self {\n\n processes: HashMap::new(),\n\n next_pid: 0,\n\n time_aliace: 0,\n\n })\n\n }\n\n\n\n pub fn load_elf(elf_bytes: &[u8]) -> Result<Arc<Executable>> {\n\n let elf = Elf::from_bytes(elf_bytes).unwrap();\n\n\n\n let mut sections = Vec::new();\n\n let mut blank_sections = Vec::new();\n\n let entry_point = elf.entry_point();\n\n\n\n for header in elf.section_header_iter() {\n\n let flags = header.flags();\n", "file_path": "src/kernel/mod.rs", "rank": 49, "score": 26.670122866924643 }, { "content": "\n\n let value = memory.read_random(stack_pointer, value_size)?;\n\n\n\n registers.general_purpose_registers[GeneralPurposeRegister::Rsp as usize] +=\n\n value.len() as Pointer;\n\n\n\n // println!(\"POP {:08x}: {}\", stack_pointer, value);\n\n\n\n Ok(value)\n\n }\n\n\n\n #[inline]\n\n fn jump(\n\n memory: &ProcessMemory,\n\n registers: &mut RegisterFile,\n\n instruction_iterator: &mut InstructionIterator,\n\n address: Pointer,\n\n ) -> Result<()> {\n\n registers.rip = address;\n\n instruction_iterator.jump(memory, address)?;\n", "file_path": "src/kernel/process/x86.rs", "rank": 50, "score": 26.09973119804663 }, { "content": " true,\n\n true,\n\n false,\n\n ),\n\n ))\n\n }\n\n\n\n pub fn new_process(\n\n &mut self,\n\n mut process: Box<dyn Process>,\n\n executable: &Executable,\n\n arguments: Vec<String>,\n\n ) -> Result<ProcessId> {\n\n let mut memory = ProcessMemory::new();\n\n\n\n // Start by loading the executable into memory.\n\n for section in executable.sections.iter() {\n\n 
memory.new_block(section.clone())?;\n\n }\n\n for section in executable.blank_sections.iter() {\n", "file_path": "src/kernel/mod.rs", "rank": 51, "score": 25.828043313561064 }, { "content": " Err(Error::Memory(MemoryError::WrongMemoryType {\n\n address,\n\n read_wanted: false,\n\n write_wanted: false,\n\n execute_wanted: true,\n\n }))\n\n }\n\n }\n\n }\n\n\n\n fn next(&mut self) -> Result<&Instruction> {\n\n let old_index = self.instruction_index;\n\n self.instruction_index += 1;\n\n\n\n if let Some(instruction) = self.instructions.get(old_index) {\n\n Ok(instruction)\n\n } else {\n\n todo!(\"Automatic jump to next block.\")\n\n }\n\n }\n", "file_path": "src/kernel/process/x86.rs", "rank": 52, "score": 25.183661105041615 }, { "content": " Some(&source),\n\n instruction,\n\n )?\n\n .as_quad();\n\n let b = Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &source,\n\n Some(&target),\n\n instruction,\n\n )?\n\n .as_quad();\n\n\n\n let result = (a & b).into_value(target.value_size());\n\n\n\n Self::write_target(\n\n &self.memory,\n\n &mut self.register_file,\n\n instruction.operand(0),\n\n result,\n", "file_path": "src/kernel/process/x86.rs", "rank": 53, "score": 25.1345223253871 }, { "content": " )?\n\n .as_quad();\n\n let result = (a ^ b).into_value(target.value_size());\n\n\n\n Self::write_target(\n\n &self.memory,\n\n &mut self.register_file,\n\n instruction.operand(0),\n\n result,\n\n )?;\n\n }\n\n Opcode::SUB => {\n\n let source = instruction.operand(1);\n\n let target = instruction.operand(0);\n\n\n\n let a = Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &target,\n\n Some(&source),\n", "file_path": "src/kernel/process/x86.rs", "rank": 54, "score": 24.520380451036367 }, { "content": " instruction,\n\n )?\n\n .as_signed_quad();\n\n let b = Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &source,\n\n Some(&target),\n\n instruction,\n\n )?\n\n .as_signed_quad();\n\n let result = 
a.wrapping_sub(b).into_value(target.value_size());\n\n\n\n Self::write_target(\n\n &self.memory,\n\n &mut self.register_file,\n\n instruction.operand(0),\n\n result,\n\n )?;\n\n }\n", "file_path": "src/kernel/process/x86.rs", "rank": 55, "score": 24.456977467465975 }, { "content": "// Copyright 2022 James Carl\n\n//\n\n// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or\n\n// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or\n\n// http://opensource.org/licenses/MIT>, at your option. This file may not be\n\n// copied, modified, or distributed except according to those terms.\n\n\n\nuse super::{Error, Process, Result, StepResult};\n\nuse crate::kernel::{\n\n memory::{Error as MemoryError, ProcessMemory},\n\n GetValueSize, IntoValue, Pointer, ProcessId, SyscallRequest, Value, ValueSize,\n\n};\n\nuse std::collections::HashMap;\n\nuse yaxpeax_arch::{Decoder, LengthedInstruction, Reader, U8Reader};\n\nuse yaxpeax_x86::amd64::{register_class, InstDecoder, Instruction, Opcode, Operand, RegSpec};\n\n\n\n// #[cfg(test)]\n\n// mod test;\n\n\n", "file_path": "src/kernel/process/x86.rs", "rank": 56, "score": 24.429325911059834 }, { "content": " Some(&source),\n\n instruction,\n\n )?\n\n .as_signed_quad();\n\n let b = Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &source,\n\n Some(&target),\n\n instruction,\n\n )?\n\n .as_signed_quad();\n\n let result = a.wrapping_add(b).into_value(target.value_size());\n\n\n\n Self::write_target(\n\n &self.memory,\n\n &mut self.register_file,\n\n instruction.operand(0),\n\n result,\n\n )?;\n", "file_path": "src/kernel/process/x86.rs", "rank": 57, "score": 24.239524381541926 }, { "content": "\n\n // Push argv.\n\n // We're actually just reserving space here.\n\n push(&[0u8; POINTER_WIDTH as usize]); // Must end with null word.\n\n for pointer in argument_pointers {\n\n // Push arguments on there too.\n\n push(&pointer.to_le_bytes());\n\n }\n\n\n\n // Push argc.\n\n 
push(&argument_count.to_le_bytes());\n\n\n\n // println!(\"STACK {:02x?}\", data);\n\n\n\n Ok((\n\n memory_start + stack_pointer as Pointer,\n\n MemoryBlock::new(\n\n \"stack\",\n\n memory_start,\n\n Bytes::Original(data),\n", "file_path": "src/kernel/mod.rs", "rank": 58, "score": 24.060412687947323 }, { "content": " instruction,\n\n )?;\n\n\n\n Self::push_onto_stack(&self.memory, &mut self.register_file, value)?;\n\n }\n\n Opcode::POP => {\n\n let target = instruction.operand(0);\n\n let value = Self::pop_from_stack(\n\n &self.memory,\n\n &mut self.register_file,\n\n target.value_size(),\n\n )?;\n\n Self::write_target(&self.memory, &mut self.register_file, target, value)?;\n\n }\n\n Opcode::LEA => {\n\n let target = instruction.operand(0);\n\n let source = instruction.operand(1);\n\n\n\n let value = Self::read_operand(\n\n &self.memory,\n", "file_path": "src/kernel/process/x86.rs", "rank": 59, "score": 23.892185858753834 }, { "content": " .as_quad();\n\n let result = (a | b).into_value(target.value_size());\n\n\n\n Self::write_target(\n\n &self.memory,\n\n &mut self.register_file,\n\n instruction.operand(0),\n\n result,\n\n )?;\n\n }\n\n // Opcode::ADC => todo!(),\n\n // Opcode::SBB => todo!(),\n\n Opcode::AND => {\n\n let source = instruction.operand(1);\n\n let target = instruction.operand(0);\n\n\n\n let a = Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &target,\n", "file_path": "src/kernel/process/x86.rs", "rank": 60, "score": 23.70164076969688 }, { "content": " fn from(val: i16) -> Self {\n\n Value::Word(val as u16)\n\n }\n\n}\n\n\n\nimpl From<u8> for Value {\n\n fn from(val: u8) -> Self {\n\n Value::Byte(val)\n\n }\n\n}\n\n\n\nimpl From<i8> for Value {\n\n fn from(val: i8) -> Self {\n\n Value::Byte(val as u8)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/mod.rs", "rank": 61, "score": 22.87354232168546 }, { "content": " Ok(())\n\n } else {\n\n // Looks like we're in a different block. 
We'll have to find it.\n\n\n\n let block = memory.get_memory_block(&address)?;\n\n\n\n if block.is_executable() {\n\n let decoder = InstDecoder::minimal();\n\n\n\n let mut block_reader = U8Reader::new(&block);\n\n\n\n let instructions = &mut self.instructions;\n\n let instruction_addresses = &mut self.instruction_addresses;\n\n\n\n // TODO maybe not toss the old ones out? Jumping back to them will be less expensive if we keep them.\n\n instructions.clear();\n\n instruction_addresses.clear();\n\n\n\n let mut current_offset = block.range().start\n\n + <U8Reader<'_> as Reader<u16, yaxpeax_arch::U32le>>::total_offset(\n", "file_path": "src/kernel/process/x86.rs", "rank": 62, "score": 22.30366166465807 }, { "content": " ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as u16),\n\n ValueSize::Double => Value::Double(self as u32),\n\n ValueSize::Quad => Value::Quad(self as i64 as u64),\n\n }\n\n }\n\n}\n\n\n\nimpl IntoValue for u16 {\n\n fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n\n ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as u16),\n\n ValueSize::Double => Value::Double(self as u32),\n\n ValueSize::Quad => Value::Quad(self as u64),\n\n }\n\n }\n\n}\n\n\n\nimpl IntoValue for i16 {\n", "file_path": "src/kernel/mod.rs", "rank": 63, "score": 22.159819424087207 }, { "content": " let stack_pointer = registers.general_purpose_registers\n\n [GeneralPurposeRegister::Rsp as usize]\n\n - value.len() as Pointer;\n\n registers.general_purpose_registers[GeneralPurposeRegister::Rsp as usize] = stack_pointer;\n\n\n\n // println!(\"PUSH {:08x}: {}\", stack_pointer, value);\n\n\n\n memory.write_random(stack_pointer, value)?;\n\n\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn pop_from_stack(\n\n memory: &ProcessMemory,\n\n registers: &mut RegisterFile,\n\n value_size: ValueSize,\n\n ) -> Result<Value> {\n\n let stack_pointer =\n\n registers.general_purpose_registers[GeneralPurposeRegister::Rsp as 
usize];\n", "file_path": "src/kernel/process/x86.rs", "rank": 64, "score": 21.755865231439792 }, { "content": " fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n\n ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as u16),\n\n ValueSize::Double => Value::Double(self as i32 as u32),\n\n ValueSize::Quad => Value::Quad(self as i64 as u64),\n\n }\n\n }\n\n}\n\n\n\nimpl IntoValue for u8 {\n\n fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n\n ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as u16),\n\n ValueSize::Double => Value::Double(self as u32),\n\n ValueSize::Quad => Value::Quad(self as u64),\n\n }\n\n }\n\n}\n", "file_path": "src/kernel/mod.rs", "rank": 65, "score": 21.699779196469407 }, { "content": " 0,\n\n 0xFFFF_FFFF_FFFF_F000,\n\n Some(read_callback),\n\n Some(write_callback),\n\n )?;\n\n unicorn.mem_protect(\n\n 0,\n\n 0xFFFF_FFFF_FFFF_F000,\n\n Permission::READ | Permission::WRITE | Permission::EXEC,\n\n )?;\n\n\n\n // Setup instruction printing for debug.\n\n unicorn.add_code_hook(0, 0xFFFF_FFFF_F000, |uc, address, size| {\n\n let mut bytes = [0u8; 15];\n\n\n\n let bytes_result = uc\n\n .get_data()\n\n .memory\n\n .read_random_bytes(address, &mut bytes[..size as usize]);\n\n\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 66, "score": 21.27096927733021 }, { "content": " 4 => Some(ValueSize::Double),\n\n 8 => Some(ValueSize::Quad),\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl GetValueSize for yaxpeax_x86::long_mode::Instruction {\n\n fn try_value_size(&self) -> Option<ValueSize> {\n\n if let Some(width) = self\n\n .mem_size()\n\n .and_then(|memory_size| memory_size.bytes_size())\n\n {\n\n match width {\n\n 1 => Some(ValueSize::Byte),\n\n 2 => Some(ValueSize::Word),\n\n 4 => Some(ValueSize::Double),\n", "file_path": "src/kernel/mod.rs", "rank": 67, "score": 21.030850052170106 }, { "content": " ValueSize::Double => 
Value::Double(self as u32),\n\n ValueSize::Quad => Value::Quad(self as u64),\n\n }\n\n }\n\n}\n\n\n\nimpl IntoValue for u32 {\n\n fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n\n ValueSize::Byte => Value::Byte(self as u8),\n\n ValueSize::Word => Value::Word(self as u16),\n\n ValueSize::Double => Value::Double(self as u32),\n\n ValueSize::Quad => Value::Quad(self as u64),\n\n }\n\n }\n\n}\n\n\n\nimpl IntoValue for i32 {\n\n fn into_value(self, size: ValueSize) -> Value {\n\n match size {\n", "file_path": "src/kernel/mod.rs", "rank": 68, "score": 20.91535729321926 }, { "content": " Opcode::CALL => {\n\n // Get the return pointer and push that onto the stack.\n\n // We need that to return later.\n\n let return_pointer = self.register_file.rip;\n\n Self::push_onto_stack(\n\n &self.memory,\n\n &mut self.register_file,\n\n Value::Quad(return_pointer),\n\n )?;\n\n\n\n // Get the new address we're jumping to.\n\n let new_address = return_pointer.wrapping_add(\n\n Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &instruction.operand(0),\n\n None,\n\n instruction,\n\n )?\n\n .as_signed_quad() as u64,\n", "file_path": "src/kernel/process/x86.rs", "rank": 69, "score": 20.867030666487878 }, { "content": " register_class::RIP => self.rip = value.as_pointer(),\n\n register_class::EIP => self.rip = value.as_pointer(),\n\n register_class::RFLAGS => unimplemented!(),\n\n register_class::EFLAGS => unimplemented!(),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for RegisterFile {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"RegisterFile\")\n\n .field(\n\n \"rax\",\n\n &self.general_purpose_registers[GeneralPurposeRegister::Rax as usize],\n\n )\n\n .field(\n\n \"rcx\",\n\n &self.general_purpose_registers[GeneralPurposeRegister::Rcx as usize],\n\n )\n\n .field(\n", "file_path": "src/kernel/process/x86.rs", "rank": 70, "score": 20.840160019963992 }, { "content": " sections.push(MemoryBlock::new(\n\n 
String::from_utf8_lossy(header.section_name()),\n\n header.addr(),\n\n Bytes::reference(base_data),\n\n readable,\n\n writable,\n\n executable,\n\n ));\n\n } else {\n\n blank_sections.push(BlankMemoryBlock::new(\n\n String::from_utf8_lossy(header.section_name()),\n\n header.addr() as Pointer,\n\n base_data.len() as Pointer,\n\n readable,\n\n writable,\n\n executable,\n\n ))\n\n }\n\n }\n\n }\n", "file_path": "src/kernel/mod.rs", "rank": 71, "score": 20.62050704849839 }, { "content": " let exit_code = request.arguments[0];\n\n\n\n terminated_process_list.push((request.process_id, exit_code));\n\n\n\n Ok(SyscallResult::Exit)\n\n }\n\n Sysno::write => {\n\n let file_handle = request.arguments[0];\n\n let message_pointer = request.arguments[1];\n\n let message_length = request.arguments[2];\n\n\n\n let mut file: Box<dyn std::io::Write> = match file_handle {\n\n 1 => Box::new(std::io::stdout()),\n\n 2 => Box::new(std::io::stderr()),\n\n _ => panic!(\"Non standard file handals are not yet supported.\"),\n\n };\n\n\n\n let memory_block = memory.get_memory_block(&message_pointer)?;\n\n\n\n if memory_block.is_read() {\n", "file_path": "src/kernel/mod.rs", "rank": 72, "score": 20.578001926781212 }, { "content": " }\n\n })?;\n\n\n\n Ok(Box::new(Self { unicorn }))\n\n }\n\n}\n\n\n\nimpl Process for UnicornX86Process {\n\n fn initalize(\n\n &mut self,\n\n process_id: ProcessId,\n\n entry_point: Pointer,\n\n stack_pointer: Pointer,\n\n at_exit_pointer: Pointer,\n\n memory: ProcessMemory,\n\n ) -> Result<()> {\n\n self.unicorn.get_data_mut().process_id = process_id;\n\n\n\n self.unicorn.reg_write(RegisterX86::RIP, entry_point)?;\n\n self.unicorn.reg_write(RegisterX86::RSP, stack_pointer)?;\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 73, "score": 20.47953787183957 }, { "content": " &mut block_reader,\n\n ) as u64;\n\n\n\n while let Ok(instruction) = decoder.decode(&mut block_reader) {\n\n instruction_addresses.insert(current_offset, instructions.len());\n\n 
instructions.push(instruction);\n\n\n\n current_offset += instruction.len();\n\n }\n\n\n\n // And we need to point into this location as well.\n\n self.instruction_index = self\n\n .instruction_addresses\n\n .get(&address)\n\n .copied()\n\n .ok_or(Error::Memory(MemoryError::SectionAliacing))?;\n\n\n\n Ok(())\n\n } else {\n\n // Not executable code.\n", "file_path": "src/kernel/process/x86.rs", "rank": 74, "score": 20.257570091391152 }, { "content": "// Copyright 2022 James Carl\n\n//\n\n// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or\n\n// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or\n\n// http://opensource.org/licenses/MIT>, at your option. This file may not be\n\n// copied, modified, or distributed except according to those terms.\n\n\n\nuse elf_rs::{Elf, ElfFile, SectionHeaderFlags, SectionType};\n\nuse std::{collections::HashMap, ffi::CString, sync::Arc};\n\nuse syscalls::Sysno;\n\nuse thiserror::Error;\n\n\n\nmod bytes;\n\nuse bytes::Bytes;\n\n\n\nmod memory;\n\nuse memory::*;\n\n\n\npub mod process;\n\nuse process::{Process, StepResult};\n", "file_path": "src/kernel/mod.rs", "rank": 75, "score": 20.033706342089015 }, { "content": "\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n #[error(\"Memory Error: {0}\")]\n\n Memory(#[from] MemoryError),\n\n\n\n #[error(\"Platform specific error: {0}\")]\n\n Custom(&'static str),\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\npub enum StepResult {\n\n Continue,\n\n InvalidInstruction,\n\n Syscall(SyscallRequest),\n\n}\n\n\n", "file_path": "src/kernel/process/mod.rs", "rank": 76, "score": 20.009616227034236 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn run_instruction(&mut self) -> Result<StepResult> {\n\n let instruction = self.instruction_iterator.next()?;\n\n\n\n let instruction_length = instruction.len().to_const();\n\n\n\n // println!(\"REGISTERS: {:016x?}\", registers);\n\n\n\n // let mut instruction_bytes = [0u8; 
15];\n\n\n\n // memory.read_random(\n\n // registers.rip,\n\n // &mut instruction_bytes[..instruction_length as usize],\n\n // )?;\n\n // println!(\n\n // \"{:08x} {:<30}{}\",\n", "file_path": "src/kernel/process/x86.rs", "rank": 77, "score": 19.526379986787255 }, { "content": " _ => panic!(\"Unsupported value width: {}\", bytes.len()),\n\n }\n\n }\n\n\n\n pub fn as_pointer(&self) -> Pointer {\n\n match self {\n\n Self::Byte(v) => *v as Pointer,\n\n Self::Word(v) => *v as Pointer,\n\n Self::Double(v) => *v as Pointer,\n\n Self::Quad(v) => *v as Pointer,\n\n }\n\n }\n\n\n\n pub fn as_quad(&self) -> u64 {\n\n match self {\n\n Self::Byte(v) => *v as u64,\n\n Self::Word(v) => *v as u64,\n\n Self::Double(v) => *v as u64,\n\n Self::Quad(v) => *v as u64,\n\n }\n", "file_path": "src/kernel/mod.rs", "rank": 78, "score": 19.45821323738732 }, { "content": "// Copyright 2022 James Carl\n\n//\n\n// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or\n\n// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or\n\n// http://opensource.org/licenses/MIT>, at your option. 
This file may not be\n\n// copied, modified, or distributed except according to those terms.\n\n\n\nuse super::{Error, Process, Result, StepResult};\n\nuse crate::kernel::{\n\n memory::ProcessMemory, process::Error as ProcessError, Pointer, ProcessId, SyscallRequest,\n\n Value, ValueSize,\n\n};\n\nuse unicorn_engine::{\n\n unicorn_const::{uc_error, Arch, Mode, Permission},\n\n InsnSysX86, RegisterX86, Unicorn,\n\n};\n\nuse yaxpeax_arch::{Decoder, U8Reader};\n\nuse yaxpeax_x86::amd64::InstDecoder;\n\n\n\n// So we can just happily return Unicorn errors.\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 79, "score": 19.351752089054553 }, { "content": " if let Some(syscall_result) = syscall_result {\n\n self.unicorn.reg_write(RegisterX86::RAX, syscall_result)?;\n\n }\n\n\n\n let pc = self.unicorn.reg_read(RegisterX86::RIP)?;\n\n\n\n // FIXME if we return early (say to handle a syscall) this doesn't report that we went under budget.\n\n self.unicorn\n\n .emu_start(pc, 0xFFFF_FFFF_FFFF_FFFF, 0, instruction_count as usize)?;\n\n\n\n if let Some(error) = self.unicorn.get_data_mut().error.take() {\n\n Err(error)\n\n } else if let Some(syscall) = self.unicorn.get_data_mut().syscall_request.take() {\n\n Ok((StepResult::Syscall(syscall), 0))\n\n } else {\n\n Ok((StepResult::Continue, 0))\n\n }\n\n }\n\n\n\n fn memory(&mut self) -> &ProcessMemory {\n\n &self.unicorn.get_data().memory\n\n }\n\n}\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 80, "score": 18.947083933111326 }, { "content": " 2 => ValueSize::Word,\n\n 4 => ValueSize::Double,\n\n 8 => ValueSize::Quad,\n\n _ => panic!(\"Invalid operand size.\"),\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n match self {\n\n ValueSize::Byte => 1,\n\n ValueSize::Word => 2,\n\n ValueSize::Double => 4,\n\n ValueSize::Quad => 8,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/mod.rs", "rank": 81, "score": 18.14812997244977 }, { "content": " 8 => Some(ValueSize::Quad),\n\n _ => None,\n\n }\n\n } else 
{\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl From<u64> for Value {\n\n fn from(val: u64) -> Self {\n\n Value::Quad(val)\n\n }\n\n}\n\n\n\nimpl From<i64> for Value {\n\n fn from(val: i64) -> Self {\n\n Value::Quad(val as u64)\n\n }\n\n}\n", "file_path": "src/kernel/mod.rs", "rank": 82, "score": 18.065549791233963 }, { "content": " .try_value_size()\n\n .unwrap_or_else(|| instruction.value_size())\n\n },\n\n ),\n\n _ => operand\n\n .try_value_size()\n\n .unwrap_or_else(|| instruction.value_size()),\n\n };\n\n\n\n Ok(match *operand {\n\n Operand::ImmediateI8(v) => Value::Byte(v as u8).dynamic_signed_cast(value_size),\n\n Operand::ImmediateU8(v) => Value::Byte(v).dynamic_cast(value_size),\n\n Operand::ImmediateI16(v) => Value::Word(v as u16).dynamic_signed_cast(value_size),\n\n Operand::ImmediateU16(v) => Value::Word(v).dynamic_cast(value_size),\n\n Operand::ImmediateI32(v) => Value::Double(v as u32).dynamic_signed_cast(value_size),\n\n Operand::ImmediateU32(v) => Value::Double(v).dynamic_cast(value_size),\n\n Operand::ImmediateI64(v) => Value::Quad(v as u64).dynamic_signed_cast(value_size),\n\n Operand::ImmediateU64(v) => Value::Quad(v).dynamic_cast(value_size),\n\n Operand::Register(spec) => registers.get(spec),\n\n // Operand::RegisterMaskMerge(_, _, _) => todo!(),\n", "file_path": "src/kernel/process/x86.rs", "rank": 83, "score": 17.835173144053027 }, { "content": " );\n\n\n\n Self::jump(\n\n &self.memory,\n\n &mut self.register_file,\n\n &mut self.instruction_iterator,\n\n new_address,\n\n )?;\n\n }\n\n // Opcode::CALLF => todo!(),\n\n // Opcode::JMP => todo!(),\n\n // Opcode::JMPF => todo!(),\n\n Opcode::PUSH => {\n\n // We need to figure out the width of our instruction.\n\n let operand = instruction.operand(0);\n\n let value = Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &operand,\n\n None,\n", "file_path": "src/kernel/process/x86.rs", "rank": 84, "score": 17.73831739342635 }, { "content": " // registers.rip,\n\n // format!(\"{:02x?}\", 
&instruction_bytes[..instruction_length as usize]),\n\n // instruction\n\n // );\n\n\n\n println!(\"{:08x} {}\", self.register_file.rip, instruction);\n\n\n\n // Update instruction pointer.\n\n self.register_file.rip += instruction_length;\n\n\n\n match instruction.opcode() {\n\n Opcode::Invalid => return Ok(StepResult::InvalidInstruction),\n\n Opcode::ADD => {\n\n let source = instruction.operand(1);\n\n let target = instruction.operand(0);\n\n\n\n let a = Self::read_operand(\n\n &self.memory,\n\n &self.register_file,\n\n &target,\n", "file_path": "src/kernel/process/x86.rs", "rank": 85, "score": 17.687484511787623 }, { "content": " }\n\n\n\n fn memory(&mut self) -> &ProcessMemory {\n\n &self.memory\n\n }\n\n}\n\n\n\nimpl X86Process {\n\n pub fn new() -> Box<Self> {\n\n Box::new(Self {\n\n process_id: 0,\n\n register_file: RegisterFile::new(),\n\n memory: ProcessMemory::new(),\n\n instruction_iterator: InstructionIterator::new(),\n\n })\n\n }\n\n\n\n fn read_operand(\n\n memory: &ProcessMemory,\n\n registers: &RegisterFile,\n", "file_path": "src/kernel/process/x86.rs", "rank": 86, "score": 17.622597920517794 }, { "content": " let return_pointer =\n\n Self::pop_from_stack(&self.memory, &mut self.register_file, ValueSize::Quad)?;\n\n\n\n Self::jump(\n\n &self.memory,\n\n &mut self.register_file,\n\n &mut self.instruction_iterator,\n\n return_pointer.as_pointer(),\n\n )?;\n\n }\n\n // Opcode::PUSHF => todo!(),\n\n // Opcode::WAIT => todo!(),\n\n // Opcode::CBW => todo!(),\n\n // Opcode::CWDE => todo!(),\n\n // Opcode::CDQE => todo!(),\n\n // Opcode::CWD => todo!(),\n\n // Opcode::CDQ => todo!(),\n\n // Opcode::CQO => todo!(),\n\n // Opcode::LODS => todo!(),\n\n // Opcode::STOS => todo!(),\n", "file_path": "src/kernel/process/x86.rs", "rank": 87, "score": 17.39699370712867 }, { "content": " &source,\n\n Some(&target),\n\n instruction,\n\n )?;\n\n let to_move = to_move.dynamic_signed_cast(target.value_size());\n\n\n\n Self::write_target(&self.memory, &mut 
self.register_file, target, to_move)?;\n\n }\n\n // Opcode::SAR => todo!(),\n\n // Opcode::SAL => todo!(),\n\n // Opcode::SHR => todo!(),\n\n // Opcode::SHRD => todo!(),\n\n // Opcode::SHL => todo!(),\n\n // Opcode::RCR => todo!(),\n\n // Opcode::RCL => todo!(),\n\n // Opcode::ROR => todo!(),\n\n // Opcode::ROL => todo!(),\n\n // Opcode::INC => todo!(),\n\n // Opcode::DEC => todo!(),\n\n Opcode::HLT => return Ok(StepResult::InvalidInstruction), // Halt is not allowed in the userland.\n", "file_path": "src/kernel/process/x86.rs", "rank": 88, "score": 16.944969517382162 }, { "content": " memory.new_blank_block(section.clone())?;\n\n }\n\n\n\n let (stack_pointer, stack) = Self::build_stack(arguments)?;\n\n\n\n memory.new_block(stack)?;\n\n\n\n let process_id = self.next_pid;\n\n self.next_pid += 1;\n\n\n\n process.initalize(\n\n process_id,\n\n executable.entry_point,\n\n stack_pointer,\n\n 0xdeafbeef,\n\n memory,\n\n )?;\n\n\n\n // Store it for execution later.\n\n self.processes.insert(process_id, process);\n", "file_path": "src/kernel/mod.rs", "rank": 89, "score": 16.491328829782248 }, { "content": "\n\nimpl From<u32> for Value {\n\n fn from(val: u32) -> Self {\n\n Value::Double(val)\n\n }\n\n}\n\n\n\nimpl From<i32> for Value {\n\n fn from(val: i32) -> Self {\n\n Value::Double(val as u32)\n\n }\n\n}\n\n\n\nimpl From<u16> for Value {\n\n fn from(val: u16) -> Self {\n\n Value::Word(val)\n\n }\n\n}\n\n\n\nimpl From<i16> for Value {\n", "file_path": "src/kernel/mod.rs", "rank": 90, "score": 16.373075292112464 }, { "content": " Self::Double(v) => *v as i8,\n\n Self::Quad(v) => *v as i8,\n\n }\n\n }\n\n\n\n pub fn to_bytes(&self) -> [u8; 8] {\n\n let mut bytes = [0u8; 8];\n\n\n\n match self {\n\n Self::Byte(v) => bytes[..1].copy_from_slice(&v.to_le_bytes()),\n\n Self::Word(v) => bytes[..2].copy_from_slice(&v.to_le_bytes()),\n\n Self::Double(v) => bytes[..4].copy_from_slice(&v.to_le_bytes()),\n\n Self::Quad(v) => bytes.copy_from_slice(&v.to_le_bytes()),\n\n }\n\n\n\n 
bytes\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n match self {\n", "file_path": "src/kernel/mod.rs", "rank": 91, "score": 16.292157399612062 }, { "content": " };\n\n\n\n let argument_count = arguments.len() as Pointer;\n\n\n\n // Push argument strings.\n\n let mut argument_pointers = Vec::new();\n\n argument_pointers.reserve(arguments.len());\n\n\n\n for argument in arguments {\n\n let argument = CString::new(argument)?;\n\n let pointer = push(argument.as_bytes_with_nul());\n\n argument_pointers.push(memory_start + pointer);\n\n }\n\n argument_pointers.reverse();\n\n\n\n // Push null aux vector.\n\n push(&[0u8; POINTER_WIDTH as usize * 2]); // Must end with two null words.\n\n\n\n // Push environment pointers. (unimplemented)\n\n push(&[0u8; POINTER_WIDTH as usize]); // Must end with null word.\n", "file_path": "src/kernel/mod.rs", "rank": 92, "score": 15.991959037242877 }, { "content": "}\n\n\n\npub struct X86Process {\n\n process_id: ProcessId,\n\n register_file: RegisterFile,\n\n memory: ProcessMemory,\n\n instruction_iterator: InstructionIterator,\n\n}\n\n\n\nimpl Process for X86Process {\n\n fn initalize(\n\n &mut self,\n\n process_id: ProcessId,\n\n entry_point: Pointer,\n\n stack_pointer: Pointer,\n\n at_exit_pointer: Pointer,\n\n memory: ProcessMemory,\n\n ) -> super::Result<()> {\n\n self.process_id = process_id;\n\n self.memory = memory;\n", "file_path": "src/kernel/process/x86.rs", "rank": 93, "score": 15.474055562476769 }, { "content": "\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n #[error(\"Invalid syscall: {0}\")]\n\n InvalidSyscall(u64),\n\n\n\n #[error(\"Io Error: {0}\")]\n\n Io(#[from] std::io::Error),\n\n\n\n #[error(\"Memory Error: {0}\")]\n\n Memory(#[from] memory::Error),\n\n\n\n #[error(\"CPU Error: {0}\")]\n\n Process(#[from] process::Error),\n\n\n\n #[error(\"CString has invalid format: {0}\")]\n\n StringFormat(#[from] std::ffi::NulError),\n\n}\n\n\n", "file_path": "src/kernel/mod.rs", "rank": 94, "score": 15.388273864004361 }, { 
"content": " uc.reg_read(RegisterX86::R9)?,\n\n ];\n\n\n\n let data = uc.get_data_mut();\n\n\n\n data.syscall_request = Some(SyscallRequest {\n\n process_id: data.process_id,\n\n call_code,\n\n arguments,\n\n });\n\n\n\n Ok(())\n\n };\n\n\n\n let result = trampoline();\n\n drop(trampoline);\n\n\n\n // Handle errors, if they happen.\n\n if let Err(error) = result {\n\n let data = uc.get_data_mut();\n", "file_path": "src/kernel/process/unicorn_x86.rs", "rank": 95, "score": 15.349130156636669 }, { "content": " pub fn as_byte(&self) -> u8 {\n\n match self {\n\n Self::Byte(v) => *v as u8,\n\n Self::Word(v) => *v as u8,\n\n Self::Double(v) => *v as u8,\n\n Self::Quad(v) => *v as u8,\n\n }\n\n }\n\n\n\n pub fn as_signed_quad(&self) -> i64 {\n\n match self {\n\n Self::Byte(v) => *v as i8 as i64,\n\n Self::Word(v) => *v as i16 as i64,\n\n Self::Double(v) => *v as i32 as i64,\n\n Self::Quad(v) => *v as i64,\n\n }\n\n }\n\n\n\n pub fn as_signed_double(&self) -> i32 {\n\n match self {\n", "file_path": "src/kernel/mod.rs", "rank": 96, "score": 15.265870998510284 }, { "content": " register_class::X => unimplemented!(),\n\n register_class::Y => unimplemented!(),\n\n register_class::Z => unimplemented!(),\n\n register_class::ST => unimplemented!(),\n\n register_class::MM => unimplemented!(),\n\n register_class::K => unimplemented!(),\n\n register_class::RIP => ValueSize::Quad,\n\n register_class::EIP => ValueSize::Double,\n\n register_class::RFLAGS => unimplemented!(),\n\n register_class::EFLAGS => unimplemented!(),\n\n })\n\n }\n\n}\n\n\n\nimpl GetValueSize for yaxpeax_x86::long_mode::Operand {\n\n fn try_value_size(&self) -> Option<ValueSize> {\n\n if let Some(width) = self.width() {\n\n match width {\n\n 1 => Some(ValueSize::Byte),\n\n 2 => Some(ValueSize::Word),\n", "file_path": "src/kernel/mod.rs", "rank": 97, "score": 14.951115435299249 }, { "content": "\n\n Self::jump(\n\n &self.memory,\n\n &mut self.register_file,\n\n &mut self.instruction_iterator,\n\n entry_point,\n\n 
)?;\n\n\n\n self.register_file.general_purpose_registers[GeneralPurposeRegister::Rdx as usize] =\n\n at_exit_pointer;\n\n self.register_file.general_purpose_registers[GeneralPurposeRegister::Rsp as usize] =\n\n stack_pointer;\n\n\n\n Ok(())\n\n }\n\n\n\n fn step(\n\n &mut self,\n\n instruction_count: u64,\n\n syscall_result: Option<Pointer>,\n", "file_path": "src/kernel/process/x86.rs", "rank": 98, "score": 14.646937872756396 }, { "content": " u8::from_le_bytes(integer_bytes)\n\n }),\n\n 2 => Self::Word({\n\n let mut integer_bytes = [0u8; 2];\n\n integer_bytes.copy_from_slice(bytes);\n\n\n\n u16::from_le_bytes(integer_bytes)\n\n }),\n\n 4 => Self::Double({\n\n let mut integer_bytes = [0u8; 4];\n\n integer_bytes.copy_from_slice(bytes);\n\n\n\n u32::from_le_bytes(integer_bytes)\n\n }),\n\n 8 => Self::Quad({\n\n let mut integer_bytes = [0u8; 8];\n\n integer_bytes.copy_from_slice(bytes);\n\n\n\n u64::from_le_bytes(integer_bytes)\n\n }),\n", "file_path": "src/kernel/mod.rs", "rank": 99, "score": 14.553352868091991 } ]
Rust
prisma-fmt/src/text_document_completion.rs
ever0de/prisma-engines
4c9d4edf238ad9c4a706eb5b7201ee0b4ebee93e
use datamodel::{ datamodel_connector::{Connector, ReferentialIntegrity}, parse_configuration, parse_schema_ast, schema_ast::ast, }; use log::*; use lsp_types::*; pub(crate) fn empty_completion_list() -> CompletionList { CompletionList { is_incomplete: true, items: Vec::new(), } } pub(crate) fn completion(schema: &str, params: CompletionParams) -> CompletionList { let schema_ast = if let Ok(schema_ast) = parse_schema_ast(schema) { schema_ast } else { warn!("Failed to parse schema AST in completion request."); return empty_completion_list(); }; let position = if let Some(pos) = position_to_offset(&params.text_document_position.position, schema) { pos } else { warn!("Received a position outside of the document boundaries in CompletionParams"); return empty_completion_list(); }; let (connector, referential_integrity) = parse_configuration(schema) .ok() .and_then(|conf| conf.subject.datasources.into_iter().next()) .map(|datasource| (datasource.active_connector, datasource.referential_integrity())) .unwrap_or_else(|| { ( &datamodel::datamodel_connector::EmptyDatamodelConnector, Default::default(), ) }); let mut list = CompletionList { is_incomplete: false, items: Vec::new(), }; push_ast_completions(&mut list, connector, referential_integrity, &schema_ast, position); list } fn position_to_offset(position: &Position, document: &str) -> Option<usize> { let mut offset = 0; let mut line_offset = position.line; let mut character_offset = position.character; let mut chars = document.chars(); while line_offset > 0 { loop { match chars.next() { Some('\n') => { offset += 1; break; } Some(_) => { offset += 1; } None => return None, } } line_offset -= 1; } while character_offset > 0 { match chars.next() { Some('\n') | None => return None, Some(_) => { offset += 1; character_offset -= 1; } } } Some(offset) } fn push_ast_completions( completion_list: &mut CompletionList, connector: &'static dyn Connector, referential_integrity: ReferentialIntegrity, ast: &ast::SchemaAst, position: 
usize, ) { match ast.find_at_position(position) { ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("default", _, None)), ) => { if connector.has_capability(datamodel::datamodel_connector::ConnectorCapability::NamedDefaultValues) { completion_list.items.push(CompletionItem { label: "map: ".to_owned(), kind: Some(CompletionItemKind::PROPERTY), ..Default::default() }) } } ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("relation", _, Some(attr_name))), ) if attr_name == "onDelete" || attr_name == "onUpdate" => { for referential_action in connector.referential_actions(&referential_integrity).iter() { completion_list.items.push(CompletionItem { label: referential_action.as_str().to_owned(), kind: Some(CompletionItemKind::ENUM), detail: Some(referential_action.documentation().to_owned()), ..Default::default() }); } } _ => (), } } #[test] fn position_to_offset_with_crlf() { let schema = "\r\nmodel Test {\r\n id Int @id\r\n}"; let expected_offset = schema.chars().position(|c| c == 'i').unwrap(); let found_offset = position_to_offset(&Position { line: 2, character: 4 }, schema).unwrap(); assert_eq!(found_offset, expected_offset); }
use datamodel::{ datamodel_connector::{Connector, ReferentialIntegrity}, parse_configuration, parse_schema_ast, schema_ast::ast, }; use log::*; use lsp_types::*; pub(crate) fn empty_completion_list() -> CompletionList { CompletionList { is_incomplete: true, items: Vec::new(), } } pub(crate) fn completion(schema: &str, params: CompletionParams) -> CompletionList { let schema_ast = if let Ok(schema_ast) = parse_schema_ast(schema) { schema_ast } else { warn!("Failed to parse schema AST in completion request."); return empty_completion_list(); }; let position = if let Some(pos) = position_to_offset(&params.text_document_position.position, schema) { pos } else { warn!("Received a position outside of the document boundaries in CompletionParams"); return empty_completion_list(); }; let (connector, referential_integrity) = parse_configuration(schema) .ok() .and_then(|conf| conf.subject.datasources.into_iter().next()) .map(|datasource| (datasource.active_connector, datasource.referential_integrity())) .unwrap_or_else(|| { ( &datamodel::datamodel_connector::EmptyDatamodelConnector, Default::default(), ) }); let mut list = CompletionList { is_incomplete: false, items: Vec::new(), }; push_ast_completions(&mut list, connector, referential_integrity, &schema_ast, position); list } fn position_to_offset(position: &Position, document: &str) -> Option<usize> { let mut offset = 0; let mut line_offset = position.line; let mut character_offset = position.character; let mut chars = document.chars(); while line_offset > 0 { loop { match chars.next() { Some('\n') => { offset += 1; break; } Some(_) => { offset += 1; } None => return None, } } line_offset -= 1; } while character_offset > 0 { match chars.next() { Some('\n') | None => return None, Some(_) => { offset += 1; character_offset -= 1; } } } Some(offset) } fn push_ast_completions( completion_list: &mut CompletionList, connector: &'static dyn Connector, referential_integrity: ReferentialIntegrity, ast: &ast::SchemaAst, position: 
usize, ) { match ast.find_at_position(position) { ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("default", _, None)), ) => { if connector.has_capability(datamodel::datamodel_connector::ConnectorCapability::NamedDefaultValues) { completion_list.items.push(CompletionItem { label: "map: ".to_owned(), kind: Some(CompletionItemKind::PROPERT
#[test] fn position_to_offset_with_crlf() { let schema = "\r\nmodel Test {\r\n id Int @id\r\n}"; let expected_offset = schema.chars().position(|c| c == 'i').unwrap(); let found_offset = position_to_offset(&Position { line: 2, character: 4 }, schema).unwrap(); assert_eq!(found_offset, expected_offset); }
Y), ..Default::default() }) } } ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("relation", _, Some(attr_name))), ) if attr_name == "onDelete" || attr_name == "onUpdate" => { for referential_action in connector.referential_actions(&referential_integrity).iter() { completion_list.items.push(CompletionItem { label: referential_action.as_str().to_owned(), kind: Some(CompletionItemKind::ENUM), detail: Some(referential_action.documentation().to_owned()), ..Default::default() }); } } _ => (), } }
function_block-function_prefixed
[]
Rust
MBExtender/rust/speed-boostair/src/dds_loader.rs
RandomityGuy/MBExtender
5b5a4b5f8f9aafe76c6f50ab75f8a214ef22cb3c
use crate::dds_types::*; use crate::gl::{self, types::*}; use crate::io; use log::{debug, error, trace}; use mbx::core::{self, ResourceInstance, Stream}; use mbx::dgl::{self, BitmapFormat, GBitmap, TextureObject}; use mbx::prelude::*; use mbx::util; use std::error::Error; use std::ffi::CString; use std::mem; use std::os::raw::c_char; use std::ptr; use std::time::Instant; enum TextureLayout { Pixels(usize), Blocks(usize), } impl TextureLayout { fn data_size(&self, width: u32, height: u32) -> usize { match *self { Self::Pixels(bpp) => (width as usize) * (height as usize) * bpp, Self::Blocks(block_size) => { let width_blocks = (width as usize + 3) / 4; let height_blocks = (height as usize + 3) / 4; width_blocks * height_blocks * block_size } } } } impl From<BitmapFormat> for TextureLayout { fn from(format: BitmapFormat) -> Self { match format { BitmapFormat::Palettized => Self::Pixels(1), BitmapFormat::Intensity => Self::Pixels(1), BitmapFormat::Rgb => Self::Pixels(3), BitmapFormat::Rgba => Self::Pixels(4), BitmapFormat::Alpha => Self::Pixels(1), BitmapFormat::Rgb565 => Self::Pixels(2), BitmapFormat::Rgb5551 => Self::Pixels(2), BitmapFormat::Luminance => Self::Pixels(1), BitmapFormat::XDxt1 => Self::Blocks(8), BitmapFormat::XDxt3 => Self::Blocks(16), BitmapFormat::XDxt5 => Self::Blocks(16), BitmapFormat::XBc5S => Self::Blocks(16), BitmapFormat::XBc5U => Self::Blocks(16), } } } fn dds_bitmap_format(format: &PixelFormat) -> Result<BitmapFormat, &'static str> { match *format { DDSPF_R8G8B8 => Ok(BitmapFormat::Rgb), DDSPF_A8R8G8B8 => Ok(BitmapFormat::Rgba), DDSPF_A8 => Ok(BitmapFormat::Alpha), DDSPF_R5G6B5 => Ok(BitmapFormat::Rgb565), DDSPF_A1R5G5B5 => Ok(BitmapFormat::Rgb5551), DDSPF_L8 => Ok(BitmapFormat::Luminance), DDSPF_DXT1 => Ok(BitmapFormat::XDxt1), DDSPF_DXT3 => Ok(BitmapFormat::XDxt3), DDSPF_DXT5 => Ok(BitmapFormat::XDxt5), DDSPF_BC5_SNORM => Ok(BitmapFormat::XBc5S), DDSPF_BC5_UNORM | DDSPF_ATI2 => Ok(BitmapFormat::XBc5U), _ => Err("unsupported texture format"), 
} } fn swap_channels(data: &mut [u8], channels: usize) { for pixel in data.chunks_exact_mut(channels) { pixel.swap(0, 2); } } fn do_read_dds(stream: &mut Stream) -> Result<Box<GBitmap>, Box<dyn Error>> { let start_time = Instant::now(); let magic: [u8; 4] = io::read_val(stream)?; if magic != DDS_MAGIC { return Err("bad magic".into()); } let header: Header = io::read_val(stream)?; trace!("DDS header: {:?}", header); if header.size as usize != mem::size_of::<Header>() { return Err("unrecognized header size".into()); } else if header.flags & DDS_HEADER_FLAGS_TEXTURE != DDS_HEADER_FLAGS_TEXTURE { return Err("missing texture information".into()); } else if header.flags & DDS_HEADER_FLAGS_VOLUME != 0 || header.caps2 != 0 { return Err("volumetric and cubemap textures are not supported".into()); } else if header.width == 0 || header.height == 0 { return Err("invalid texture size".into()); } let mut bitmap = GBitmap::empty(); bitmap.width = header.width; bitmap.height = header.height; bitmap.format = dds_bitmap_format(&header.format)?; let layout = TextureLayout::from(bitmap.format); bitmap.bytes_per_pixel = match layout { TextureLayout::Pixels(bpp) => bpp as u32, TextureLayout::Blocks(_) => 0, }; let mut mip_levels = 1; if header.flags & DDS_HEADER_FLAGS_MIPMAP != 0 { mip_levels = header.mip_map_count.max(1).min(10); } bitmap.num_mip_levels = mip_levels; let mut total_size = 0; for i in 0..mip_levels { bitmap.mip_level_offsets[i as usize] = total_size as u32; total_size += layout.data_size(bitmap.mip_width(i), bitmap.mip_height(i)); } bitmap.byte_size = total_size as u32; let mut data: Vec<u8> = io::read_array(stream, total_size)?; match bitmap.format { BitmapFormat::Rgb => swap_channels(&mut data, 3), BitmapFormat::Rgba => swap_channels(&mut data, 4), _ => (), } debug!("Loaded {}x{} DDS in {:?}", bitmap.width, bitmap.height, start_time.elapsed()); bitmap.bits = util::leak_vec_ptr(data); Ok(bitmap) } extern "C" fn read_dds(stream: &mut Stream) -> *mut ResourceInstance { 
match do_read_dds(stream) { Ok(bitmap) => Box::into_raw(bitmap).cast(), Err(e) => { error!("Error loading DDS file: {}", e); ptr::null_mut() } } } #[fn_override(original_create_gl_name)] unsafe fn my_create_gl_name( bitmap: &mut GBitmap, clamp_to_edge: bool, first_mip: u32, texture_type: u32, to: &mut TextureObject, ) -> bool { let gl_format = match bitmap.format { BitmapFormat::XDxt1 => gl::COMPRESSED_RGB_S3TC_DXT1_EXT, BitmapFormat::XDxt3 => gl::COMPRESSED_RGBA_S3TC_DXT3_EXT, BitmapFormat::XDxt5 => gl::COMPRESSED_RGBA_S3TC_DXT5_EXT, BitmapFormat::XBc5S => gl::COMPRESSED_SIGNED_RG_RGTC2, BitmapFormat::XBc5U => gl::COMPRESSED_RG_RGTC2, _ => return original_create_gl_name(bitmap, clamp_to_edge, first_mip, texture_type, to), }; gl::GenTextures(1, &mut to.gl_texture_name); gl::BindTexture(gl::TEXTURE_2D, to.gl_texture_name); let layout = TextureLayout::from(bitmap.format); for i in first_mip..bitmap.num_mip_levels { let width = bitmap.mip_width(i); let height = bitmap.mip_height(i); let size = layout.data_size(width, height); gl::CompressedTexImage2D( gl::TEXTURE_2D, (i - first_mip) as GLint, gl_format, width as GLint, height as GLint, 0, size as GLsizei, bitmap.mip_bits(i).cast(), ); } to.texture_width = bitmap.mip_width(first_mip); to.texture_height = bitmap.mip_height(first_mip); let (min_filter, mag_filter) = if to.filter_nearest { (gl::NEAREST, gl::NEAREST) } else if bitmap.num_mip_levels - first_mip > 1 { (gl::LINEAR_MIPMAP_LINEAR, gl::LINEAR) } else { (gl::LINEAR, gl::LINEAR) }; gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, min_filter as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, mag_filter as GLint); if clamp_to_edge { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as GLint); } else { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::REPEAT as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, 
gl::REPEAT as GLint); } true } #[method_override(original_extrude_mip_levels)] unsafe fn my_extrude_mip_levels(this: &mut GBitmap, clear_borders: bool) { if let TextureLayout::Blocks(_) = this.format.into() { return; } if this.num_mip_levels > 1 { return; } original_extrude_mip_levels(this, clear_borders); } tge_statics! { static mut EXT1: *const c_char = tge_addr!(0x65d908, 0x2da900); static mut EXT2: *const c_char = tge_addr!(0x65d924, 0x2da908); } const DDS_EXTENSION: &'static str = ".dds"; pub fn init(plugin: &Plugin) -> Result<(), &'static str> { unsafe { let ext = CString::new(DDS_EXTENSION).unwrap().into_raw(); *EXT1 = ext; *EXT2 = ext; (*core::TGE_RESOURCE_MANAGER).register_extension(DDS_EXTENSION, read_dds); } plugin.intercept(dgl::tge_create_gl_name, my_create_gl_name, &original_create_gl_name)?; plugin.intercept( dgl::tge_extrude_mip_levels, my_extrude_mip_levels, &original_extrude_mip_levels, )?; Ok(()) }
use crate::dds_types::*; use crate::gl::{self, types::*}; use crate::io; use log::{debug, error, trace}; use mbx::core::{self, ResourceInstance, Stream}; use mbx::dgl::{self, BitmapFormat, GBitmap, TextureObject}; use mbx::prelude::*; use mbx::util; use std::error::Error; use std::ffi::CString; use std::mem; use std::os::raw::c_char; use std::ptr; use std::time::Instant; enum TextureLayout { Pixels(usize), Blocks(usize), } impl TextureLayout { fn data_size(&self, width: u32, height: u32) -> usize { match *self { Self::Pixels(bpp) => (width as usize) * (height as usize) * bpp, Self::Blocks(block_size) => { let width_blocks = (width as usize + 3) / 4; let height_blocks = (height as usize + 3) / 4; width_blocks * height_blocks * block_size } } } } impl From<BitmapFormat> for TextureLayout { fn from(format: BitmapFormat) -> Self { match format { BitmapFormat::Palettized => Self::Pixels(1), BitmapFormat::Intensity => Self::Pixels(1), BitmapFormat::Rgb => Self::Pixels(3), BitmapFormat::Rgba => Self::Pixels(4), BitmapFormat::Alpha => Self::Pixels(1), BitmapFormat::Rgb565 => Self::Pixels(2), BitmapFormat::Rgb5551 => Self::Pixels(2), BitmapFormat::Luminance => Self::Pixels(1), BitmapFormat::XDxt1 => Self::Blocks(8), BitmapFormat::XDxt3 => Self::Blocks(16), BitmapFormat::XDxt5 => Self::Blocks(16), BitmapFormat::XBc5S => Self::Blocks(16), BitmapFormat::XBc5U => Self::Blocks(16), } } } fn dds_bitmap_format(format: &PixelFormat) -> Result<BitmapFormat, &'static str> { match *format { DDSPF_R8G8B8 => Ok(BitmapFormat::Rgb), DDSPF_A8R8G8B8 => Ok(BitmapFormat::Rgba), DDSPF_A8 => Ok(BitmapFormat::Alpha), DDSPF_R5G6B5 => Ok(BitmapFormat::Rgb565), DDSPF_A1R5G5B5 => Ok(BitmapFormat::Rgb5551), DDSPF_L8 => Ok(BitmapFormat::Luminance), DDSPF_DXT1 => Ok(BitmapFormat::XDxt1), DDSPF_DXT3 => Ok(BitmapFormat::XDxt3), DDSPF_DXT5 => Ok(BitmapFormat::XDxt5), DDSPF_BC5_SNORM => Ok(BitmapFormat::XBc5S), DDSPF_BC5_UNORM | DDSPF_ATI2 => Ok(BitmapFormat::XBc5U), _ => Err("unsupported texture format"), 
} } fn swap_channels(data: &mut [u8], channels: usize) { for pixel in data.chunks_exact_mut(channels) { pixel.swap(0, 2); } } fn do_read_dds(stream: &mut Stream) -> Result<Box<GBitmap>, Box<dyn Error>> { let start_time = Instant::now(); let magic: [u8; 4] = io::read_val(stream)?; if magic != DDS_MAGIC { return Err("bad magic".into()); } let header: Header = io::read_val(stream)?; trace!("DDS header: {:?}", header); if header.size as usize != mem::size_of::<Header>() { return Err("unrecognized header size".into()); } else if header.flags & DDS_HEADER_FLAGS_TEXTURE != DDS_HEADER_FLAGS_TEXTURE { return Err("missing texture information".into()); } else if header.flags & DDS_HEADER_FLAGS_VOLUME != 0 || header.caps2 != 0 { return Err("volumetric and cubemap textures are not supported".into()); } else if header.width == 0 || header.height == 0 { return Err("invalid texture size".into()); } let mut bitmap = GBitmap::empty(); bitmap.width = header.width; bitmap.height = header.height; bitmap.format = dds_bitmap_format(&header.format)?; let layout = TextureLayout::from(bitmap.format); bitmap.bytes_per_pixel = match layout { TextureLayout::P
l::TEXTURE_MIN_FILTER, min_filter as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, mag_filter as GLint); if clamp_to_edge { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as GLint); } else { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::REPEAT as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::REPEAT as GLint); } true } #[method_override(original_extrude_mip_levels)] unsafe fn my_extrude_mip_levels(this: &mut GBitmap, clear_borders: bool) { if let TextureLayout::Blocks(_) = this.format.into() { return; } if this.num_mip_levels > 1 { return; } original_extrude_mip_levels(this, clear_borders); } tge_statics! { static mut EXT1: *const c_char = tge_addr!(0x65d908, 0x2da900); static mut EXT2: *const c_char = tge_addr!(0x65d924, 0x2da908); } const DDS_EXTENSION: &'static str = ".dds"; pub fn init(plugin: &Plugin) -> Result<(), &'static str> { unsafe { let ext = CString::new(DDS_EXTENSION).unwrap().into_raw(); *EXT1 = ext; *EXT2 = ext; (*core::TGE_RESOURCE_MANAGER).register_extension(DDS_EXTENSION, read_dds); } plugin.intercept(dgl::tge_create_gl_name, my_create_gl_name, &original_create_gl_name)?; plugin.intercept( dgl::tge_extrude_mip_levels, my_extrude_mip_levels, &original_extrude_mip_levels, )?; Ok(()) }
ixels(bpp) => bpp as u32, TextureLayout::Blocks(_) => 0, }; let mut mip_levels = 1; if header.flags & DDS_HEADER_FLAGS_MIPMAP != 0 { mip_levels = header.mip_map_count.max(1).min(10); } bitmap.num_mip_levels = mip_levels; let mut total_size = 0; for i in 0..mip_levels { bitmap.mip_level_offsets[i as usize] = total_size as u32; total_size += layout.data_size(bitmap.mip_width(i), bitmap.mip_height(i)); } bitmap.byte_size = total_size as u32; let mut data: Vec<u8> = io::read_array(stream, total_size)?; match bitmap.format { BitmapFormat::Rgb => swap_channels(&mut data, 3), BitmapFormat::Rgba => swap_channels(&mut data, 4), _ => (), } debug!("Loaded {}x{} DDS in {:?}", bitmap.width, bitmap.height, start_time.elapsed()); bitmap.bits = util::leak_vec_ptr(data); Ok(bitmap) } extern "C" fn read_dds(stream: &mut Stream) -> *mut ResourceInstance { match do_read_dds(stream) { Ok(bitmap) => Box::into_raw(bitmap).cast(), Err(e) => { error!("Error loading DDS file: {}", e); ptr::null_mut() } } } #[fn_override(original_create_gl_name)] unsafe fn my_create_gl_name( bitmap: &mut GBitmap, clamp_to_edge: bool, first_mip: u32, texture_type: u32, to: &mut TextureObject, ) -> bool { let gl_format = match bitmap.format { BitmapFormat::XDxt1 => gl::COMPRESSED_RGB_S3TC_DXT1_EXT, BitmapFormat::XDxt3 => gl::COMPRESSED_RGBA_S3TC_DXT3_EXT, BitmapFormat::XDxt5 => gl::COMPRESSED_RGBA_S3TC_DXT5_EXT, BitmapFormat::XBc5S => gl::COMPRESSED_SIGNED_RG_RGTC2, BitmapFormat::XBc5U => gl::COMPRESSED_RG_RGTC2, _ => return original_create_gl_name(bitmap, clamp_to_edge, first_mip, texture_type, to), }; gl::GenTextures(1, &mut to.gl_texture_name); gl::BindTexture(gl::TEXTURE_2D, to.gl_texture_name); let layout = TextureLayout::from(bitmap.format); for i in first_mip..bitmap.num_mip_levels { let width = bitmap.mip_width(i); let height = bitmap.mip_height(i); let size = layout.data_size(width, height); gl::CompressedTexImage2D( gl::TEXTURE_2D, (i - first_mip) as GLint, gl_format, width as GLint, height as GLint, 
0, size as GLsizei, bitmap.mip_bits(i).cast(), ); } to.texture_width = bitmap.mip_width(first_mip); to.texture_height = bitmap.mip_height(first_mip); let (min_filter, mag_filter) = if to.filter_nearest { (gl::NEAREST, gl::NEAREST) } else if bitmap.num_mip_levels - first_mip > 1 { (gl::LINEAR_MIPMAP_LINEAR, gl::LINEAR) } else { (gl::LINEAR, gl::LINEAR) }; gl::TexParameteri(gl::TEXTURE_2D, g
random
[ { "content": "fn fix_texture(name: u32, bitmap: &mut GBitmap, texture_type: u32) {\n\n if texture_type == 0 || texture_type == 1 || texture_type == 2 {\n\n return;\n\n }\n\n if bitmap.width <= 512 && bitmap.height <= 512 {\n\n return;\n\n }\n\n if bitmap.num_mip_levels > 1 {\n\n return;\n\n }\n\n unsafe {\n\n gl::BindTexture(gl::TEXTURE_2D, name);\n\n gl::GenerateMipmap(gl::TEXTURE_2D);\n\n gl::TexParameteri(\n\n gl::TEXTURE_2D,\n\n gl::TEXTURE_MIN_FILTER,\n\n gl::LINEAR_MIPMAP_LINEAR as GLint,\n\n );\n\n gl::BindTexture(gl::TEXTURE_2D, 0);\n\n }\n", "file_path": "MBExtender/rust/texture-size-fix/src/lib.rs", "rank": 1, "score": 464747.9263254404 }, { "content": "\t/// \\brief Exception thrown when a header decoding error occurs\n\n\tclass HeaderErr : public Err {public: HeaderErr() : Err(INVALID_DATA_FORMAT, \"Gunzip: header decoding error\") {}};\n", "file_path": "MBExtender/external/cryptopp/gzip.h", "rank": 4, "score": 286718.28731652955 }, { "content": "#[plugin_main]\n\nfn main(plugin: &Plugin) -> Result<(), &'static str> {\n\n plugin.on_gl_context_ready(gl::init);\n\n\n\n plugin.intercept(dgl::tge_allocate_bitmap, my_allocate_bitmap, &original_allocate_bitmap)?;\n\n plugin.intercept(dgl::tge_create_gl_name, my_create_gl_name, &original_create_gl_name)?;\n\n\n\n unsafe {\n\n let error = format!(\"Error, cannot load pngs taller than {} pixels!\", MAX_PNG_HEIGHT);\n\n ROW_POINTERS = vec![ptr::null_mut(); MAX_PNG_HEIGHT as usize];\n\n\n\n *HEIGHT_COMPARE = MAX_PNG_HEIGHT;\n\n *ERROR_MESSAGE = CString::new(error).unwrap().into_raw();\n\n *ROW_POINTERS_1 = ROW_POINTERS.as_mut_ptr();\n\n *ROW_POINTERS_2 = ROW_POINTERS.as_mut_ptr();\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "MBExtender/rust/texture-size-fix/src/lib.rs", "rank": 5, "score": 285579.04855378176 }, { "content": "\tclass HeaderErr : public Err {public: HeaderErr() : Err(INVALID_DATA_FORMAT, \"ZlibDecompressor: header decoding error\") {}};\n", "file_path": "MBExtender/external/cryptopp/zlib.h", 
"rank": 6, "score": 283735.0971193671 }, { "content": "pub fn get_current_directory() -> &'static str {\n\n unsafe {\n\n let cwd = tge_platform_get_current_directory();\n\n CStr::from_ptr(cwd).to_str().unwrap()\n\n }\n\n}\n", "file_path": "MBExtender/rust/mbx/src/platform/platform.rs", "rank": 8, "score": 247012.98280731984 }, { "content": "#[plugin_main]\n\nfn main(plugin: &Plugin) -> Result<(), &'static str> {\n\n con::add_command(&loadMBPackage);\n\n\n\n plugin.intercept(\n\n platform::tge_platform_is_subdirectory,\n\n platform_is_subdirectory,\n\n &original_platform_is_subdirectory,\n\n )?;\n\n\n\n plugin.intercept(\n\n platform::tge_platform_get_filetimes,\n\n platform_get_filetimes,\n\n &original_platform_get_filetimes,\n\n )?;\n\n\n\n plugin.intercept(core::tge_file_open, file_open, &original_file_open)?;\n\n plugin.intercept(core::tge_file_close, file_close, &original_file_close)?;\n\n plugin.intercept(core::tge_file_getposition, file_getposition, &original_file_getposition)?;\n\n plugin.intercept(core::tge_file_setposition, file_setposition, &original_file_setposition)?;\n\n plugin.intercept(core::tge_file_read, file_read, &original_file_read)?;\n", "file_path": "MBExtender/rust/mbcrypt/src/lib.rs", "rank": 9, "score": 234708.889850781 }, { "content": "#[plugin_main]\n\nfn main(plugin: &Plugin) -> Result<(), &'static str> {\n\n info!(\"Hello from main()!\");\n\n warn!(\"This is a warning!\");\n\n error!(\"This is an error!\");\n\n\n\n con::add_command(&rustAdd);\n\n con::add_command(&rustReverse);\n\n con::add_command(&rustPrintVelocity);\n\n\n\n plugin.intercept(tge_create_window, my_create_window, &original_create_window)?;\n\n plugin.intercept(\n\n game::tge_marble_do_power_up,\n\n my_marble_do_power_up,\n\n &original_marble_do_power_up,\n\n )?;\n\n\n\n plugin.on_game_start(game_start);\n\n\n\n Ok(())\n\n}\n", "file_path": "MBExtender/rust/rust-plugin/src/lib.rs", "rank": 10, "score": 231474.31448758225 }, { "content": "#[plugin_main]\n\nfn main(plugin: 
&Plugin) -> Result<(), &'static str> {\n\n plugin.on_gl_context_ready(gl::init);\n\n accel::init(plugin)?;\n\n dds_loader::init(plugin)?;\n\n Ok(())\n\n}\n", "file_path": "MBExtender/rust/speed-boostair/src/lib.rs", "rank": 11, "score": 231474.31448758225 }, { "content": "/// Reads an array of primitive values from a stream.\n\npub fn read_array<V, S: Read>(stream: &mut S, count: usize) -> io::Result<Vec<V>> {\n\n let mut vec: Vec<MaybeUninit<V>> = Vec::with_capacity(count);\n\n let size = mem::size_of::<V>()\n\n .checked_mul(count)\n\n .ok_or_else(|| io::Error::from(io::ErrorKind::InvalidInput))?;\n\n unsafe {\n\n vec.set_len(count);\n\n let bytes = slice::from_raw_parts_mut(vec.as_mut_ptr() as *mut u8, size);\n\n stream.read_exact(bytes)?;\n\n Ok(mem::transmute(vec))\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/speed-boostair/src/io.rs", "rank": 12, "score": 230866.1178365196 }, { "content": "/// Writes an array of primitive values to a stream.\n\npub fn write_array<V, S: Write>(stream: &mut S, ptr: *const V, count: usize) -> io::Result<()> {\n\n let size = mem::size_of::<V>()\n\n .checked_mul(count)\n\n .ok_or_else(|| io::Error::from(io::ErrorKind::InvalidInput))?;\n\n let bytes = unsafe { slice::from_raw_parts(ptr as *const u8, size) };\n\n stream.write_all(bytes)\n\n}\n", "file_path": "MBExtender/rust/speed-boostair/src/io.rs", "rank": 13, "score": 226125.04257916426 }, { "content": "/// Initializes the accelerator cacher.\n\npub fn init(plugin: &Plugin) -> Result<(), &'static str> {\n\n plugin.intercept(\n\n ts::tge_compute_accelerator,\n\n my_compute_accelerator,\n\n &original_compute_accelerator,\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "MBExtender/rust/speed-boostair/src/accel.rs", "rank": 14, "score": 222854.26070962567 }, { "content": "/// gl-rs-compatible OpenGL symbol loader\n\npub fn get_proc_address(symbol: &'static str) -> *const c_void {\n\n unimplemented!();\n\n}\n", "file_path": "MBExtender/rust/mbx-gl/src/macos.rs", "rank": 16, "score": 
216937.96698510763 }, { "content": "/// gl-rs-compatible OpenGL symbol loader\n\npub fn get_proc_address(symbol: &'static str) -> *const c_void {\n\n LOAD_OPENGL32.call_once(|| {\n\n let opengl32_name = U16CString::from_str(\"opengl32.dll\").unwrap();\n\n unsafe {\n\n OPENGL32 = libloaderapi::LoadLibraryW(opengl32_name.as_ptr());\n\n if OPENGL32.is_null() {\n\n panic!(\"Failed to load opengl32.dll\");\n\n }\n\n }\n\n });\n\n let c_symbol = CString::new(symbol).unwrap();\n\n let ptr = unsafe { wglGetProcAddress(c_symbol.as_ptr()) };\n\n if ptr.is_null() {\n\n unsafe { libloaderapi::GetProcAddress(OPENGL32, c_symbol.as_ptr()).cast() }\n\n } else {\n\n ptr\n\n }\n\n}\n", "file_path": "MBExtender/rust/mbx-gl/src/windows.rs", "rank": 17, "score": 216937.96698510763 }, { "content": " enum class ReturnType {\n\n String,\n\n Void,\n\n Int,\n\n Float,\n\n Bool,\n\n };\n\n\n\n ReturnType retType_;\n\n const char *name_;\n\n void *cb_;\n\n const char *usage_;\n\n int minArgs_;\n\n int maxArgs_;\n\n const char *nsName_;\n\n};\n\n} // namespace MBX\n", "file_path": "MBExtender/src/MBExtender/include/MBExtender/Console.h", "rank": 18, "score": 214571.28965252484 }, { "content": "/// Copy a string into Torque's internal string return buffer and return a raw pointer to it.\n\npub fn get_return_buffer(s: &str) -> *const c_char {\n\n let bytes = s.as_bytes();\n\n unsafe {\n\n let buffer = tge_get_return_buffer(bytes.len() as u32 + 1);\n\n ptr::copy_nonoverlapping(bytes.as_ptr().cast::<c_char>(), buffer, bytes.len());\n\n buffer.offset(bytes.len() as isize).write(0);\n\n buffer\n\n }\n\n}\n", "file_path": "MBExtender/rust/mbx/src/con_/command.rs", "rank": 19, "score": 209527.78232674685 }, { "content": "#[doc(hidden)]\n\n#[proc_macro_attribute]\n\npub fn __impl_method_override_windows(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n do_method_override(attr, item, true)\n\n}\n\n\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 20, "score": 198330.59680538956 
}, { "content": "#[doc(hidden)]\n\n#[proc_macro_attribute]\n\npub fn __impl_method_override_macos(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n do_method_override(attr, item, false)\n\n}\n\n\n\n/// Generates a method override definition and automatically expands calls to\n\n/// the original function.\n\n///\n\n/// Example:\n\n///\n\n/// #[method_override(original_marble_do_power_up)]\n\n/// fn my_marble_do_power_up(this: *mut (), id: i32) {\n\n/// unsafe { original_marble_do_power_up(this, id); }\n\n/// }\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 21, "score": 198330.59680538956 }, { "content": "/// Reads a typed value from a stream.\n\npub fn read_val<V, S: Read>(stream: &mut S) -> io::Result<V> {\n\n let mut val = MaybeUninit::<V>::uninit();\n\n let size = mem::size_of::<V>();\n\n unsafe {\n\n let bytes = slice::from_raw_parts_mut(val.as_mut_ptr() as *mut u8, size);\n\n stream.read_exact(bytes)?;\n\n Ok(val.assume_init())\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/speed-boostair/src/io.rs", "rank": 22, "score": 194493.98024338658 }, { "content": "/// Writes a typed value to a stream.\n\npub fn write_val<V, S: Write>(stream: &mut S, val: &V) -> io::Result<()> {\n\n let size = mem::size_of::<V>();\n\n let bytes = unsafe { slice::from_raw_parts(val as *const _ as *const u8, size) };\n\n stream.write_all(bytes)\n\n}\n\n\n", "file_path": "MBExtender/rust/speed-boostair/src/io.rs", "rank": 23, "score": 189549.05411780177 }, { "content": "fn combine(left: &str, right: &str) -> String {\n\n if left.is_empty() {\n\n return String::from(right);\n\n } else if right.is_empty() {\n\n return String::from(left);\n\n }\n\n let leftEnd = left.chars().nth(left.len() - 1).unwrap();\n\n let rightStart = right.chars().nth(0).unwrap();\n\n if leftEnd == '/' || rightStart == '/' {\n\n return format!(\"{}{}\", left, right);\n\n } else {\n\n return format!(\"{}/{}\", left, right);\n\n 
}\n\n}\n\n\n\n#[fn_override(original_platform_dumppath)]\n\nunsafe fn platform_dumppath(\n\n path: *const c_char,\n\n fileVector: &mut core::TgeVec<platform::FileInfo>,\n\n) -> bool {\n", "file_path": "MBExtender/rust/mbcrypt/src/lib.rs", "rank": 24, "score": 185587.83455544495 }, { "content": "Uint32\n\nSDL_MasksToPixelFormatEnum(int bpp, Uint32 Rmask, Uint32 Gmask, Uint32 Bmask,\n\n Uint32 Amask)\n\n{\n\n switch (bpp) {\n\n case 1:\n\n /* SDL defaults to MSB ordering */\n\n return SDL_PIXELFORMAT_INDEX1MSB;\n\n case 4:\n\n /* SDL defaults to MSB ordering */\n\n return SDL_PIXELFORMAT_INDEX4MSB;\n\n case 8:\n\n if (Rmask == 0) {\n\n return SDL_PIXELFORMAT_INDEX8;\n\n }\n\n if (Rmask == 0xE0 &&\n\n Gmask == 0x1C &&\n\n Bmask == 0x03 &&\n\n Amask == 0x00) {\n\n return SDL_PIXELFORMAT_RGB332;\n\n }\n\n break;\n\n case 12:\n\n if (Rmask == 0) {\n\n return SDL_PIXELFORMAT_RGB444;\n\n }\n\n if (Rmask == 0x0F00 &&\n\n Gmask == 0x00F0 &&\n\n Bmask == 0x000F &&\n\n Amask == 0x0000) {\n\n return SDL_PIXELFORMAT_RGB444;\n\n }\n\n break;\n\n case 15:\n\n if (Rmask == 0) {\n\n return SDL_PIXELFORMAT_RGB555;\n\n }\n\n /* Fall through to 16-bit checks */\n\n case 16:\n\n if (Rmask == 0) {\n\n return SDL_PIXELFORMAT_RGB565;\n\n }\n\n if (Rmask == 0x7C00 &&\n\n Gmask == 0x03E0 &&\n\n Bmask == 0x001F &&\n\n Amask == 0x0000) {\n\n return SDL_PIXELFORMAT_RGB555;\n\n }\n\n if (Rmask == 0x001F &&\n\n Gmask == 0x03E0 &&\n\n Bmask == 0x7C00 &&\n\n Amask == 0x0000) {\n\n return SDL_PIXELFORMAT_BGR555;\n\n }\n\n if (Rmask == 0x0F00 &&\n\n Gmask == 0x00F0 &&\n\n Bmask == 0x000F &&\n\n Amask == 0xF000) {\n\n return SDL_PIXELFORMAT_ARGB4444;\n\n }\n\n if (Rmask == 0xF000 &&\n\n Gmask == 0x0F00 &&\n\n Bmask == 0x00F0 &&\n\n Amask == 0x000F) {\n\n return SDL_PIXELFORMAT_RGBA4444;\n\n }\n\n if (Rmask == 0x000F &&\n\n Gmask == 0x00F0 &&\n\n Bmask == 0x0F00 &&\n\n Amask == 0xF000) {\n\n return SDL_PIXELFORMAT_ABGR4444;\n\n }\n\n if (Rmask == 0x00F0 &&\n\n Gmask == 0x0F00 &&\n\n Bmask == 0xF000 
&&\n\n Amask == 0x000F) {\n\n return SDL_PIXELFORMAT_BGRA4444;\n\n }\n\n if (Rmask == 0x7C00 &&\n\n Gmask == 0x03E0 &&\n\n Bmask == 0x001F &&\n\n Amask == 0x8000) {\n\n return SDL_PIXELFORMAT_ARGB1555;\n\n }\n\n if (Rmask == 0xF800 &&\n\n Gmask == 0x07C0 &&\n\n Bmask == 0x003E &&\n\n Amask == 0x0001) {\n\n return SDL_PIXELFORMAT_RGBA5551;\n\n }\n\n if (Rmask == 0x001F &&\n\n Gmask == 0x03E0 &&\n\n Bmask == 0x7C00 &&\n\n Amask == 0x8000) {\n\n return SDL_PIXELFORMAT_ABGR1555;\n\n }\n\n if (Rmask == 0x003E &&\n\n Gmask == 0x07C0 &&\n\n Bmask == 0xF800 &&\n\n Amask == 0x0001) {\n\n return SDL_PIXELFORMAT_BGRA5551;\n\n }\n\n if (Rmask == 0xF800 &&\n\n Gmask == 0x07E0 &&\n\n Bmask == 0x001F &&\n\n Amask == 0x0000) {\n\n return SDL_PIXELFORMAT_RGB565;\n\n }\n\n if (Rmask == 0x001F &&\n\n Gmask == 0x07E0 &&\n\n Bmask == 0xF800 &&\n\n Amask == 0x0000) {\n\n return SDL_PIXELFORMAT_BGR565;\n\n }\n\n break;\n\n case 24:\n\n switch (Rmask) {\n\n case 0:\n\n case 0x00FF0000:\n\n#if SDL_BYTEORDER == SDL_BIG_ENDIAN\n\n return SDL_PIXELFORMAT_RGB24;\n\n#else\n\n return SDL_PIXELFORMAT_BGR24;\n\n#endif\n\n case 0x000000FF:\n\n#if SDL_BYTEORDER == SDL_BIG_ENDIAN\n\n return SDL_PIXELFORMAT_BGR24;\n\n#else\n\n return SDL_PIXELFORMAT_RGB24;\n\n#endif\n\n }\n\n case 32:\n\n if (Rmask == 0) {\n\n return SDL_PIXELFORMAT_RGB888;\n\n }\n\n if (Rmask == 0x00FF0000 &&\n\n Gmask == 0x0000FF00 &&\n\n Bmask == 0x000000FF &&\n\n Amask == 0x00000000) {\n\n return SDL_PIXELFORMAT_RGB888;\n\n }\n\n if (Rmask == 0xFF000000 &&\n\n Gmask == 0x00FF0000 &&\n\n Bmask == 0x0000FF00 &&\n\n Amask == 0x00000000) {\n\n return SDL_PIXELFORMAT_RGBX8888;\n\n }\n\n if (Rmask == 0x000000FF &&\n\n Gmask == 0x0000FF00 &&\n\n Bmask == 0x00FF0000 &&\n\n Amask == 0x00000000) {\n\n return SDL_PIXELFORMAT_BGR888;\n\n }\n\n if (Rmask == 0x0000FF00 &&\n\n Gmask == 0x00FF0000 &&\n\n Bmask == 0xFF000000 &&\n\n Amask == 0x00000000) {\n\n return SDL_PIXELFORMAT_BGRX8888;\n\n }\n\n if (Rmask == 0x00FF0000 &&\n\n Gmask == 
0x0000FF00 &&\n\n Bmask == 0x000000FF &&\n\n Amask == 0xFF000000) {\n\n return SDL_PIXELFORMAT_ARGB8888;\n\n }\n\n if (Rmask == 0xFF000000 &&\n\n Gmask == 0x00FF0000 &&\n\n Bmask == 0x0000FF00 &&\n\n Amask == 0x000000FF) {\n\n return SDL_PIXELFORMAT_RGBA8888;\n\n }\n\n if (Rmask == 0x000000FF &&\n\n Gmask == 0x0000FF00 &&\n\n Bmask == 0x00FF0000 &&\n\n Amask == 0xFF000000) {\n\n return SDL_PIXELFORMAT_ABGR8888;\n\n }\n\n if (Rmask == 0x0000FF00 &&\n\n Gmask == 0x00FF0000 &&\n\n Bmask == 0xFF000000 &&\n\n Amask == 0x000000FF) {\n\n return SDL_PIXELFORMAT_BGRA8888;\n\n }\n\n if (Rmask == 0x3FF00000 &&\n\n Gmask == 0x000FFC00 &&\n\n Bmask == 0x000003FF &&\n\n Amask == 0xC0000000) {\n\n return SDL_PIXELFORMAT_ARGB2101010;\n\n }\n\n }\n\n return SDL_PIXELFORMAT_UNKNOWN;\n", "file_path": "MBExtender/external/SDL2/src/video/SDL_pixels.c", "rank": 25, "score": 180183.07200866943 }, { "content": "SDL_bool\n\nSDL_PixelFormatEnumToMasks(Uint32 format, int *bpp, Uint32 * Rmask,\n\n Uint32 * Gmask, Uint32 * Bmask, Uint32 * Amask)\n\n{\n\n Uint32 masks[4];\n\n\n\n /* This function doesn't work with FourCC pixel formats */\n\n if (SDL_ISPIXELFORMAT_FOURCC(format)) {\n\n SDL_SetError(\"FOURCC pixel formats are not supported\");\n\n return SDL_FALSE;\n\n }\n\n\n\n /* Initialize the values here */\n\n if (SDL_BYTESPERPIXEL(format) <= 2) {\n\n *bpp = SDL_BITSPERPIXEL(format);\n\n } else {\n\n *bpp = SDL_BYTESPERPIXEL(format) * 8;\n\n }\n\n *Rmask = *Gmask = *Bmask = *Amask = 0;\n\n\n\n if (format == SDL_PIXELFORMAT_RGB24) {\n\n#if SDL_BYTEORDER == SDL_BIG_ENDIAN\n\n *Rmask = 0x00FF0000;\n\n *Gmask = 0x0000FF00;\n\n *Bmask = 0x000000FF;\n\n#else\n\n *Rmask = 0x000000FF;\n\n *Gmask = 0x0000FF00;\n\n *Bmask = 0x00FF0000;\n\n#endif\n\n return SDL_TRUE;\n\n }\n\n\n\n if (format == SDL_PIXELFORMAT_BGR24) {\n\n#if SDL_BYTEORDER == SDL_BIG_ENDIAN\n\n *Rmask = 0x000000FF;\n\n *Gmask = 0x0000FF00;\n\n *Bmask = 0x00FF0000;\n\n#else\n\n *Rmask = 0x00FF0000;\n\n *Gmask = 0x0000FF00;\n\n *Bmask = 
0x000000FF;\n\n#endif\n\n return SDL_TRUE;\n\n }\n\n\n\n if (SDL_PIXELTYPE(format) != SDL_PIXELTYPE_PACKED8 &&\n\n SDL_PIXELTYPE(format) != SDL_PIXELTYPE_PACKED16 &&\n\n SDL_PIXELTYPE(format) != SDL_PIXELTYPE_PACKED32) {\n\n /* Not a format that uses masks */\n\n return SDL_TRUE;\n\n }\n\n\n\n switch (SDL_PIXELLAYOUT(format)) {\n\n case SDL_PACKEDLAYOUT_332:\n\n masks[0] = 0x00000000;\n\n masks[1] = 0x000000E0;\n\n masks[2] = 0x0000001C;\n\n masks[3] = 0x00000003;\n\n break;\n\n case SDL_PACKEDLAYOUT_4444:\n\n masks[0] = 0x0000F000;\n\n masks[1] = 0x00000F00;\n\n masks[2] = 0x000000F0;\n\n masks[3] = 0x0000000F;\n\n break;\n\n case SDL_PACKEDLAYOUT_1555:\n\n masks[0] = 0x00008000;\n\n masks[1] = 0x00007C00;\n\n masks[2] = 0x000003E0;\n\n masks[3] = 0x0000001F;\n\n break;\n\n case SDL_PACKEDLAYOUT_5551:\n\n masks[0] = 0x0000F800;\n\n masks[1] = 0x000007C0;\n\n masks[2] = 0x0000003E;\n\n masks[3] = 0x00000001;\n\n break;\n\n case SDL_PACKEDLAYOUT_565:\n\n masks[0] = 0x00000000;\n\n masks[1] = 0x0000F800;\n\n masks[2] = 0x000007E0;\n\n masks[3] = 0x0000001F;\n\n break;\n\n case SDL_PACKEDLAYOUT_8888:\n\n masks[0] = 0xFF000000;\n\n masks[1] = 0x00FF0000;\n\n masks[2] = 0x0000FF00;\n\n masks[3] = 0x000000FF;\n\n break;\n\n case SDL_PACKEDLAYOUT_2101010:\n\n masks[0] = 0xC0000000;\n\n masks[1] = 0x3FF00000;\n\n masks[2] = 0x000FFC00;\n\n masks[3] = 0x000003FF;\n\n break;\n\n case SDL_PACKEDLAYOUT_1010102:\n\n masks[0] = 0xFFC00000;\n\n masks[1] = 0x003FF000;\n\n masks[2] = 0x00000FFC;\n\n masks[3] = 0x00000003;\n\n break;\n\n default:\n\n SDL_SetError(\"Unknown pixel format\");\n\n return SDL_FALSE;\n\n }\n\n\n\n switch (SDL_PIXELORDER(format)) {\n\n case SDL_PACKEDORDER_XRGB:\n\n *Rmask = masks[1];\n\n *Gmask = masks[2];\n\n *Bmask = masks[3];\n\n break;\n\n case SDL_PACKEDORDER_RGBX:\n\n *Rmask = masks[0];\n\n *Gmask = masks[1];\n\n *Bmask = masks[2];\n\n break;\n\n case SDL_PACKEDORDER_ARGB:\n\n *Amask = masks[0];\n\n *Rmask = masks[1];\n\n *Gmask = masks[2];\n\n 
*Bmask = masks[3];\n\n break;\n\n case SDL_PACKEDORDER_RGBA:\n\n *Rmask = masks[0];\n\n *Gmask = masks[1];\n\n *Bmask = masks[2];\n\n *Amask = masks[3];\n\n break;\n\n case SDL_PACKEDORDER_XBGR:\n\n *Bmask = masks[1];\n\n *Gmask = masks[2];\n\n *Rmask = masks[3];\n\n break;\n\n case SDL_PACKEDORDER_BGRX:\n\n *Bmask = masks[0];\n\n *Gmask = masks[1];\n\n *Rmask = masks[2];\n\n break;\n\n case SDL_PACKEDORDER_BGRA:\n\n *Bmask = masks[0];\n\n *Gmask = masks[1];\n\n *Rmask = masks[2];\n\n *Amask = masks[3];\n\n break;\n\n case SDL_PACKEDORDER_ABGR:\n\n *Amask = masks[0];\n\n *Bmask = masks[1];\n\n *Gmask = masks[2];\n\n *Rmask = masks[3];\n\n break;\n\n default:\n\n SDL_SetError(\"Unknown pixel format\");\n\n return SDL_FALSE;\n\n }\n\n return SDL_TRUE;\n", "file_path": "MBExtender/external/SDL2/src/video/SDL_pixels.c", "rank": 26, "score": 180183.07200866946 }, { "content": " int header; /* true if block header must be written */\n", "file_path": "MBExtender/external/zlib/trees.c", "rank": 27, "score": 176737.165582804 }, { "content": " struct curl_slist *headers; /* linked list of extra headers */\n", "file_path": "MBExtender/external/curl/lib/urldata.h", "rank": 28, "score": 174414.56306079336 }, { "content": " bool header; /* incoming data has HTTP header */\n", "file_path": "MBExtender/external/curl/lib/urldata.h", "rank": 29, "score": 174413.30225576274 }, { "content": " const char * header; /* Pointer to header byte. 
*/\n", "file_path": "MBExtender/external/curl/lib/x509asn1.h", "rank": 30, "score": 174413.30225576274 }, { "content": "/// Register a console command which returns a string.\n\npub fn add_command_str(name: &str, cb: StringFn, usage: &str, min_args: i32, max_args: i32) {\n\n let c_name = CString::new(name).unwrap();\n\n let c_usage = CString::new(usage).unwrap();\n\n unsafe {\n\n tge_add_command_str(c_name.as_ptr(), cb, c_usage.into_raw(), min_args, max_args);\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/mbx/src/con_/command.rs", "rank": 31, "score": 173694.6184193726 }, { "content": " struct curl_slist *headers;\n", "file_path": "MBExtender/external/curl/src/tool_cfgable.h", "rank": 32, "score": 172176.626636232 }, { "content": " Uint32 format;\n", "file_path": "MBExtender/external/SDL2/include/SDL_pixels.h", "rank": 33, "score": 172139.51614994777 }, { "content": " uint8_t error;\n", "file_path": "MBExtender/external/udis86/udis86/libudis86/types.h", "rank": 34, "score": 172026.91640071973 }, { "content": "---------------------\n\n| DOS Header |\n", "file_path": "MBExtender/src/MBGPatcher/PEFile.h", "rank": 35, "score": 171792.49086684742 }, { "content": "---------------------\n\n| PE Header |\n", "file_path": "MBExtender/src/MBGPatcher/PEFile.h", "rank": 36, "score": 171792.49086684742 }, { "content": "/// \\brief Status of the power-up self test\n\nenum PowerUpSelfTestStatus {\n\n\n\n\t/// \\brief The self tests have not been performed.\n\n\tPOWER_UP_SELF_TEST_NOT_DONE,\n\n\t/// \\brief The self tests were executed via DoPowerUpSelfTest() or\n\n\t/// DoDllPowerUpSelfTest(), but the result was failure.\n\n\tPOWER_UP_SELF_TEST_FAILED,\n\n\t/// \\brief The self tests were executed via DoPowerUpSelfTest() or\n\n\t/// DoDllPowerUpSelfTest(), and the result was success.\n\n\tPOWER_UP_SELF_TEST_PASSED\n\n};\n\n\n\n/// \\brief Performs the power-up self test\n\n/// \\param moduleFilename the fully qualified name of the module\n\n/// \\param expectedModuleMac the 
expected MAC of the components protected by the integrity check\n\n/// \\details Performs the power-up self test, and sets the self test status to\n\n/// POWER_UP_SELF_TEST_PASSED or POWER_UP_SELF_TEST_FAILED.\n\n/// \\details The self tests for an algorithm are performed by the Algortihm class\n\n/// when CRYPTOPP_ENABLE_COMPLIANCE_WITH_FIPS_140_2 is defined.\n\nCRYPTOPP_DLL void CRYPTOPP_API DoPowerUpSelfTest(const char *moduleFilename, const byte *expectedModuleMac);\n", "file_path": "MBExtender/external/cryptopp/fips140.h", "rank": 37, "score": 169189.33213989568 }, { "content": "enum ValueType {\n\n nullValue = 0, ///< 'null' value\n\n intValue, ///< signed integer value\n\n uintValue, ///< unsigned integer value\n\n realValue, ///< double value\n\n stringValue, ///< UTF-8 string value\n\n booleanValue, ///< bool value\n\n arrayValue, ///< array value (ordered list)\n\n objectValue ///< object value (collection of name/value pairs).\n\n};\n\n\n", "file_path": "MBExtender/external/jsoncpp/include/json/value.h", "rank": 38, "score": 169034.4623646 }, { "content": "#[proc_macro_attribute]\n\npub fn fn_override(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let original = parse_macro_input!(attr as Ident);\n\n let mut func = parse_macro_input!(item as ItemFn);\n\n\n\n // Use extern \"C\"\n\n if let Some(ref abi) = func.sig.abi {\n\n return Error::new(abi.span(), \"function overrides must not specify an ABI\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n func.sig.abi = Some(parse_quote!(extern \"C\"));\n\n\n\n // Force unsafe\n\n if func.sig.unsafety.is_none() {\n\n return Error::new(func.sig.span(), \"function overrides must be `unsafe`\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n\n\n let sig = &func.sig;\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 39, "score": 166787.6810078565 }, { "content": "enum PrettyFormatOptions {\n\n kFormatDefault = 0, //!< Default pretty formatting.\n\n kFormatSingleLineArray = 1 //!< Format arrays 
on a single line.\n\n};\n\n\n\n//! Writer with indentation and spacing.\n\n/*!\n\n \\tparam OutputStream Type of ouptut os.\n\n \\tparam SourceEncoding Encoding of source string.\n\n \\tparam TargetEncoding Encoding of output stream.\n\n \\tparam StackAllocator Type of allocator for allocating memory of stack.\n\n*/\n\ntemplate<typename OutputStream, typename SourceEncoding = UTF8<>, typename TargetEncoding = UTF8<>, typename StackAllocator = CrtAllocator, unsigned writeFlags = kWriteDefaultFlags>\n", "file_path": "MBExtender/external/rapidjson/include/rapidjson/prettywriter.h", "rank": 40, "score": 166652.55160711697 }, { "content": "/// Register a console command which returns an integer.\n\npub fn add_command_int(name: &str, cb: IntFn, usage: &str, min_args: i32, max_args: i32) {\n\n let c_name = CString::new(name).unwrap();\n\n let c_usage = CString::new(usage).unwrap();\n\n unsafe {\n\n tge_add_command_int(c_name.as_ptr(), cb, c_usage.into_raw(), min_args, max_args);\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/mbx/src/con_/command.rs", "rank": 41, "score": 164548.3468531236 }, { "content": "/// Register a console command which returns a boolean.\n\npub fn add_command_bool(name: &str, cb: BoolFn, usage: &str, min_args: i32, max_args: i32) {\n\n let c_name = CString::new(name).unwrap();\n\n let c_usage = CString::new(usage).unwrap();\n\n unsafe {\n\n tge_add_command_bool(c_name.as_ptr(), cb, c_usage.into_raw(), min_args, max_args);\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/mbx/src/con_/command.rs", "rank": 42, "score": 164548.3468531236 }, { "content": "/// Register a console command which returns a float.\n\npub fn add_command_float(name: &str, cb: FloatFn, usage: &str, min_args: i32, max_args: i32) {\n\n let c_name = CString::new(name).unwrap();\n\n let c_usage = CString::new(usage).unwrap();\n\n unsafe {\n\n tge_add_command_float(c_name.as_ptr(), cb, c_usage.into_raw(), min_args, max_args);\n\n }\n\n}\n\n\n", "file_path": 
"MBExtender/rust/mbx/src/con_/command.rs", "rank": 43, "score": 164548.3468531236 }, { "content": "/// Register a console command which does not return a value.\n\npub fn add_command_void(name: &str, cb: VoidFn, usage: &str, min_args: i32, max_args: i32) {\n\n let c_name = CString::new(name).unwrap();\n\n let c_usage = CString::new(usage).unwrap();\n\n unsafe {\n\n tge_add_command_void(c_name.as_ptr(), cb, c_usage.into_raw(), min_args, max_args);\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/mbx/src/con_/command.rs", "rank": 44, "score": 164548.3468531236 }, { "content": "enum PointerParseErrorCode {\n\n kPointerParseErrorNone = 0, //!< The parse is successful\n\n\n\n kPointerParseErrorTokenMustBeginWithSolidus, //!< A token must begin with a '/'\n\n kPointerParseErrorInvalidEscape, //!< Invalid escape\n\n kPointerParseErrorInvalidPercentEncoding, //!< Invalid percent encoding in URI fragment\n\n kPointerParseErrorCharacterMustPercentEncode //!< A character must percent encoded in URI fragment\n\n};\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n\n// GenericPointer\n\n\n\n//! Represents a JSON Pointer. Use Pointer for UTF8 encoding and default allocator.\n\n/*!\n\n This class implements RFC 6901 \"JavaScript Object Notation (JSON) Pointer\" \n\n (https://tools.ietf.org/html/rfc6901).\n\n\n\n A JSON pointer is for identifying a specific value in a JSON document\n\n (GenericDocument). 
It can simplify coding of DOM tree manipulation, because it\n\n can access multiple-level depth of DOM tree with single API call.\n", "file_path": "MBExtender/external/rapidjson/include/rapidjson/pointer.h", "rank": 45, "score": 164236.35064680452 }, { "content": "#define SDL_PixelFormatEnumToMasks SDL_PixelFormatEnumToMasks_REAL\n", "file_path": "MBExtender/external/SDL2/src/dynapi/SDL_dynapi_overrides.h", "rank": 46, "score": 160391.35515437648 }, { "content": "#define SDL_MasksToPixelFormatEnum SDL_MasksToPixelFormatEnum_REAL\n", "file_path": "MBExtender/external/SDL2/src/dynapi/SDL_dynapi_overrides.h", "rank": 47, "score": 160391.35515437648 }, { "content": "fn searchEntry(path: &str) -> Option<MBPakFileEntry> {\n\n let workingdir = platform::get_current_directory();\n\n let relativedir = str::replace(path, workingdir, \"\");\n\n for package in loadedPackages.lock().unwrap().iter() {\n\n for entry in package.entries.iter() {\n\n if entry.file_path.starts_with(&relativedir) {\n\n let value = entry.clone();\n\n return Some(value);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n\n#[method_override(original_file_open)]\n\nunsafe fn file_open(\n\n this: &mut core::File,\n\n filename: *const c_char,\n\n openMode: core::AccessMode,\n\n) -> core::FileStatus {\n", "file_path": "MBExtender/rust/mbcrypt/src/lib.rs", "rank": 48, "score": 154775.5599702253 }, { "content": "/// Print an error message to the console.\n\n/// Prefer using the con_eprintln!() macro.\n\npub fn error<D: Display>(message: D) {\n\n let c_fmt = CString::new(\"%s\").unwrap();\n\n let c_message = CString::new(format!(\"{}\", message)).unwrap();\n\n unsafe {\n\n tge_errorf(c_fmt.as_ptr(), c_message.as_ptr());\n\n }\n\n}\n", "file_path": "MBExtender/rust/mbx/src/con_/print.rs", "rank": 49, "score": 154309.5321744621 }, { "content": "/// Register a console method which returns a string.\n\npub fn add_method_str(\n\n class: &str,\n\n name: &str,\n\n cb: StringFn,\n\n usage: &str,\n\n min_args: i32,\n\n 
max_args: i32,\n\n) {\n\n let c_class = CString::new(class).unwrap();\n\n let c_name = CString::new(name).unwrap();\n\n let c_usage = CString::new(usage).unwrap();\n\n unsafe {\n\n tge_add_command_ns_str(\n\n c_class.as_ptr(),\n\n c_name.as_ptr(),\n\n cb,\n\n c_usage.into_raw(),\n\n min_args,\n\n max_args,\n\n );\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/mbx/src/con_/command.rs", "rank": 50, "score": 154153.95349554787 }, { "content": "#[proc_macro_attribute]\n\npub fn inherits(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let parent_type = parse_macro_input!(attr as TypePath);\n\n let mut struct_item = parse_macro_input!(item as ItemStruct);\n\n\n\n // Insert the parent field at the beginning of the struct\n\n let new_fields: FieldsNamed = parse_quote!({ pub parent: #parent_type });\n\n match struct_item.fields {\n\n Fields::Named(ref mut fields) => {\n\n fields.named.insert(0, new_fields.named.first().unwrap().clone());\n\n }\n\n Fields::Unnamed(_) => {\n\n return Error::new(struct_item.fields.span(), \"struct must have named fields\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n Fields::Unit => {\n\n struct_item.fields = Fields::Named(new_fields);\n\n }\n\n }\n\n\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 51, "score": 151151.45576593984 }, { "content": "#[proc_macro_attribute]\n\npub fn command(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let meta = parse_macro_input!(attr as ConsoleFnMeta);\n\n let mut func = parse_macro_input!(item as ItemFn);\n\n\n\n // Force ABI to extern \"C\"\n\n if let Some(ref abi) = func.sig.abi {\n\n return Error::new(abi.span(), \"console functions must not specify an ABI\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n func.sig.abi = parse_quote!(extern \"C\");\n\n\n\n // The function will be replaced by a private function and a struct with the visibility of the\n\n // original function\n\n let vis = func.vis;\n\n func.vis = Visibility::Inherited;\n\n\n\n // Rename the 
function to __<name>()\n\n let public_ident = func.sig.ident;\n\n let private_name = format!(\"__{}\", public_ident);\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 52, "score": 151151.45576593984 }, { "content": "#[proc_macro_attribute]\n\npub fn vtable(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let vtable_type = parse_macro_input!(attr as TypePath);\n\n let mut struct_item = parse_macro_input!(item as ItemStruct);\n\n\n\n // We want to know whether this is a subclass. There are two cases here:\n\n // 1. The #[inherits] attribute has not been processed yet. It will show up in the attrs list.\n\n // 2. The #[inherits] attribute has already been processed. We have to look for the parent field.\n\n let mut is_subclass = struct_item.attrs.iter().any(|a| match a.path.segments.last() {\n\n Some(segment) => segment.ident.to_string() == \"inherits\",\n\n None => false,\n\n });\n\n if !is_subclass {\n\n if let Fields::Named(ref fields) = struct_item.fields {\n\n if let Some(ref first) = fields.named.first() {\n\n if let Some(ref ident) = first.ident {\n\n if ident.to_string() == \"parent\" {\n\n is_subclass = true;\n\n }\n\n }\n\n }\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 53, "score": 151151.45576593984 }, { "content": "#[proc_macro_attribute]\n\npub fn method_override(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let original = parse_macro_input!(attr as Ident);\n\n let func = parse_macro_input!(item as ItemFn);\n\n // cargo doesn't tell us the target, so we have to use cfg_attr as a workaround\n\n let result = quote_spanned! 
{func.span()=>\n\n #[cfg_attr(target_os = \"windows\", ::mbx::__impl_method_override_windows(#original))]\n\n #[cfg_attr(target_os = \"macos\", ::mbx::__impl_method_override_macos(#original))]\n\n #func\n\n };\n\n result.into()\n\n}\n\n\n\n/// Emulates inheritance by generating a field for a parent type along with a\n\n/// Deref implementation.\n\n///\n\n/// Example:\n\n///\n\n/// #[repr(C)]\n\n/// #[inherits(SimObject)]\n\n/// pub struct NetObject {\n\n/// ...\n\n/// }\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 54, "score": 149591.55671953544 }, { "content": "#[proc_macro_attribute]\n\npub fn plugin_main(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let func = parse_macro_input!(item as ItemFn);\n\n let func_ident = &func.sig.ident;\n\n let invocation = match func.sig.output {\n\n ReturnType::Default => {\n\n quote_spanned! {func.sig.span()=>\n\n #func_ident(&plugin);\n\n ::mbx::ffi::MBX_Status_MBX_OK\n\n }\n\n }\n\n _ => {\n\n quote_spanned! {func.sig.span()=>\n\n match #func_ident(&plugin) {\n\n ::std::result::Result::Ok(_) => ::mbx::ffi::MBX_Status_MBX_OK,\n\n ::std::result::Result::Err(err) => {\n\n plugin.set_error(err);\n\n ::mbx::ffi::MBX_Status_MBX_ERROR\n\n }\n\n }\n\n }\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 55, "score": 149591.55671953544 }, { "content": "#[proc_macro_attribute]\n\npub fn virtual_destructor(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let struct_item = parse_macro_input!(item as ItemStruct);\n\n\n\n // This macro can be used inside mbx\n\n let c = if env::var(\"CARGO_PKG_NAME\").unwrap() == \"mbx\" {\n\n quote!(crate)\n\n } else {\n\n quote!(::mbx)\n\n };\n\n\n\n let struct_ident = &struct_item.ident;\n\n let result = quote! 
{\n\n #struct_item\n\n\n\n impl Drop for #struct_ident {\n\n #[inline]\n\n fn drop(&mut self) {\n\n unsafe {\n\n let destructor = #c::interop::Vtable::vtable(self).__destructor;\n\n destructor.invoke(self);\n\n }\n\n }\n\n }\n\n };\n\n result.into()\n\n}\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 56, "score": 149591.55671953544 }, { "content": "fn do_method_override(attr: TokenStream, item: TokenStream, windows: bool) -> TokenStream {\n\n let original = parse_macro_input!(attr as Ident);\n\n let mut func = parse_macro_input!(item as ItemFn);\n\n\n\n // Force unsafe\n\n if func.sig.unsafety.is_none() {\n\n return Error::new(func.sig.span(), \"method overrides must be `unsafe`\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n\n\n if let Some(ref abi) = func.sig.abi {\n\n return Error::new(abi.span(), \"method overrides must not specify an ABI\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n\n\n if windows {\n\n // Use fastcall to grab ecx (this) and edx and then insert an edx argument\n\n func.sig.abi = Some(parse_quote!(extern \"fastcall\"));\n", "file_path": "MBExtender/rust/mbx-proc/src/lib.rs", "rank": 57, "score": 146563.79914041757 }, { "content": "#[inline]\n\npub fn leak_vec_ptr<T>(vec: Vec<T>) -> *mut T {\n\n Box::leak(vec.into_boxed_slice()).as_mut_ptr()\n\n}\n\n\n\n/// Constructs a Box with uninitialized contents.\n", "file_path": "MBExtender/rust/mbx/src/util.rs", "rank": 58, "score": 141607.2427039291 }, { "content": "/// Collect console command arguments into a vector of string slices.\n\n/// Arguments which cannot be represented as UTF-8 will default to an empty string.\n\npub fn collect_args<'a>(argc: i32, argv: Argv) -> Vec<&'a str> {\n\n unsafe { slice::from_raw_parts(argv, argc as usize) }\n\n .into_iter()\n\n .map(|s| unsafe { CStr::from_ptr(*s).to_str().unwrap_or_default() })\n\n .collect()\n\n}\n\n\n", "file_path": "MBExtender/rust/mbx/src/con_/command.rs", "rank": 59, "score": 136931.50651453063 }, { "content": 
"#[command(args = 2, usage = \"loadMBPackage(package)\")]\n\nfn loadMBPackage(_obj: *mut SimObject, argc: i32, argv: con::Argv) {\n\n let args = con::collect_args(argc, argv);\n\n let packagename = String::from(args[1]);\n\n let zipn = packagename.clone() + \".mbpak\";\n\n let path = format!(\"packages/{}\", zipn);\n\n\n\n let pak = mbpakfile::MBPakFile::new(&path, keystore::KeyStore::Load());\n\n if pak.is_ok() {\n\n loadedPackages.lock().unwrap().push(pak.unwrap());\n\n con::print(format!(\"Package {} loaded\", packagename));\n\n } else {\n\n con::error(format!(\"Could not load package {}\", packagename));\n\n }\n\n}\n\n\n", "file_path": "MBExtender/rust/mbcrypt/src/lib.rs", "rank": 60, "score": 135400.53551061478 }, { "content": "#[command(class = \"Marble\", args = 2, usage = \"%marble.rustPrintVelocity()\")]\n\nfn rustPrintVelocity(obj: *mut SimObject, _argc: i32, _argv: con::Argv) {\n\n let marble: &Marble = unsafe { &*obj.cast() };\n\n info!(\"Velocity: {:?}\", marble.velocity());\n\n}\n\n\n\nextern \"C\" fn game_start() {\n\n info!(\"Rust game_start()\");\n\n}\n\n\n\n#[fn_override(original_create_window)]\n\nunsafe fn my_create_window(width: i32, height: i32, fullscreen: bool) -> *mut c_void {\n\n info!(\"Rust createWindow({}, {}, {})\", width, height, fullscreen);\n\n original_create_window(width, height, fullscreen)\n\n}\n\n\n\n#[method_override(original_marble_do_power_up)]\n\nunsafe fn my_marble_do_power_up(this: &mut Marble, id: i32) {\n\n info!(\"Rust Marble::doPowerUp({})\", id);\n\n let acr = vcall!(this, get_class_rep);\n\n info!(\"Marble::getClassRep() = {:#x}\", acr as usize);\n\n original_marble_do_power_up(this, id)\n\n}\n\n\n\ntge_functions! 
{\n\n fn tge_create_window(width: i32, height: i32, fullscreen: bool) -> *mut c_void = tge_addr!(0x4065a0, 0);\n\n}\n\n\n", "file_path": "MBExtender/rust/rust-plugin/src/lib.rs", "rank": 61, "score": 133650.12620538913 }, { "content": "/// Saves a convex hull accelerator to a file.\n\nfn save_accelerator(path: &Path, accel: &ConvexHullAccelerator) -> Result<(), Box<dyn Error>> {\n\n let file = File::create(path)?;\n\n let mut file = BufWriter::new(file);\n\n\n\n io::write_val(&mut file, &CHA_MAGIC)?;\n\n io::write_val(&mut file, &CHA_VERSION)?;\n\n\n\n io::write_val(&mut file, &accel.num_verts)?;\n\n io::write_array(&mut file, accel.vertex_list, accel.num_verts as usize)?;\n\n\n\n let raw_strings =\n\n unsafe { slice::from_raw_parts(accel.emit_strings, accel.num_verts as usize) };\n\n let strings: Vec<EmitString> =\n\n raw_strings.into_iter().map(|s| unsafe { EmitString::from_ptr(*s) }).collect();\n\n\n\n let num_faces = count_faces(&strings);\n\n io::write_val(&mut file, &num_faces)?;\n\n io::write_array(&mut file, accel.normal_list, num_faces as usize)?;\n\n\n\n for string in strings {\n", "file_path": "MBExtender/rust/speed-boostair/src/accel.rs", "rank": 62, "score": 133633.80484842515 }, { "content": "/// Loads a convex hull accelerator from a file.\n\nfn load_accelerator(path: &Path) -> Result<Box<ConvexHullAccelerator>, Box<dyn Error>> {\n\n let file = File::open(path)?;\n\n let mut file = BufReader::new(file);\n\n\n\n let magic: [u8; 4] = io::read_val(&mut file)?;\n\n if magic != CHA_MAGIC {\n\n return Err(\"bad magic\".into());\n\n }\n\n let version: u32 = io::read_val(&mut file)?;\n\n if version != CHA_VERSION {\n\n return Err(\"unsupported version\".into());\n\n }\n\n\n\n let num_verts: i32 = io::read_val(&mut file)?;\n\n if num_verts < 0 {\n\n return Err(\"bad vertex count\".into());\n\n }\n\n let verts: Vec<Point3F> = io::read_array(&mut file, num_verts as usize)?;\n\n\n\n let num_faces: i32 = io::read_val(&mut file)?;\n", "file_path": 
"MBExtender/rust/speed-boostair/src/accel.rs", "rank": 63, "score": 133633.80484842515 }, { "content": "struct EnumToType\n\n{\n\n\tstatic ENUM_TYPE ToEnum() {return static_cast<ENUM_TYPE>(VALUE);}\n\n};\n\n\n", "file_path": "MBExtender/external/cryptopp/cryptlib.h", "rank": 64, "score": 133487.10327956657 }, { "content": "class StreamLocalCopy<Stream, 0> {\n\npublic:\n\n StreamLocalCopy(Stream& original) : s(original) {}\n\n\n\n Stream& s;\n\n\n\nprivate:\n\n StreamLocalCopy& operator=(const StreamLocalCopy&) /* = delete */;\n\n};\n\n\n\n} // namespace internal\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n\n// SkipWhitespace\n\n\n\n//! Skip the JSON white spaces in a stream.\n\n/*! \\param is A input stream for skipping white spaces.\n\n \\note This function has SSE2/SSE4.2 specialization.\n\n*/\n\ntemplate<typename InputStream>\n", "file_path": "MBExtender/external/rapidjson/include/rapidjson/reader.h", "rank": 65, "score": 133162.15886035364 }, { "content": "#[command(args = 3, usage = \"rustAdd(x, y)\")]\n\nfn rustAdd(_obj: *mut SimObject, argc: i32, argv: con::Argv) -> i32 {\n\n let args = con::collect_args(argc, argv);\n\n args[1].parse::<i32>().unwrap_or(0) + args[2].parse::<i32>().unwrap_or(0)\n\n}\n\n\n", "file_path": "MBExtender/rust/rust-plugin/src/lib.rs", "rank": 66, "score": 131505.4769832604 }, { "content": " int error;\n", "file_path": "MBExtender/external/SDL2/src/SDL_error_c.h", "rank": 67, "score": 129020.15207737965 }, { "content": " enum ud_type type;\n", "file_path": "MBExtender/external/udis86/udis86/libudis86/types.h", "rank": 68, "score": 128852.0192612037 }, { "content": "class ElGamalObjectImpl :\n\n\tpublic DL_ObjectImplBase<BASE, SCHEME_OPTIONS, KEY>,\n\n\tpublic ElGamalBase\n\n{\n\npublic:\n\n\tvirtual ~ElGamalObjectImpl() {}\n\n\n\n\tsize_t FixedMaxPlaintextLength() const {return this->MaxPlaintextLength(FixedCiphertextLength());}\n\n\tsize_t FixedCiphertextLength() const {return 
this->CiphertextLength(0);}\n\n\n\n\tconst DL_GroupParameters_GFP & GetGroupParameters() const {return this->GetKey().GetGroupParameters();}\n\n\n\n\tDecodingResult FixedLengthDecrypt(RandomNumberGenerator &rng, const byte *cipherText, byte *plainText) const\n\n\t\t{return Decrypt(rng, cipherText, FixedCiphertextLength(), plainText);}\n\n\n\nprotected:\n\n\tconst DL_KeyAgreementAlgorithm<Integer> & GetKeyAgreementAlgorithm() const {return *this;}\n\n\tconst DL_KeyDerivationAlgorithm<Integer> & GetKeyDerivationAlgorithm() const {return *this;}\n\n\tconst DL_SymmetricEncryptionAlgorithm & GetSymmetricEncryptionAlgorithm() const {return *this;}\n\n};\n", "file_path": "MBExtender/external/cryptopp/elgamal.h", "rank": 69, "score": 128290.11137950337 }, { "content": "class CodeStream;\n\n} // namespace MBX\n\n\n", "file_path": "MBExtender/src/PluginLoader/PluginImpl.h", "rank": 70, "score": 128258.9996530126 }, { "content": "#[command(args = 2, usage = \"rustReverse(str)\")]\n\nfn rustReverse(_obj: *mut SimObject, argc: i32, argv: con::Argv) -> *const c_char {\n\n let args = con::collect_args(argc, argv);\n\n let reversed: String = args[1].chars().rev().collect();\n\n con::get_return_buffer(&reversed)\n\n}\n\n\n", "file_path": "MBExtender/rust/rust-plugin/src/lib.rs", "rank": 71, "score": 127974.17403135225 }, { "content": "int\n\nSDL_SetPixelFormatPalette(SDL_PixelFormat * format, SDL_Palette *palette)\n\n{\n\n if (!format) {\n\n return SDL_SetError(\"SDL_SetPixelFormatPalette() passed NULL format\");\n\n }\n\n\n\n if (palette && palette->ncolors != (1 << format->BitsPerPixel)) {\n\n return SDL_SetError(\"SDL_SetPixelFormatPalette() passed a palette that doesn't match the format\");\n\n }\n\n\n\n if (format->palette == palette) {\n\n return 0;\n\n }\n\n\n\n if (format->palette) {\n\n SDL_FreePalette(format->palette);\n\n }\n\n\n\n format->palette = palette;\n\n\n\n if (format->palette) {\n\n ++format->palette->refcount;\n\n }\n\n\n\n return 0;\n", "file_path": 
"MBExtender/external/SDL2/src/video/SDL_pixels.c", "rank": 72, "score": 125804.24698823458 }, { "content": "const char*\n\nSDL_GetPixelFormatName(Uint32 format)\n\n{\n\n switch (format) {\n\n#define CASE(X) case X: return #X;\n\n CASE(SDL_PIXELFORMAT_INDEX1LSB)\n\n CASE(SDL_PIXELFORMAT_INDEX1MSB)\n\n CASE(SDL_PIXELFORMAT_INDEX4LSB)\n\n CASE(SDL_PIXELFORMAT_INDEX4MSB)\n\n CASE(SDL_PIXELFORMAT_INDEX8)\n\n CASE(SDL_PIXELFORMAT_RGB332)\n\n CASE(SDL_PIXELFORMAT_RGB444)\n\n CASE(SDL_PIXELFORMAT_RGB555)\n\n CASE(SDL_PIXELFORMAT_BGR555)\n\n CASE(SDL_PIXELFORMAT_ARGB4444)\n\n CASE(SDL_PIXELFORMAT_RGBA4444)\n\n CASE(SDL_PIXELFORMAT_ABGR4444)\n\n CASE(SDL_PIXELFORMAT_BGRA4444)\n\n CASE(SDL_PIXELFORMAT_ARGB1555)\n\n CASE(SDL_PIXELFORMAT_RGBA5551)\n\n CASE(SDL_PIXELFORMAT_ABGR1555)\n\n CASE(SDL_PIXELFORMAT_BGRA5551)\n\n CASE(SDL_PIXELFORMAT_RGB565)\n\n CASE(SDL_PIXELFORMAT_BGR565)\n\n CASE(SDL_PIXELFORMAT_RGB24)\n\n CASE(SDL_PIXELFORMAT_BGR24)\n\n CASE(SDL_PIXELFORMAT_RGB888)\n\n CASE(SDL_PIXELFORMAT_RGBX8888)\n\n CASE(SDL_PIXELFORMAT_BGR888)\n\n CASE(SDL_PIXELFORMAT_BGRX8888)\n\n CASE(SDL_PIXELFORMAT_ARGB8888)\n\n CASE(SDL_PIXELFORMAT_RGBA8888)\n\n CASE(SDL_PIXELFORMAT_ABGR8888)\n\n CASE(SDL_PIXELFORMAT_BGRA8888)\n\n CASE(SDL_PIXELFORMAT_ARGB2101010)\n\n CASE(SDL_PIXELFORMAT_YV12)\n\n CASE(SDL_PIXELFORMAT_IYUV)\n\n CASE(SDL_PIXELFORMAT_YUY2)\n\n CASE(SDL_PIXELFORMAT_UYVY)\n\n CASE(SDL_PIXELFORMAT_YVYU)\n\n CASE(SDL_PIXELFORMAT_NV12)\n\n CASE(SDL_PIXELFORMAT_NV21)\n\n#undef CASE\n\n default:\n\n return \"SDL_PIXELFORMAT_UNKNOWN\";\n\n }\n", "file_path": "MBExtender/external/SDL2/src/video/SDL_pixels.c", "rank": 73, "score": 125804.24698823458 }, { "content": "sub outputHeader {\n\n print <<EOF;\n\n/* DO NOT EDIT! This file is generated by sdlgenaudiocvt.pl */\n\n/*\n\n Simple DirectMedia Layer\n\n Copyright (C) 1997-2016 Sam Lantinga <slouken\\@libsdl.org>\n\n\n\n This software is provided 'as-is', without any express or implied\n\n warranty. 
In no event will the authors be held liable for any damages\n\n arising from the use of this software.\n\n\n\n Permission is granted to anyone to use this software for any purpose,\n\n including commercial applications, and to alter it and redistribute it\n\n freely, subject to the following restrictions:\n\n\n\n 1. The origin of this software must not be misrepresented; you must not\n\n claim that you wrote the original software. If you use this software\n\n in a product, an acknowledgment in the product documentation would be\n\n appreciated but is not required.\n\n 2. Altered source versions must be plainly marked as such, and must not be\n", "file_path": "MBExtender/external/SDL2/src/audio/sdlgenaudiocvt.pl", "rank": 74, "score": 125787.13076469665 }, { "content": "//==============================================================================\n\nstruct PE_DOS_HEADER {\n\n\tWORD Signature;\n\n\tWORD LastPageBytes;\n\n\tWORD NumberOfPages;\n\n\tWORD Relocations;\n\n\tWORD HeaderSize;\n\n\tWORD MinMemory;\n\n\tWORD MaxMemory;\n\n\tWORD InitialSS;\n\n\tWORD InitialSP;\n\n\tWORD Checksum;\n\n\tWORD InitialIP;\n\n\tWORD InitialCS;\n\n\tWORD RelocTableOffset;\n\n\tWORD Overlay;\n\n\tWORD Reserved1[4];\n\n\tWORD OemId;\n\n\tWORD OemInfo;\n\n\tWORD Reserved2[10];\n\n\tLONG PEHeaderOffset;\n\n};\n", "file_path": "MBExtender/src/MBGPatcher/PEFile.h", "rank": 75, "score": 125787.13076469665 }, { "content": "static int on_header(nghttp2_session *session, const nghttp2_frame *frame,\n\n const uint8_t *name, size_t namelen,\n\n const uint8_t *value, size_t valuelen,\n\n uint8_t flags,\n\n void *userp)\n\n{\n\n struct HTTP *stream;\n\n struct Curl_easy *data_s;\n\n int32_t stream_id = frame->hd.stream_id;\n\n struct connectdata *conn = (struct connectdata *)userp;\n\n (void)flags;\n\n\n\n DEBUGASSERT(stream_id); /* should never be a zero stream ID here */\n\n\n\n /* get the stream from the hash based on Stream ID */\n\n data_s = nghttp2_session_get_stream_user_data(session, 
stream_id);\n\n if(!data_s)\n\n /* Receiving a Stream ID not in the hash should not happen, this is an\n\n internal error more than anything else! */\n\n return NGHTTP2_ERR_CALLBACK_FAILURE;\n\n\n\n stream = data_s->req.protop;\n\n if(!stream) {\n\n failf(data_s, \"Internal NULL stream! 5\\n\");\n\n return NGHTTP2_ERR_CALLBACK_FAILURE;\n\n }\n\n\n\n /* Store received PUSH_PROMISE headers to be used when the subsequent\n\n PUSH_PROMISE callback comes */\n\n if(frame->hd.type == NGHTTP2_PUSH_PROMISE) {\n\n char *h;\n\n\n\n if(!stream->push_headers) {\n\n stream->push_headers_alloc = 10;\n\n stream->push_headers = malloc(stream->push_headers_alloc *\n\n sizeof(char *));\n\n stream->push_headers_used = 0;\n\n }\n\n else if(stream->push_headers_used ==\n\n stream->push_headers_alloc) {\n\n char **headp;\n\n stream->push_headers_alloc *= 2;\n\n headp = Curl_saferealloc(stream->push_headers,\n\n stream->push_headers_alloc * sizeof(char *));\n\n if(!headp) {\n\n stream->push_headers = NULL;\n\n return NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE;\n\n }\n\n stream->push_headers = headp;\n\n }\n\n h = aprintf(\"%s:%s\", name, value);\n\n if(h)\n\n stream->push_headers[stream->push_headers_used++] = h;\n\n return 0;\n\n }\n\n\n\n if(stream->bodystarted) {\n\n /* This is trailer fields. */\n\n /* 3 is for \":\" and \"\\r\\n\". 
*/\n\n uint32_t n = (uint32_t)(namelen + valuelen + 3);\n\n\n\n DEBUGF(infof(data_s, \"h2 trailer: %.*s: %.*s\\n\", namelen, name, valuelen,\n\n value));\n\n\n\n Curl_add_buffer(stream->trailer_recvbuf, &n, sizeof(n));\n\n Curl_add_buffer(stream->trailer_recvbuf, name, namelen);\n\n Curl_add_buffer(stream->trailer_recvbuf, \": \", 2);\n\n Curl_add_buffer(stream->trailer_recvbuf, value, valuelen);\n\n Curl_add_buffer(stream->trailer_recvbuf, \"\\r\\n\\0\", 3);\n\n\n\n return 0;\n\n }\n\n\n\n if(namelen == sizeof(\":status\") - 1 &&\n\n memcmp(\":status\", name, namelen) == 0) {\n\n /* nghttp2 guarantees :status is received first and only once, and\n\n value is 3 digits status code, and decode_status_code always\n\n succeeds. */\n\n stream->status_code = decode_status_code(value, valuelen);\n\n DEBUGASSERT(stream->status_code != -1);\n\n\n\n Curl_add_buffer(stream->header_recvbuf, \"HTTP/2 \", 7);\n\n Curl_add_buffer(stream->header_recvbuf, value, valuelen);\n\n /* the space character after the status code is mandatory */\n\n Curl_add_buffer(stream->header_recvbuf, \" \\r\\n\", 3);\n\n /* if we receive data for another handle, wake that up */\n\n if(conn->data != data_s)\n\n Curl_expire(data_s, 0, EXPIRE_RUN_NOW);\n\n\n\n DEBUGF(infof(data_s, \"h2 status: HTTP/2 %03d (easy %p)\\n\",\n\n stream->status_code, data_s));\n\n return 0;\n\n }\n\n\n\n /* nghttp2 guarantees that namelen > 0, and :status was already\n\n received, and this is not pseudo-header field . 
*/\n\n /* convert to a HTTP1-style header */\n\n Curl_add_buffer(stream->header_recvbuf, name, namelen);\n\n Curl_add_buffer(stream->header_recvbuf, \": \", 2);\n\n Curl_add_buffer(stream->header_recvbuf, value, valuelen);\n\n Curl_add_buffer(stream->header_recvbuf, \"\\r\\n\", 2);\n\n /* if we receive data for another handle, wake that up */\n\n if(conn->data != data_s)\n\n Curl_expire(data_s, 0, EXPIRE_RUN_NOW);\n\n\n\n DEBUGF(infof(data_s, \"h2 header: %.*s: %.*s\\n\", namelen, name, valuelen,\n\n value));\n\n\n\n return 0; /* 0 is successful */\n", "file_path": "MBExtender/external/curl/lib/http2.c", "rank": 76, "score": 121068.31681583586 }, { "content": "struct IsGenericValueImpl : FalseType {};\n\n\n\n// select candidates according to nested encoding and allocator types\n\ntemplate <typename T> struct IsGenericValueImpl<T, typename Void<typename T::EncodingType>::Type, typename Void<typename T::AllocatorType>::Type>\n\n : IsBaseOf<GenericValue<typename T::EncodingType, typename T::AllocatorType>, T>::Type {};\n\n\n\n// helper to match arbitrary GenericValue instantiations, including derived classes\n\ntemplate <typename T> struct IsGenericValue : IsGenericValueImpl<T>::Type {};\n\n\n\n} // namespace internal\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n\n// TypeHelper\n\n\n\nnamespace internal {\n\n\n\ntemplate <typename ValueType, typename T>\n", "file_path": "MBExtender/external/rapidjson/include/rapidjson/document.h", "rank": 77, "score": 120901.45366190442 }, { "content": "static CURLcode header_append(struct Curl_easy *data,\n\n struct SingleRequest *k,\n\n size_t length)\n\n{\n\n if(k->hbuflen + length >= data->state.headersize) {\n\n /* We enlarge the header buffer as it is too small */\n\n char *newbuff;\n\n size_t hbufp_index;\n\n size_t newsize;\n\n\n\n if(k->hbuflen + length > CURL_MAX_HTTP_HEADER) {\n\n /* The reason to have a max limit for this is to avoid the risk of a bad\n\n server feeding libcurl 
with a never-ending header that will cause\n\n reallocs infinitely */\n\n failf(data, \"Avoided giant realloc for header (max is %d)!\",\n\n CURL_MAX_HTTP_HEADER);\n\n return CURLE_OUT_OF_MEMORY;\n\n }\n\n\n\n newsize = CURLMAX((k->hbuflen + length) * 3 / 2, data->state.headersize*2);\n\n hbufp_index = k->hbufp - data->state.headerbuff;\n\n newbuff = realloc(data->state.headerbuff, newsize);\n\n if(!newbuff) {\n\n failf(data, \"Failed to alloc memory for big header!\");\n\n return CURLE_OUT_OF_MEMORY;\n\n }\n\n data->state.headersize = newsize;\n\n data->state.headerbuff = newbuff;\n\n k->hbufp = data->state.headerbuff + hbufp_index;\n\n }\n\n memcpy(k->hbufp, k->str_start, length);\n\n k->hbufp += length;\n\n k->hbuflen += length;\n\n *k->hbufp = 0;\n\n\n\n return CURLE_OK;\n", "file_path": "MBExtender/external/curl/lib/http.c", "rank": 78, "score": 118826.49261947455 }, { "content": "#define STDC_HEADERS 1\n\n\n", "file_path": "MBExtender/external/udis86/udis86/config.h", "rank": 79, "score": 118824.46798636661 }, { "content": "static header_instruction inspect_header(const char *name, size_t namelen,\n\n const char *value, size_t valuelen) {\n\n switch(namelen) {\n\n case 2:\n\n if(!strncasecompare(\"te\", name, namelen))\n\n return HEADERINST_FORWARD;\n\n\n\n return contains_trailers(value, valuelen) ?\n\n HEADERINST_TE_TRAILERS : HEADERINST_IGNORE;\n\n case 7:\n\n return strncasecompare(\"upgrade\", name, namelen) ?\n\n HEADERINST_IGNORE : HEADERINST_FORWARD;\n\n case 10:\n\n return (strncasecompare(\"connection\", name, namelen) ||\n\n strncasecompare(\"keep-alive\", name, namelen)) ?\n\n HEADERINST_IGNORE : HEADERINST_FORWARD;\n\n case 16:\n\n return strncasecompare(\"proxy-connection\", name, namelen) ?\n\n HEADERINST_IGNORE : HEADERINST_FORWARD;\n\n case 17:\n\n return strncasecompare(\"transfer-encoding\", name, namelen) ?\n\n HEADERINST_IGNORE : HEADERINST_FORWARD;\n\n default:\n\n return HEADERINST_FORWARD;\n\n }\n", "file_path": 
"MBExtender/external/curl/lib/http2.c", "rank": 80, "score": 118824.46798636661 }, { "content": " long header_size; /* size of read header(s) in bytes */\n", "file_path": "MBExtender/external/curl/lib/urldata.h", "rank": 81, "score": 118816.98088967393 }, { "content": "void gen_trees_header()\n\n{\n\n FILE *header = fopen(\"trees.h\", \"w\");\n\n int i;\n\n\n\n Assert (header != NULL, \"Can't open trees.h\");\n\n fprintf(header,\n\n \"/* header created automatically with -DGEN_TREES_H */\\n\\n\");\n\n\n\n fprintf(header, \"local const ct_data static_ltree[L_CODES+2] = {\\n\");\n\n for (i = 0; i < L_CODES+2; i++) {\n\n fprintf(header, \"{{%3u},{%3u}}%s\", static_ltree[i].Code,\n\n static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));\n\n }\n\n\n\n fprintf(header, \"local const ct_data static_dtree[D_CODES] = {\\n\");\n\n for (i = 0; i < D_CODES; i++) {\n\n fprintf(header, \"{{%2u},{%2u}}%s\", static_dtree[i].Code,\n\n static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));\n\n }\n\n\n\n fprintf(header, \"const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\\n\");\n\n for (i = 0; i < DIST_CODE_LEN; i++) {\n\n fprintf(header, \"%2u%s\", _dist_code[i],\n\n SEPARATOR(i, DIST_CODE_LEN-1, 20));\n\n }\n\n\n\n fprintf(header,\n\n \"const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\\n\");\n\n for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {\n\n fprintf(header, \"%2u%s\", _length_code[i],\n\n SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));\n\n }\n\n\n\n fprintf(header, \"local const int base_length[LENGTH_CODES] = {\\n\");\n\n for (i = 0; i < LENGTH_CODES; i++) {\n\n fprintf(header, \"%1u%s\", base_length[i],\n\n SEPARATOR(i, LENGTH_CODES-1, 20));\n\n }\n\n\n\n fprintf(header, \"local const int base_dist[D_CODES] = {\\n\");\n\n for (i = 0; i < D_CODES; i++) {\n\n fprintf(header, \"%5u%s\", base_dist[i],\n\n SEPARATOR(i, D_CODES-1, 10));\n\n }\n\n\n\n fclose(header);\n", "file_path": "MBExtender/external/zlib/trees.c", "rank": 82, "score": 118810.49509554419 }, { "content": " bool 
sep_headers; /* handle host and proxy headers separately */\n", "file_path": "MBExtender/external/curl/lib/urldata.h", "rank": 83, "score": 118810.49509554419 }, { "content": "static int on_begin_headers(nghttp2_session *session,\n\n const nghttp2_frame *frame, void *userp)\n\n{\n\n struct HTTP *stream;\n\n struct Curl_easy *data_s = NULL;\n\n (void)userp;\n\n\n\n data_s = nghttp2_session_get_stream_user_data(session, frame->hd.stream_id);\n\n if(!data_s) {\n\n return 0;\n\n }\n\n\n\n DEBUGF(infof(data_s, \"on_begin_headers() was called\\n\"));\n\n\n\n if(frame->hd.type != NGHTTP2_HEADERS) {\n\n return 0;\n\n }\n\n\n\n stream = data_s->req.protop;\n\n if(!stream || !stream->bodystarted) {\n\n return 0;\n\n }\n\n\n\n /* This is trailer HEADERS started. Allocate buffer for them. */\n\n DEBUGF(infof(data_s, \"trailer field started\\n\"));\n\n\n\n DEBUGASSERT(stream->trailer_recvbuf == NULL);\n\n\n\n stream->trailer_recvbuf = Curl_add_buffer_init();\n\n if(!stream->trailer_recvbuf) {\n\n return NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE;\n\n }\n\n\n\n return 0;\n", "file_path": "MBExtender/external/curl/lib/http2.c", "rank": 84, "score": 118810.49509554419 }, { "content": " mz_uint8 m_raw_header[4], m_len_codes[TINFL_MAX_HUFF_SYMBOLS_0 + TINFL_MAX_HUFF_SYMBOLS_1 + 137];\n", "file_path": "MBExtender/plugins/FileExtension/miniz.c", "rank": 85, "score": 118810.49509554419 }, { "content": " curl_write_callback fwrite_header; /* function that stores headers */\n", "file_path": "MBExtender/external/curl/lib/urldata.h", "rank": 86, "score": 118810.49509554419 }, { "content": " bool include_header; /* include received protocol headers in data output */\n", "file_path": "MBExtender/external/curl/lib/urldata.h", "rank": 87, "score": 118810.49509554419 }, { "content": " size_t used; // count of pages in use (`used <= capacity`)\n", "file_path": "MBExtender/external/mimalloc/include/mimalloc-types.h", "rank": 88, "score": 118674.75520134636 }, { "content": "\tclass 
Stream\n\n\t{\n\n\t\tBRIDGE_CLASS(Stream);\n\n\tpublic:\n\n\t\t/// Status constants for the stream\n\n\t\tenum StreamStatus\n\n\t\t{\n\n\t\t\tOk = 0, ///< Ok!\n\n\t\t\tIOError, ///< Read or Write error\n\n\t\t\tEOS, ///< End of Stream reached (mostly for reads)\n\n\t\t\tIllegalCall, ///< An unsupported operation used. Always w/ accompanied by AssertWarn\n\n\t\t\tClosed, ///< Tried to operate on a closed stream (or detached filter)\n\n\t\t\tUnknownError ///< Catchall\n\n\t\t};\n\n\n\n\t\tStreamStatus m_streamStatus;\n\n\n\n\t\tGETTERFN(StreamStatus, getStatus, 0x4);\n\n\n\n\t\tvirtual ~Stream() = 0;\n", "file_path": "MBExtender/src/TorqueLib/include/TorqueLib/core/stream.h", "rank": 89, "score": 118341.77677634015 }, { "content": "static CURLcode\n\noutput_auth_headers(struct connectdata *conn,\n\n struct auth *authstatus,\n\n const char *request,\n\n const char *path,\n\n bool proxy)\n\n{\n\n const char *auth = NULL;\n\n CURLcode result = CURLE_OK;\n\n#if !defined(CURL_DISABLE_VERBOSE_STRINGS) || defined(USE_SPNEGO)\n\n struct Curl_easy *data = conn->data;\n\n#endif\n\n#ifdef USE_SPNEGO\n\n struct negotiatedata *negdata = proxy ?\n\n &data->state.proxyneg : &data->state.negotiate;\n\n#endif\n\n\n\n#ifdef CURL_DISABLE_CRYPTO_AUTH\n\n (void)request;\n\n (void)path;\n\n#endif\n\n\n\n#ifdef USE_SPNEGO\n\n negdata->state = GSS_AUTHNONE;\n\n if((authstatus->picked == CURLAUTH_NEGOTIATE) &&\n\n negdata->context && !GSS_ERROR(negdata->status)) {\n\n auth = \"Negotiate\";\n\n result = Curl_output_negotiate(conn, proxy);\n\n if(result)\n\n return result;\n\n authstatus->done = TRUE;\n\n negdata->state = GSS_AUTHSENT;\n\n }\n\n else\n\n#endif\n\n#ifdef USE_NTLM\n\n if(authstatus->picked == CURLAUTH_NTLM) {\n\n auth = \"NTLM\";\n\n result = Curl_output_ntlm(conn, proxy);\n\n if(result)\n\n return result;\n\n }\n\n else\n\n#endif\n\n#if defined(USE_NTLM) && defined(NTLM_WB_ENABLED)\n\n if(authstatus->picked == CURLAUTH_NTLM_WB) {\n\n auth = \"NTLM_WB\";\n\n result = 
Curl_output_ntlm_wb(conn, proxy);\n\n if(result)\n\n return result;\n\n }\n\n else\n\n#endif\n\n#ifndef CURL_DISABLE_CRYPTO_AUTH\n\n if(authstatus->picked == CURLAUTH_DIGEST) {\n\n auth = \"Digest\";\n\n result = Curl_output_digest(conn,\n\n proxy,\n\n (const unsigned char *)request,\n\n (const unsigned char *)path);\n\n if(result)\n\n return result;\n\n }\n\n else\n\n#endif\n\n if(authstatus->picked == CURLAUTH_BASIC) {\n\n /* Basic */\n\n if((proxy && conn->bits.proxy_user_passwd &&\n\n !Curl_checkProxyheaders(conn, \"Proxy-authorization:\")) ||\n\n (!proxy && conn->bits.user_passwd &&\n\n !Curl_checkheaders(conn, \"Authorization:\"))) {\n\n auth = \"Basic\";\n\n result = http_output_basic(conn, proxy);\n\n if(result)\n\n return result;\n\n }\n\n\n\n /* NOTE: this function should set 'done' TRUE, as the other auth\n\n functions work that way */\n\n authstatus->done = TRUE;\n\n }\n\n\n\n if(auth) {\n\n infof(data, \"%s auth using %s with user '%s'\\n\",\n\n proxy ? \"Proxy\" : \"Server\", auth,\n\n proxy ? (conn->http_proxy.user ? conn->http_proxy.user : \"\") :\n\n (conn->user ? conn->user : \"\"));\n\n authstatus->multipass = (!authstatus->done) ? 
TRUE : FALSE;\n\n }\n\n else\n\n authstatus->multipass = FALSE;\n\n\n\n return CURLE_OK;\n", "file_path": "MBExtender/external/curl/lib/http.c", "rank": 90, "score": 116673.69154540128 }, { "content": " bool include_headers; /* send headers to data output */\n", "file_path": "MBExtender/external/curl/src/tool_cfgable.h", "rank": 91, "score": 116660.9541932555 }, { "content": " bool suppress_connect_headers; /* suppress proxy CONNECT response headers\n", "file_path": "MBExtender/external/curl/lib/urldata.h", "rank": 92, "score": 116653.80624967098 }, { "content": "static void mi_print_header(mi_output_fun* out, void* arg ) {\n\n _mi_fprintf(out, arg, \"%10s: %10s %10s %10s %10s %10s\\n\", \"heap stats\", \"peak \", \"total \", \"freed \", \"unit \", \"count \");\n", "file_path": "MBExtender/external/mimalloc/src/stats.c", "rank": 93, "score": 116653.80624967098 }, { "content": " mz_uint64 m_local_header_ofs;\n", "file_path": "MBExtender/plugins/FileExtension/miniz.c", "rank": 94, "score": 116653.80624967098 }, { "content": " int use_tracing;\n", "file_path": "MBExtender/external/python/include/cpython/pystate.h", "rank": 95, "score": 116653.47189657432 }, { "content": " FILE *trace_stream;\n", "file_path": "MBExtender/external/curl/src/tool_cfgable.h", "rank": 96, "score": 116642.1390589189 }, { "content": "static SDL_PixelFormat *formats;\n", "file_path": "MBExtender/external/SDL2/src/video/SDL_pixels.c", "rank": 97, "score": 116621.51219965045 }, { "content": " Uint32 texture_formats[16]; /**< The available texture formats */\n", "file_path": "MBExtender/external/SDL2/include/SDL_render.h", "rank": 98, "score": 116592.4170066949 } ]
Rust
src/discard_call_lives.rs
cgswords/rsc
cdfe135888fe1c6da11a792527fa98f86224acbe
use util::Binop; use util::Relop; use util::Label; use util::Location; use util::mk_uvar; use util::Ident; use std::collections::HashMap; use finalize_locations::Program as FLProgram; use finalize_locations::LetrecEntry as FLLetrecEntry; use finalize_locations::Body as FLBody; use finalize_locations::Exp as FLExp; use finalize_locations::Effect as FLEffect; use finalize_locations::Pred as FLPred; use finalize_locations::Triv as FLTriv; use finalize_locations::Offset as FLOffset; use finalize_locations::Variable as FLVar; #[derive(Debug)] pub enum Program { Letrec(Vec<LetrecEntry>, Body) } #[derive(Debug)] pub struct LetrecEntry { pub label : Label , pub rhs : Body } #[derive(Debug)] pub struct Body { pub locations : HashMap<Ident, Location> , pub expression : Exp } #[derive(Debug)] pub enum Exp { Call(Triv, Vec<Location>) , If(Pred,Box<Exp>,Box<Exp>) , Begin(Vec<Effect>,Box<Exp>) } #[derive(Debug)] pub enum Pred { True , False , Op(Relop,Triv,Triv) , If(Box<Pred>,Box<Pred>,Box<Pred>) , Begin(Vec<Effect>, Box<Pred>) } #[derive(Debug)] pub enum Effect { SetOp(Variable, (Binop, Triv, Triv)) , Set(Variable, Triv) , Nop , MSet(Variable, Offset, Triv) , ReturnPoint(Label, Exp, i64) , If(Pred, Box<Effect>, Box<Effect>) , Begin(Box<Vec<Effect>>, Box<Effect>) } #[derive(Debug)] pub enum Variable { Loc(Location) , UVar(Ident) } #[derive(Debug)] pub enum Triv { Var(Variable) , Num(i64) , Label(Label) , MRef(Variable, Offset) } #[derive(Debug)] pub enum Offset { UVar(Ident) , Reg(Ident) , Num(i64) } pub fn discard_call_lives(input : Program) -> FLProgram { return match input { Program::Letrec(letrecs, pgm_body) => FLProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect() , body(pgm_body)) } } fn letrec_entry(input : LetrecEntry) -> FLLetrecEntry { FLLetrecEntry { label : input.label, rhs : body(input.rhs) } } fn body(input : Body) -> FLBody { FLBody { locations : input.locations , expression : exp(input.expression) } } macro_rules! 
mk_box { ($e:expr) => [Box::new($e)] } fn exp(input : Exp) -> FLExp { return match input { Exp::Call(t, _) => FLExp::Call(triv(t)) , Exp::If(test, conseq, alt) => FLExp::If(pred(test), mk_box!(exp(*conseq)), mk_box!(exp(*alt))) , Exp::Begin(effs, body) => FLExp::Begin(effs.into_iter().map(|e| effect(e)).collect(), mk_box!(exp(*body))) } } fn pred(input : Pred) -> FLPred { return match input { Pred::True => FLPred::True , Pred::False => FLPred::False , Pred::Op(op,t1,t2) => FLPred::Op(op, triv(t1), triv(t2)) , Pred::If(test, conseq, alt) => FLPred::If(mk_box!(pred(*test)), mk_box!(pred(*conseq)), mk_box!(pred(*alt))) , Pred::Begin(effs, body) => FLPred::Begin( effs.into_iter().map(|e| effect(e)).collect(), mk_box!(pred(*body))) } } fn effect(input: Effect) -> FLEffect { return match input { Effect::SetOp(l, (op, t1, t2)) => FLEffect::SetOp(var(l), (op, triv(t1), triv(t2))) , Effect::Set(l, t) => FLEffect::Set(var(l), triv(t)) , Effect::Nop => FLEffect::Nop , Effect::MSet(base, off, val) => FLEffect::MSet(var(base), offset(off), triv(val)) , Effect::ReturnPoint(lbl, body, off) => FLEffect::ReturnPoint(lbl, exp(body), off) , Effect::If(test, conseq, alt) => FLEffect::If(pred(test), mk_box!(effect(*conseq)) , mk_box!(effect(*alt))) , Effect::Begin(effs, body) => FLEffect::Begin( mk_box!((*effs).into_iter().map(|e| effect(e)).collect()) , mk_box!(effect(*body))) } } fn loc(input : Location) -> Location { return input; } fn var(input : Variable) -> FLVar { return match input { Variable::Loc(l) => FLVar::Loc(loc(l)) , Variable::UVar(uv) => FLVar::UVar(uv) } } fn triv(input : Triv) -> FLTriv { return match input { Triv::Var(v) => FLTriv::Var(var(v)) , Triv::Num(n) => FLTriv::Num(n) , Triv::Label(l) => FLTriv::Label(l) , Triv::MRef(base, off) => FLTriv::MRef(var(base), offset(off)) } } fn offset(input: Offset) -> FLOffset { return match input { Offset::UVar(uv) => FLOffset::UVar(uv) , Offset::Reg(r) => FLOffset::Reg(r) , Offset::Num(n) => FLOffset::Num(n) } } fn 
mk_num_lit(n: i64) -> Triv { return Triv::Num(n); } fn mk_fv_triv(n: i64) -> Triv { return mk_loc_triv(Location::FrameVar(n)); } fn mk_reg(s: &str) -> Variable { return Variable::Loc(mk_loc_reg(s)); } fn mk_loc_reg(s: &str) -> Location { return Location::Reg(Ident::from_str(s)); } fn mk_call(s: &str, lives: Vec<Location>) -> Exp { return Exp::Call(Triv::Label(mk_lbl(s)), lives); } fn mk_lbl(s : &str) -> Label { return Label::new(Ident::from_str(s)); } fn mk_set_op(dest: Variable, op: Binop, t1 : Triv, t2: Triv) -> Effect { return Effect::SetOp(dest, (op, t1, t2)); } fn mk_mset(dest: Variable, offset: Offset, val : Triv) -> Effect { return Effect::MSet(dest, offset, val); } fn mk_loc_triv(l : Location) -> Triv { return as_var_triv(loc_as_var(l)); } fn mk_var(id : Ident) -> Variable { return Variable::UVar(id); } fn mk_var_triv(id: Ident) -> Triv { return as_var_triv(Variable::UVar(id)); } fn as_var_triv(v: Variable) -> Triv { return Triv::Var(v); } fn loc_as_var(l: Location) -> Variable { return Variable::Loc(l); } fn mk_set(dest: Variable, val: Triv) -> Effect { return Effect::Set(dest,val) } pub fn test1() -> Program { let x0 = mk_uvar("x"); let x1 = mk_uvar("x"); let x2 = mk_uvar("x"); let x3 = mk_uvar("x"); let y4 = mk_uvar("y"); let mut map = HashMap::new(); map.insert(x0, mk_loc_reg("rbx")); map.insert(x1, Location::FrameVar(2)); map.insert(x2, mk_loc_reg("r8")); map.insert(x3, mk_loc_reg("r9")); map.insert(y4, mk_loc_reg("r15")); let mut body_map = HashMap::new(); body_map.insert(x2, mk_loc_reg("r8")); body_map.insert(x3, mk_loc_reg("r9")); return Program::Letrec( vec![ LetrecEntry{ label : mk_lbl("X1") , rhs : Body { locations : map , expression : Exp::If(Pred::Op(Relop::LT, mk_var_triv(x2), mk_var_triv(x3)), Box::new( Exp::Begin( vec![ mk_set_op(mk_var(x1), Binop::Plus, mk_var_triv(x1), mk_num_lit(35)) , mk_mset(mk_var(x0), Offset::Num(10), mk_num_lit(40)) , mk_mset(mk_var(x0), Offset::UVar(y4), mk_num_lit(25)) , Effect::ReturnPoint(mk_lbl("foo"), 
Exp::Begin( vec![ mk_set(mk_reg("rax"), mk_fv_triv(1)) ] , mk_box!(mk_call("X1", Vec::new()))) , 16) , mk_set(mk_var(x0), Triv::MRef(mk_reg("rax"),Offset::Num(10)))] , Box::new(mk_call("void", vec![mk_loc_reg("rax")])))) , Box::new( Exp::Begin( vec![mk_set_op(mk_reg("rax"), Binop::Plus, as_var_triv(mk_reg("rax")), mk_num_lit(10))] , Box::new(mk_call("void", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))))) } } ] , Body { locations : body_map , expression : Exp::Begin( vec![ mk_set(mk_var(x2), mk_num_lit(0)) , mk_set(mk_var(x3), mk_num_lit(1)) ] , Box::new(mk_call("X1", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))) }); }
use util::Binop; use util::Relop; use util::Label; use util::Location; use util::mk_uvar; use util::Ident; use std::collections::HashMap; use finalize_locations::Program as FLProgram; use finalize_locations::LetrecEntry as FLLetrecEntry; use finalize_locations::Body as FLBody; use finalize_locations::Exp as FLExp; use finalize_locations::Effect as FLEffect; use finalize_locations::Pred as FLPred; use finalize_locations::Triv as FLTriv; use finalize_locations::Offset as FLOffset; use finalize_locations::Variable as FLVar; #[derive(Debug)] pub enum Program { Letrec(Vec<LetrecEntry>, Body) } #[derive(Debug)] pub struct LetrecEntry { pub label : Label , pub rhs : Body } #[derive(Debug)] pub struct Body { pub locations : HashMap<Ident, Location> , pub expression : Exp } #[derive(Debug)] pub enum Exp { Call(Triv, Vec<Location>) , If(Pred,Box<Exp>,Box<Exp>) , Begin(Vec<Effect>,Box<Exp>) } #[derive(Debug)] pub enum Pred { True , False , Op(Relop,Triv,Triv) , If(Box<Pred>,Box<Pred>,Box<Pred>) , Begin(Vec<Effect>, Box<Pred>) } #[derive(Debug)] pub enum Effect { SetOp(Variable, (Binop, Triv, Triv)) , Set(Variable, Triv) , Nop , MSet(Variable, Offset, Triv) , ReturnPoint(Label, Exp, i64) , If(Pred, Box<Effect>, Box<Effect>) , Begin(Box<Vec<Effect>>, Box<Effect>) } #[derive(Debug)] pub enum Variable { Loc(Location) , UVar(Ident) } #[derive(Debug)] pub enum Triv { Var(Variable) , Num(i64) , Label(Label) , MRef(Variable, Offset) } #[derive(Debug)] pub enum Offset { UVar(Ident) , Reg(Ident) , Num(i64) } pub fn discard_call_lives(input : Program) -> FLProgram { return
} fn letrec_entry(input : LetrecEntry) -> FLLetrecEntry { FLLetrecEntry { label : input.label, rhs : body(input.rhs) } } fn body(input : Body) -> FLBody { FLBody { locations : input.locations , expression : exp(input.expression) } } macro_rules! mk_box { ($e:expr) => [Box::new($e)] } fn exp(input : Exp) -> FLExp { return match input { Exp::Call(t, _) => FLExp::Call(triv(t)) , Exp::If(test, conseq, alt) => FLExp::If(pred(test), mk_box!(exp(*conseq)), mk_box!(exp(*alt))) , Exp::Begin(effs, body) => FLExp::Begin(effs.into_iter().map(|e| effect(e)).collect(), mk_box!(exp(*body))) } } fn pred(input : Pred) -> FLPred { return match input { Pred::True => FLPred::True , Pred::False => FLPred::False , Pred::Op(op,t1,t2) => FLPred::Op(op, triv(t1), triv(t2)) , Pred::If(test, conseq, alt) => FLPred::If(mk_box!(pred(*test)), mk_box!(pred(*conseq)), mk_box!(pred(*alt))) , Pred::Begin(effs, body) => FLPred::Begin( effs.into_iter().map(|e| effect(e)).collect(), mk_box!(pred(*body))) } } fn effect(input: Effect) -> FLEffect { return match input { Effect::SetOp(l, (op, t1, t2)) => FLEffect::SetOp(var(l), (op, triv(t1), triv(t2))) , Effect::Set(l, t) => FLEffect::Set(var(l), triv(t)) , Effect::Nop => FLEffect::Nop , Effect::MSet(base, off, val) => FLEffect::MSet(var(base), offset(off), triv(val)) , Effect::ReturnPoint(lbl, body, off) => FLEffect::ReturnPoint(lbl, exp(body), off) , Effect::If(test, conseq, alt) => FLEffect::If(pred(test), mk_box!(effect(*conseq)) , mk_box!(effect(*alt))) , Effect::Begin(effs, body) => FLEffect::Begin( mk_box!((*effs).into_iter().map(|e| effect(e)).collect()) , mk_box!(effect(*body))) } } fn loc(input : Location) -> Location { return input; } fn var(input : Variable) -> FLVar { return match input { Variable::Loc(l) => FLVar::Loc(loc(l)) , Variable::UVar(uv) => FLVar::UVar(uv) } } fn triv(input : Triv) -> FLTriv { return match input { Triv::Var(v) => FLTriv::Var(var(v)) , Triv::Num(n) => FLTriv::Num(n) , Triv::Label(l) => FLTriv::Label(l) , 
Triv::MRef(base, off) => FLTriv::MRef(var(base), offset(off)) } } fn offset(input: Offset) -> FLOffset { return match input { Offset::UVar(uv) => FLOffset::UVar(uv) , Offset::Reg(r) => FLOffset::Reg(r) , Offset::Num(n) => FLOffset::Num(n) } } fn mk_num_lit(n: i64) -> Triv { return Triv::Num(n); } fn mk_fv_triv(n: i64) -> Triv { return mk_loc_triv(Location::FrameVar(n)); } fn mk_reg(s: &str) -> Variable { return Variable::Loc(mk_loc_reg(s)); } fn mk_loc_reg(s: &str) -> Location { return Location::Reg(Ident::from_str(s)); } fn mk_call(s: &str, lives: Vec<Location>) -> Exp { return Exp::Call(Triv::Label(mk_lbl(s)), lives); } fn mk_lbl(s : &str) -> Label { return Label::new(Ident::from_str(s)); } fn mk_set_op(dest: Variable, op: Binop, t1 : Triv, t2: Triv) -> Effect { return Effect::SetOp(dest, (op, t1, t2)); } fn mk_mset(dest: Variable, offset: Offset, val : Triv) -> Effect { return Effect::MSet(dest, offset, val); } fn mk_loc_triv(l : Location) -> Triv { return as_var_triv(loc_as_var(l)); } fn mk_var(id : Ident) -> Variable { return Variable::UVar(id); } fn mk_var_triv(id: Ident) -> Triv { return as_var_triv(Variable::UVar(id)); } fn as_var_triv(v: Variable) -> Triv { return Triv::Var(v); } fn loc_as_var(l: Location) -> Variable { return Variable::Loc(l); } fn mk_set(dest: Variable, val: Triv) -> Effect { return Effect::Set(dest,val) } pub fn test1() -> Program { let x0 = mk_uvar("x"); let x1 = mk_uvar("x"); let x2 = mk_uvar("x"); let x3 = mk_uvar("x"); let y4 = mk_uvar("y"); let mut map = HashMap::new(); map.insert(x0, mk_loc_reg("rbx")); map.insert(x1, Location::FrameVar(2)); map.insert(x2, mk_loc_reg("r8")); map.insert(x3, mk_loc_reg("r9")); map.insert(y4, mk_loc_reg("r15")); let mut body_map = HashMap::new(); body_map.insert(x2, mk_loc_reg("r8")); body_map.insert(x3, mk_loc_reg("r9")); return Program::Letrec( vec![ LetrecEntry{ label : mk_lbl("X1") , rhs : Body { locations : map , expression : Exp::If(Pred::Op(Relop::LT, mk_var_triv(x2), mk_var_triv(x3)), 
Box::new( Exp::Begin( vec![ mk_set_op(mk_var(x1), Binop::Plus, mk_var_triv(x1), mk_num_lit(35)) , mk_mset(mk_var(x0), Offset::Num(10), mk_num_lit(40)) , mk_mset(mk_var(x0), Offset::UVar(y4), mk_num_lit(25)) , Effect::ReturnPoint(mk_lbl("foo"), Exp::Begin( vec![ mk_set(mk_reg("rax"), mk_fv_triv(1)) ] , mk_box!(mk_call("X1", Vec::new()))) , 16) , mk_set(mk_var(x0), Triv::MRef(mk_reg("rax"),Offset::Num(10)))] , Box::new(mk_call("void", vec![mk_loc_reg("rax")])))) , Box::new( Exp::Begin( vec![mk_set_op(mk_reg("rax"), Binop::Plus, as_var_triv(mk_reg("rax")), mk_num_lit(10))] , Box::new(mk_call("void", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))))) } } ] , Body { locations : body_map , expression : Exp::Begin( vec![ mk_set(mk_var(x2), mk_num_lit(0)) , mk_set(mk_var(x3), mk_num_lit(1)) ] , Box::new(mk_call("X1", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))) }); }
match input { Program::Letrec(letrecs, pgm_body) => FLProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect() , body(pgm_body)) }
if_condition
[ { "content": "fn mk_mset(dest: Variable, offset: Offset, val : Triv) -> Effect {\n\n return Effect::MSet(dest, offset, val);\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 0, "score": 258680.8981013795 }, { "content": "fn mk_set_op(dest: Variable, op: Binop, t1 : Triv, t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 1, "score": 237015.2414234196 }, { "content": "fn mk_set_op(dest: Location, op: Binop, t1 : Triv, t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 2, "score": 233085.01245954615 }, { "content": "fn mk_mset(dest: Ident, offset: Offset, val : Triv) -> Effect {\n\n return Effect::MSet(dest, offset, val);\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 4, "score": 228894.86851859454 }, { "content": "fn mk_mset(dest: Variable, offset: Offset, val : Triv) -> Effect {\n\n return Effect::MSet(dest, offset, val);\n\n}\n\n\n", "file_path": "src/discard_allocation_info.rs", "rank": 5, "score": 228894.86851859454 }, { "content": "fn pred(input : Pred, frame_offset : i64) -> EFPPred {\n\n return match input \n\n { Pred::True => EFPPred::True\n\n , Pred::False => EFPPred::False\n\n , Pred::Op(op,t1,t2) => EFPPred::Op(op, triv(t1, frame_offset), triv(t2, frame_offset))\n\n , Pred::If(test, conseq, alt) => EFPPred::If(mk_box!(pred(*test, frame_offset)),\n\n mk_box!(pred(*conseq, frame_offset)),\n\n mk_box!(pred(*alt, frame_offset)))\n\n , Pred::Begin(effs, body) => EFPPred::Begin( effs.into_iter().map(|e| effect(e, frame_offset)).collect()\n\n , mk_box!(pred(*body, frame_offset)))\n\n }\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 6, "score": 223845.27557282237 }, { "content": "fn exp(input : Exp, frame_offset: i64) -> EFPExp {\n\n return match input \n\n { Exp::Call(t) => EFPExp::Call(triv(t, frame_offset))\n\n , Exp::If(test, conseq, alt) => 
EFPExp::If( pred(test, frame_offset)\n\n , mk_box!(exp(*conseq, frame_offset))\n\n , mk_box!(exp(*alt, frame_offset)))\n\n , Exp::Begin(effs, body) => EFPExp::Begin( effs.into_iter().map(|e| effect(e, frame_offset)).collect()\n\n , mk_box!(exp(*body, frame_offset)))\n\n }\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 7, "score": 223363.6816918758 }, { "content": "fn effect(input: Effect, frame_offset: i64) -> EFPEffect {\n\n return match input \n\n { Effect::SetOp(l, (op, t1, t2)) => EFPEffect::SetOp( loc(l, frame_offset)\n\n , (op, triv(t1, frame_offset), triv(t2, frame_offset)))\n\n , Effect::Set(l, t) => EFPEffect::Set(loc(l, frame_offset), triv(t, frame_offset))\n\n , Effect::Nop => EFPEffect::Nop\n\n , Effect::MSet(base, off, val) => EFPEffect::MSet(base, offset(off), triv(val, frame_offset))\n\n , Effect::ReturnPoint(lbl, body, new_offset) => EFPEffect::ReturnPoint(lbl, exp(body, frame_offset + new_offset), new_offset)\n\n , Effect::If(test, conseq, alt) => EFPEffect::If( pred(test, frame_offset)\n\n , mk_box!(effect(*conseq, frame_offset))\n\n , mk_box!(effect(*alt, frame_offset)))\n\n , Effect::Begin(effs, body) => EFPEffect::Begin( mk_box!((*effs).into_iter().map(|e| effect(e, frame_offset)).collect())\n\n , mk_box!(effect(*body, frame_offset)))\n\n }\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 8, "score": 222814.84349967496 }, { "content": "fn triv(input : Triv, frame_offset : i64) -> EFPTriv {\n\n return match input\n\n { Triv::Loc(l) => EFPTriv::Loc(loc(l, frame_offset))\n\n , Triv::Num(n) => EFPTriv::Num(n)\n\n , Triv::Label(l) => EFPTriv::Label(l)\n\n , Triv::MRef(base, off) => EFPTriv::MRef(base, offset(off))\n\n } \n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 9, "score": 221259.85032150388 }, { "content": "fn mk_mset(dest: Triv, offset: Triv, val : Triv) -> Effect {\n\n return Effect::MSet(dest, offset, val);\n\n}\n\n\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 
10, "score": 213633.8311932984 }, { "content": "fn binop_fst_arg(dest : Triv, op : Binop, arg1 : Triv, arg2 : Triv) -> Effect {\n\n if dest == arg1 {\n\n binop_snd_arg(dest, op, arg1, arg2)\n\n } else if dest == arg2 && op.commutes() {\n\n binop_snd_arg(dest, op, arg2, arg1)\n\n } else {\n\n let new_var = Triv::Var(Variable::UVar(mk_uvar(\"arg\")));\n\n Effect::Begin(mk_box!(vec![ Effect::Set(new_var.clone(), arg1)\n\n , binop_snd_arg(new_var.clone(), op, new_var.clone(), arg2)\n\n , Effect::Set(dest, new_var)]))\n\n }\n\n}\n\n\n", "file_path": "src/select_instructions.rs", "rank": 11, "score": 211014.50511424794 }, { "content": "// dest == arg1 at this point\n\nfn binop_snd_arg(dest : Triv, op : Binop, arg1 : Triv, arg2 : Triv) -> Effect {\n\n if (dest.is_uvar() || dest.is_reg()) && (arg2.is_label() || (arg2.is_int() && !arg2.is_int32())) {\n\n let new_var = Triv::Var(Variable::UVar(mk_uvar(\"arg\"))); \n\n return Effect::Begin(mk_box!(vec![Effect::Set(new_var.clone(), arg2), Effect::SetOp(dest, (op, arg1, new_var))]));\n\n } else if op.is_mult() && dest.is_fvar() {\n\n let new_var = Triv::Var(Variable::UVar(mk_uvar(\"arg\")));\n\n Effect::Begin(mk_box!(vec![ Effect::Set(new_var.clone(), arg1)\n\n , binop_snd_arg(new_var.clone(), op, new_var.clone(), arg2)\n\n , Effect::Set(dest, new_var)]))\n\n } else if dest.is_fvar() && (arg2.is_label() || (arg2.is_int() && !arg2.is_int32()) || arg2.is_fvar()) {\n\n let new_var = Triv::Var(Variable::UVar(mk_uvar(\"arg\"))); \n\n return Effect::Begin(mk_box!(vec![Effect::Set(new_var.clone(), arg2), Effect::SetOp(dest, (op, arg1, new_var))]));\n\n } else { \n\n Effect::SetOp(dest, (op, arg1, arg2)) \n\n }\n\n}\n\n\n\n// ---------------------------------------------------------------------------\n\n\n\n// ---------------------------------------------------------------------------\n", "file_path": "src/select_instructions.rs", "rank": 12, "score": 211014.50511424794 }, { "content": "fn mk_set_op(dest: X86Loc, op: Binop, t1 : Triv, 
t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/flatten_program.rs", "rank": 13, "score": 210314.52142256545 }, { "content": "fn mk_set_op(dest: Variable, op: Binop, t1 : Triv, t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/discard_allocation_info.rs", "rank": 14, "score": 210029.71521008355 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn assign_frame_variables(input : Program) -> Program {\n\n return match input \n\n { Program::Letrec(letrecs, body_exp) => \n\n Program::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(body_exp))\n\n } \n\n}\n\n\n", "file_path": "src/assign_frame_variables.rs", "rank": 16, "score": 208216.3386227165 }, { "content": "pub fn test1() -> Program {\n\n let x0 = mk_uvar(\"x\");\n\n let x1 = mk_uvar(\"x\");\n\n let x2 = mk_uvar(\"x\");\n\n let x3 = mk_uvar(\"x\");\n\n let y4 = mk_uvar(\"y\");\n\n\n\n let mut map = HashMap::new();\n\n map.insert(x0, mk_loc_reg(\"rbx\"));\n\n map.insert(x1, Location::FrameVar(2));\n\n map.insert(x2, mk_loc_reg(\"r8\"));\n\n map.insert(x3, mk_loc_reg(\"r9\"));\n\n map.insert(y4, mk_loc_reg(\"r15\"));\n\n\n\n let mut body_map = HashMap::new();\n\n body_map.insert(x2, mk_loc_reg(\"r8\"));\n\n body_map.insert(x3, mk_loc_reg(\"r9\"));\n\n\n\n return Program::Letrec(\n\n vec![ LetrecEntry { label : mk_lbl(\"X1\")\n", "file_path": "src/finalize_locations.rs", "rank": 17, "score": 208186.36151795211 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn finalize_alloc_locations(input : Program) -> Program {\n\n return match input \n\n { Program::Letrec(letrecs, body_exp) => \n\n 
Program::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(body_exp))\n\n } \n\n}\n\n\n", "file_path": "src/finalize_alloc_locations.rs", "rank": 18, "score": 208014.41644174972 }, { "content": "pub fn test1() -> Program {\n\n\n\n let rax = Ident::from_str(\"rax\");\n\n let rbx = Ident::from_str(\"rbx\");\n\n let r15 = Ident::from_str(\"rbx\");\n\n\n\n return Program::Letrec(\n\n vec![ Letrec::Entry(mk_lbl(\"X1\")\n\n , Exp::If(Pred::Op(Relop::LT ,mk_loc_triv(mk_reg(\"r9\")), mk_loc_triv(mk_reg(\"r8\"))),\n\n Box::new(\n\n Exp::Begin(\n\n vec![ mk_set_op(Location::FrameVar(2), Binop::Plus, mk_fv_triv(2), mk_num_lit(35))\n\n , mk_mset(rbx, Offset::Num(10), mk_num_lit(40))\n\n , mk_mset(rbx, Offset::Reg(r15), mk_num_lit(25))\n\n , Effect::ReturnPoint(mk_lbl(\"foo\"), \n\n Exp::Begin(\n\n vec![ mk_set(mk_reg(\"rax\"), mk_fv_triv(1)) ]\n\n , mk_box!(mk_call(\"X1\")))\n\n , 16)\n\n , mk_set(mk_reg(\"rbx\"), Triv::MRef(rax, Offset::Num(10)))]\n", "file_path": "src/expose_frame_variables.rs", "rank": 19, "score": 203729.79825558816 }, { "content": "fn mk_set(dest: Variable, val: Triv) -> Effect {\n\n return Effect::Set(dest,val)\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 20, "score": 203724.25783499918 }, { "content": "fn mk_mset(dest: Ident, offset: Offset, val : Triv) -> Effect {\n\n return Effect::MSet(dest, offset, val);\n\n}\n\n\n", "file_path": "src/expose_frame_pointer.rs", "rank": 21, "score": 203615.0994981311 }, { "content": "fn mk_mset(dest: Ident, offset: Offset, val : Triv) -> Effect {\n\n return Effect::MSet(dest, offset, val);\n\n}\n\n\n", "file_path": "src/expose_memory_operands.rs", "rank": 22, "score": 203615.0994981311 }, { "content": "fn mk_fv_triv(n: i64) -> Triv {\n\n return mk_loc_triv(Location::FrameVar(n));\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 23, "score": 202718.6661826778 }, { "content": "// ---------------------------------------------------------------------------\n\n// 
IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn finalize_locations(input : Program) -> EFVProgram {\n\n return match input \n\n { Program::Letrec(letrecs, pgm_body) => \n\n EFVProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , exp(pgm_body.expression, &pgm_body.locations))\n\n } \n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 24, "score": 201965.19862595707 }, { "content": "fn mk_set(dest: Location, val: Triv) -> Effect {\n\n return Effect::Set(dest,val)\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 25, "score": 199349.63105052005 }, { "content": "fn mk_fv_triv(n: i64) -> Triv {\n\n return Triv::Loc(Location::FrameVar(n));\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 26, "score": 198983.11090756598 }, { "content": "fn mk_set_op(dest: Triv, op: Binop, t1 : Triv, t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 27, "score": 197922.55702686866 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn expose_frame_variables(input : Program) -> EFPProgram {\n\n return match input \n\n { Program::Letrec(letrecs, body) => \n\n EFPProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , exp(body, 0))\n\n } \n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 28, "score": 195551.74133659623 }, { "content": "// Returns the frame index of frame variable locations\n\n// (and -1 for registers).\n\npub fn frame_index(l: Location) -> i64 {\n\n match l \n\n { Location::Reg(_) => -1\n\n , Location::FrameVar(n) => n\n\n } \n\n}\n\n\n", "file_path": "src/util.rs", "rank": 29, "score": 193651.78060565318 }, { "content": "pub fn index_fvar(n : i64) -> Location {\n\n 
Location::FrameVar(n)\n\n}\n\n\n\n// An x86_64 location is one of:\n\n// - a register\n\n// -a displacement operand (a register and offset value)\n\n// - an index operand (a pair of registers)\n\n#[derive(Debug, Clone)]\n\npub enum X86Loc \n\n { Reg(Ident)\n\n , DisplaceOperand(Ident, i64) // base register and offset value\n\n , IndexOperand(Ident, Ident) // base register and offset register\n\n }\n", "file_path": "src/util.rs", "rank": 30, "score": 193640.9104623622 }, { "content": "// ---------------------------------------------------------------------------\n\n// TESTING\n\n// ---------------------------------------------------------------------------\n\nfn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n", "file_path": "src/flatten_program.rs", "rank": 31, "score": 193225.2103967541 }, { "content": "fn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 32, "score": 192697.64706066728 }, { "content": "fn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 33, "score": 188864.22198467195 }, { "content": "pub fn test1() -> Program {\n\n return Program::Letrec(\n\n vec![ Letrec::Entry(mk_lbl(\"X2\")\n\n , Exp::Begin(\n\n vec![mk_set_op(mk_reg(\"rax\"), Binop::Plus, mk_loc_triv(mk_reg(\"rax\")), mk_num_lit(10))]\n\n , Box::new(mk_call(\"X3\"))))\n\n , Letrec::Entry(mk_lbl(\"X1\")\n\n , Exp::If(Relop::LT ,mk_loc_triv(mk_reg(\"r9\")), mk_loc_triv(mk_reg(\"r8\")), \n\n mk_lbl(\"X2\"),mk_lbl(\"X3\")))\n\n , Letrec::Entry(mk_lbl(\"X3\")\n\n , Exp::Begin(\n\n vec![mk_set_op(mk_reg(\"rax\"), Binop::Plus, mk_loc_triv(mk_reg(\"rax\")), mk_num_lit(10))]\n\n , Box::new(mk_call(\"void\"))))\n\n ] \n\n , Exp::Begin(\n\n vec![ mk_set(mk_reg(\"r9\"), mk_num_lit(0)) \n\n , mk_set(mk_reg(\"r8\"), mk_num_lit(1))]\n\n , Box::new(mk_call(\"X1\"))));\n\n}\n", "file_path": "src/flatten_program.rs", "rank": 35, "score": 186171.93381693016 }, { 
"content": "fn label(input : Label) -> X86Exp {\n\n X86Exp::ExpLabel(input.to_id())\n\n}\n\n\n", "file_path": "src/flatten_program.rs", "rank": 36, "score": 185539.12880857478 }, { "content": "pub fn unique_label(label: &str) -> Label {\n\n let mut label_str = label.to_string();\n\n label_str.push_str(&next_lbl_cnt());\n\n return Label::new(Ident::from_str(&label_str));\n\n}\n\n\n\n\n\n// We also need a way to make new, unique indentifiers. We also do this with a\n\n// static, mutable counter.\n\nstatic mut UVAR_COUNTER : i64 = 0;\n\n\n", "file_path": "src/util.rs", "rank": 37, "score": 185129.0328692965 }, { "content": "fn mk_set_op(dest: X86Loc, op: Binop, t1 : Triv, t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/expose_basic_blocks.rs", "rank": 38, "score": 184364.78489054955 }, { "content": "fn mk_set_op(dest: X86Loc, op: Binop, t1 : Triv, t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/expose_frame_pointer.rs", "rank": 39, "score": 184364.78489054955 }, { "content": "fn mk_set_op(dest: X86Loc, op: Binop, t1 : Triv, t2: Triv) -> Effect {\n\n return Effect::SetOp(dest, (op, t1, t2));\n\n}\n\n\n", "file_path": "src/expose_memory_operands.rs", "rank": 40, "score": 184364.78489054955 }, { "content": "fn pred(input : Pred, con_lbl : Label, alt_lbl : Label) -> (FPExp, Vec<FPLetrec>) {\n\n return match input \n\n { Pred::True => (mk_fp_call(con_lbl), Vec::new())\n\n , Pred::False => (mk_fp_call(alt_lbl), Vec::new())\n\n , Pred::Op(op,t1,t2) => (FPExp::If(op, triv(t1), triv(t2), con_lbl, alt_lbl), Vec::new())\n\n , Pred::If(test, conseq, alt) =>\n\n { let new_con_label = unique_label(\"C\");\n\n let new_alt_label = unique_label(\"A\");\n\n\n\n let (pred_body, mut pred_bindings) = pred(*test, new_con_label, new_alt_label);\n\n let (con_body, mut con_bindings) = pred(*conseq, con_lbl, alt_lbl);\n\n let (alt_body, mut alt_bindings) = pred(*alt, con_lbl, alt_lbl);\n\n\n\n 
let mut output_letrec = Vec::new();\n\n output_letrec.append(&mut pred_bindings);\n\n output_letrec.push(FPLetrec::Entry(new_con_label, make_begin(con_body)));\n\n output_letrec.append(&mut con_bindings);\n\n output_letrec.push(FPLetrec::Entry(new_alt_label, make_begin(alt_body)));\n\n output_letrec.append(&mut alt_bindings);\n\n\n\n return (pred_body, output_letrec);\n\n }\n\n , Pred::Begin(effects, body) => {\n\n let (pred_body, mut pred_bindings) = pred(*body, con_lbl, alt_lbl);\n\n return effect_star(effects, pred_body, &mut pred_bindings);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/expose_basic_blocks.rs", "rank": 41, "score": 183358.12705509964 }, { "content": "fn triv(input : Triv) -> X86Exp {\n\n return match input\n\n { Triv::Loc(l) => loc(l)\n\n , Triv::Num(n) => X86Exp::ExpNum(n)\n\n , Triv::Label(l) => label(l) \n\n } \n\n}\n\n\n", "file_path": "src/flatten_program.rs", "rank": 42, "score": 182873.937036068 }, { "content": "fn pred(input : Pred, frame_size : i64) -> Pred {\n\n match input\n\n { Pred::True => Pred::True\n\n , Pred::False => Pred::False\n\n , Pred::Op(op, triv1, triv2) => Pred::Op(op, triv1, triv2)\n\n , Pred::If(test, conseq, alt) => Pred::If( mk_box!(pred(*test, frame_size))\n\n , mk_box!(pred(*conseq, frame_size))\n\n , mk_box!(pred(*alt, frame_size)))\n\n , Pred::Begin(effs, test) => Pred::Begin(effs.into_iter().map(|e| effect(e, frame_size)).collect(), mk_box!(pred(*test, frame_size)))\n\n }\n\n}\n\n\n", "file_path": "src/assign_frame_args.rs", "rank": 43, "score": 178487.226733233 }, { "content": "fn exp(input : Exp, frame_size : i64) -> Exp { \n\n match input\n\n { Exp::Call(target, lives) => Exp::Call(target, lives) \n\n , Exp::If(test, con, alt) => Exp::If(pred(test, frame_size), mk_box!(exp(*con, frame_size)), mk_box!(exp(*alt, frame_size)))\n\n , Exp::Begin(effs, tail) => Exp::Begin(effs.into_iter().map(|e| effect(e, frame_size)).collect(), mk_box!(exp(*tail, frame_size)))\n\n }\n\n}\n\n\n", "file_path": 
"src/assign_frame_args.rs", "rank": 44, "score": 177975.75630100537 }, { "content": "fn effect(input : Effect, frame_size : i64) -> Effect {\n\n match input\n\n { Effect::SetOp(dest, (op, arg1, arg2)) => Effect::SetOp(dest, (op, arg1, arg2))\n\n , Effect::Set(dest, src) => Effect::Set(dest, src) \n\n , Effect::Nop => Effect::Nop\n\n , Effect::MSet(dest, offset, src) => Effect::MSet(dest, offset, src)\n\n , Effect::ReturnPoint(lbl, body, size) => \n\n Effect::Begin(mk_box!(vec![ Effect::SetOp(fv_as_triv(), (Binop::Plus, fv_as_triv(), Triv::Num(frame_size << WORD_SIZE)))\n\n , Effect::ReturnPoint(lbl, exp(body, frame_size), frame_size)\n\n , Effect::SetOp(fv_as_triv(), (Binop::Minus, fv_as_triv(), Triv::Num(frame_size << WORD_SIZE)))]))\n\n , Effect::If(test, conseq, alt) => Effect::If( pred(test, frame_size)\n\n , mk_box!(effect(*conseq, frame_size))\n\n , mk_box!(effect(*alt, frame_size)))\n\n , Effect::Begin(effs) => Effect::Begin(mk_box!((*effs).into_iter().map(|e| effect(e, frame_size)).collect()))\n\n }\n\n} \n\n\n", "file_path": "src/assign_frame_args.rs", "rank": 45, "score": 177363.52445442023 }, { "content": "fn as_var_triv(v: Variable) -> Triv {\n\n return Triv::Var(v);\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 46, "score": 177081.4165475698 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn select_instructions(input : Program) -> Program {\n\n return match input \n\n { Program::Letrec(letrecs, body_exp) => \n\n Program::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(body_exp))\n\n } \n\n}\n\n\n", "file_path": "src/select_instructions.rs", "rank": 47, "score": 176330.23390679812 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// 
---------------------------------------------------------------------------\n\npub fn assign_registers(input : Program) -> Program {\n\n return match input \n\n { Program::Letrec(letrecs, body_exp) => \n\n Program::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(body_exp))\n\n } \n\n}\n\n\n", "file_path": "src/assign_registers.rs", "rank": 48, "score": 176330.23390679812 }, { "content": "pub fn test1() -> Program {\n\n\n\n let rax = Ident::from_str(\"rax\");\n\n let r8 = Ident::from_str(\"r8\");\n\n let r9 = Ident::from_str(\"r9\");\n\n let lbl_x2 = Ident::from_str(\"X2\");\n\n let lbl_x3 = Ident::from_str(\"X3\");\n\n\n\n return Program::Program(\n\n vec![ X86LangStmt::SetLoad(mk_reg(\"r9\"), mk_num_lit(0)) \n\n , X86LangStmt::SetLoad(mk_reg(\"r8\"), mk_num_lit(1)) \n\n , X86LangStmt::Jump(mk_exp_lbl(\"X1\"))\n\n , mk_lbl(\"X2\")\n\n , X86LangStmt::SetOp(X86Exp::ExpReg(rax),(Binop::Plus,X86Exp::ExpReg(rax),X86Exp::ExpNum(10)))\n\n , X86LangStmt::Jump(X86Exp::ExpLabel(lbl_x3))\n\n , mk_lbl(\"X1\")\n\n , X86LangStmt::If(Relop::LT,X86Exp::ExpReg(r9),X86Exp::ExpReg(r8),X86Exp::ExpLabel(lbl_x2))\n\n , X86LangStmt::Jump(X86Exp::ExpLabel(lbl_x3))\n\n , mk_lbl(\"X3\")\n\n , X86LangStmt::SetOp(X86Exp::ExpReg(rax),(Binop::Plus,X86Exp::ExpReg(rax),X86Exp::ExpNum(10)))\n\n ]);\n\n}\n", "file_path": "src/generate_x86_64.rs", "rank": 49, "score": 176311.07169669974 }, { "content": "fn mk_set(dest: X86Loc, val: Triv) -> Effect {\n\n return Effect::Set(dest,val)\n\n}\n\n\n", "file_path": "src/flatten_program.rs", "rank": 50, "score": 172922.97006320494 }, { "content": "pub fn test1() -> Program {\n\n let x0 = mk_uvar(\"x\");\n\n let x1 = mk_uvar(\"x\");\n\n let x2 = mk_uvar(\"x\");\n\n let x3 = mk_uvar(\"x\");\n\n let y4 = mk_uvar(\"y\");\n\n\n\n let mut map = HashMap::new();\n\n map.insert(x0, mk_loc_reg(\"rbx\"));\n\n map.insert(x1, Location::FrameVar(2));\n\n map.insert(x2, mk_loc_reg(\"r8\"));\n\n map.insert(x3, mk_loc_reg(\"r9\"));\n\n 
map.insert(y4, mk_loc_reg(\"r15\"));\n\n\n\n let mut body_map = HashMap::new();\n\n body_map.insert(x2, mk_loc_reg(\"r8\"));\n\n body_map.insert(x3, mk_loc_reg(\"r9\"));\n\n\n\n return Program::Letrec(\n\n vec![ LetrecEntry{ label : mk_lbl(\"X1\")\n", "file_path": "src/discard_allocation_info.rs", "rank": 51, "score": 172861.24334407353 }, { "content": "pub fn test1() -> Program {\n\n\n\n let rax = Ident::from_str(\"rax\");\n\n let rbx = Ident::from_str(\"rbx\");\n\n let r15 = Ident::from_str(\"rbx\");\n\n\n\n return Program::Letrec(\n\n vec![ Letrec::Entry(mk_lbl(\"X1\")\n\n , Exp::If(Pred::Op(Relop::LT ,mk_loc_triv(mk_reg(\"r9\")), mk_loc_triv(mk_reg(\"r8\"))),\n\n Box::new(\n\n Exp::Begin(\n\n vec![ mk_set_op(mk_reg(\"rax\"), Binop::Plus, mk_loc_reg(\"rax\"), mk_num_lit(10))\n\n , mk_mset(rbx, Offset::Num(10), mk_num_lit(40))\n\n , mk_mset(rbx, Offset::Reg(r15), mk_num_lit(40))\n\n , Effect::ReturnPoint(mk_lbl(\"foo\"), \n\n Exp::Begin(\n\n vec![ mk_set_op(mk_reg(\"rax\")\n\n , Binop::Plus\n\n , mk_loc_reg(\"rax\")\n\n , mk_num_lit(10))\n", "file_path": "src/expose_frame_pointer.rs", "rank": 52, "score": 172861.24334407353 }, { "content": "pub fn test1() -> Program {\n\n let x0 = mk_uvar(\"x\");\n\n let x1 = mk_uvar(\"x\");\n\n let x2 = mk_uvar(\"x\");\n\n let x3 = mk_uvar(\"x\");\n\n let y4 = mk_uvar(\"y\");\n\n\n\n let mut map = HashMap::new();\n\n map.insert(x0, mk_loc_reg(\"rbx\"));\n\n map.insert(x1, Location::FrameVar(2));\n\n map.insert(x2, mk_loc_reg(\"r8\"));\n\n map.insert(x3, mk_loc_reg(\"r9\"));\n\n map.insert(y4, mk_loc_reg(\"r15\"));\n\n\n\n let mut body_map = HashMap::new();\n\n body_map.insert(x2, mk_loc_reg(\"r8\"));\n\n body_map.insert(x3, mk_loc_reg(\"r9\"));\n\n\n\n return Program::Letrec(\n\n vec![ LetrecEntry{ label : mk_lbl(\"X1\")\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 53, "score": 172861.24334407353 }, { "content": "pub fn test1() -> Program {\n\n return Program::Letrec(\n\n vec![ 
Letrec::Entry(mk_lbl(\"X1\")\n\n , Exp::If(Pred::Op(Relop::LT ,mk_loc_triv(mk_reg(\"r9\")), mk_loc_triv(mk_reg(\"r8\"))),\n\n Box::new(\n\n Exp::Begin(\n\n vec![mk_set_op(mk_reg(\"rax\"), Binop::Plus, mk_loc_triv(mk_reg(\"rax\")), mk_num_lit(10))]\n\n , Box::new(mk_call(\"void\"))))\n\n , Box::new(\n\n Exp::Begin(\n\n vec![mk_set_op(mk_reg(\"rax\"), Binop::Plus, mk_loc_triv(mk_reg(\"rax\")), mk_num_lit(10))]\n\n , Box::new(mk_call(\"void\"))))))\n\n ]\n\n , Exp::Begin(\n\n vec![ mk_set(mk_reg(\"r9\"), mk_num_lit(0))\n\n , mk_set(mk_reg(\"r8\"), mk_num_lit(1))]\n\n , Box::new(mk_call(\"X1\"))));\n\n}\n", "file_path": "src/expose_basic_blocks.rs", "rank": 55, "score": 172861.24334407353 }, { "content": "pub fn test1() -> Program {\n\n\n\n let rax = Ident::from_str(\"rax\");\n\n let rbx = Ident::from_str(\"rbx\");\n\n let r15 = Ident::from_str(\"rbx\");\n\n\n\n return Program::Letrec(\n\n vec![ Letrec::Entry(mk_lbl(\"X1\")\n\n , Exp::If(Pred::Op(Relop::LT ,mk_loc_triv(mk_reg(\"r9\")), mk_loc_triv(mk_reg(\"r8\"))),\n\n Box::new(\n\n Exp::Begin(\n\n vec![ mk_set_op(mk_reg(\"rax\"), Binop::Plus, mk_loc_triv(mk_reg(\"rax\")), mk_num_lit(10))\n\n , mk_mset(rbx, Offset::Num(10), mk_num_lit(40))\n\n , mk_mset(rbx, Offset::Reg(r15), mk_num_lit(40))\n\n , mk_set(mk_reg(\"rbx\"), Triv::MRef(rax, Offset::Num(10)))]\n\n , Box::new(mk_call(\"void\"))))\n\n , Box::new(\n\n Exp::Begin(\n\n vec![mk_set_op(mk_reg(\"rax\"), Binop::Plus, mk_loc_triv(mk_reg(\"rax\")), mk_num_lit(10))]\n\n , Box::new(mk_call(\"void\"))))))\n\n ]\n\n , Exp::Begin(\n\n vec![ mk_set(mk_reg(\"r9\"), mk_num_lit(0))\n\n , mk_set(mk_reg(\"r8\"), mk_num_lit(1))]\n\n , Box::new(mk_call(\"X1\"))));\n\n}\n", "file_path": "src/expose_memory_operands.rs", "rank": 56, "score": 172861.24334407353 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn 
flatten_program(input : Program) -> X86Program {\n\n match input {\n\n Program::Letrec(letrecs, body) => {\n\n let mut output = Vec::new();\n\n output.append(&mut exp(body));\n\n for binding in letrecs {\n\n output.append(&mut letrec_entry(binding));\n\n }\n\n return X86Program::Program(output);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/flatten_program.rs", "rank": 57, "score": 172819.5643484517 }, { "content": "fn mk_set(dest: Variable, val: Triv) -> Effect {\n\n return Effect::Set(dest,val)\n\n}\n\n\n", "file_path": "src/discard_allocation_info.rs", "rank": 59, "score": 172592.43361530325 }, { "content": "fn loc(input : Location, frame_offset : i64) -> X86Loc {\n\n return match input\n\n { Location::Reg(reg) => X86Loc::Reg(reg)\n\n , Location::FrameVar(n) => X86Loc::DisplaceOperand(Ident::from_str(FRAME_PTR_REG), (n << WORD_SIZE) - frame_offset)\n\n // We compute the offset by shifting the variable index by the word_size, then\n\n // subtract the frame offset so that if the FV is bumped, we get to the right\n\n // place anyway.\n\n }\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 60, "score": 171119.31411001168 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn assign_frame_args(input : Program) -> Program {\n\n return match input \n\n { Program::Letrec(letrecs, body_exp) => \n\n Program::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(body_exp))\n\n } \n\n}\n\n\n", "file_path": "src/assign_frame_args.rs", "rank": 61, "score": 171059.47477961646 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn uncover_register_conflicts(input : Program) -> Program {\n\n return match input \n\n { Program::Letrec(letrecs, 
body_exp) => \n\n Program::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(body_exp))\n\n } \n\n}\n\n\n", "file_path": "src/uncover_register_conflicts.rs", "rank": 62, "score": 171059.4747796165 }, { "content": "fn mk_fv_triv(n: i64) -> Triv {\n\n return mk_loc_triv(Location::FrameVar(n));\n\n}\n\n\n", "file_path": "src/discard_allocation_info.rs", "rank": 64, "score": 170233.2074087446 }, { "content": "fn mk_fv_triv(n: i64) -> Triv {\n\n return mk_loc_triv(Location::FrameVar(n));\n\n}\n\n\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 65, "score": 170233.2074087446 }, { "content": "fn mk_loc_triv(l : Location) -> Triv {\n\n return Triv::Loc(l);\n\n}\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 66, "score": 169072.09289583447 }, { "content": "fn mk_set(dest: Triv, val: Triv) -> Effect {\n\n return Effect::Set(dest,val)\n\n}\n\n\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 67, "score": 165163.9055871234 }, { "content": "fn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n\n\n", "file_path": "src/expose_memory_operands.rs", "rank": 68, "score": 160114.31848585058 }, { "content": "fn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n\n\n", "file_path": "src/expose_basic_blocks.rs", "rank": 69, "score": 160114.31848585058 }, { "content": "fn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n\n\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 70, "score": 160114.31848585058 }, { "content": "fn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n\n\n", "file_path": "src/discard_allocation_info.rs", "rank": 71, "score": 160114.31848585058 }, { "content": "fn mk_num_lit(n: i64) -> Triv {\n\n return Triv::Num(n);\n\n}\n\n\n", "file_path": "src/expose_frame_pointer.rs", "rank": 72, "score": 160114.31848585058 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// 
---------------------------------------------------------------------------\n\npub fn expose_basic_blocks(input : Program) -> FPProgram {\n\n match input {\n\n Program::Letrec(letrecs, body) => {\n\n let mut output_letrec = Vec::new();\n\n for binding in letrecs {\n\n output_letrec.append(&mut letrec_entry(binding));\n\n }\n\n let (new_body, mut body_bindings) = exp(body);\n\n output_letrec.append(&mut body_bindings);\n\n\n\n return FPProgram::Letrec(output_letrec,new_body);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/expose_basic_blocks.rs", "rank": 74, "score": 159200.99655399218 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn expose_memory_operands(input : Program) -> EBBProgram {\n\n return match input \n\n { Program::Letrec(letrecs, body) => \n\n EBBProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , exp(body))\n\n } \n\n}\n\n\n", "file_path": "src/expose_memory_operands.rs", "rank": 75, "score": 159200.99655399218 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn finalize_instruction_selection(input : Program) -> DAIProgram {\n\n return match input \n\n { Program::Letrec(letrecs, pgm_body) => \n\n DAIProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(pgm_body))\n\n } \n\n}\n\n\n\n\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 76, "score": 159200.99655399218 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn expose_frame_pointer(input : Program) -> EMOProgram {\n\n return match input \n\n { Program::Letrec(letrecs, 
body) => \n\n EMOProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , exp(body))\n\n } \n\n}\n\n\n", "file_path": "src/expose_frame_pointer.rs", "rank": 77, "score": 159200.99655399218 }, { "content": "// ---------------------------------------------------------------------------\n\n// IMPLEMENTATION\n\n// ---------------------------------------------------------------------------\n\npub fn discard_allocation_info(input : Program) -> DCLProgram {\n\n return match input \n\n { Program::Letrec(letrecs, pgm_body) => \n\n DCLProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect()\n\n , body(pgm_body))\n\n } \n\n}\n\n\n\n\n", "file_path": "src/discard_allocation_info.rs", "rank": 78, "score": 159200.99655399218 }, { "content": "fn offset(input: Offset) -> EFPOffset {\n\n return match input\n\n { Offset::Reg(s) => EFPOffset::Reg(s)\n\n , Offset::Num(n) => EFPOffset::Num(n)\n\n }\n\n}\n\n\n\n// ---------------------------------------------------------------------------\n\n// TESTING\n\n// ---------------------------------------------------------------------------\n\n\n", "file_path": "src/expose_frame_variables.rs", "rank": 82, "score": 157274.39300043587 }, { "content": "pub fn generate_x86_64 (input: Program) -> String {\n\n match input {\n\n Program::Program(stmts) =>\n\n emit_program(stmts.into_iter().map(|x| statement(x)).collect())\n\n }\n\n}\n\n\n", "file_path": "src/generate_x86_64.rs", "rank": 84, "score": 156629.89547839947 }, { "content": "pub fn everybody_home(pgm : &Program) -> bool {\n\n fn is_home(entry : &Body) -> bool {\n\n match entry.alloc\n\n { RegAllocForm::Allocated(_) => true\n\n , RegAllocForm::Unallocated(_, _) => false\n\n }\n\n }\n\n\n\n match pgm\n\n { Program::Letrec(bindings, main) => \n\n { let bindings_home = bindings.into_iter().fold(true, |acc, binding| acc && is_home(&binding.rhs));\n\n bindings_home && is_home(&main)\n\n }\n\n }\n\n}\n", "file_path": "src/alloc_lang.rs", "rank": 85, "score": 
156629.89547839947 }, { "content": "fn relop_fst_arg(op : Relop, arg1 : Triv, arg2 : Triv) -> Pred {\n\n if arg1.is_var() {\n\n relop_snd_arg(op, arg1, arg2)\n\n } else if arg2.is_var() {\n\n relop_snd_arg(op.flip(), arg2, arg1)\n\n } else {\n\n let new_var = Triv::Var(Variable::UVar(mk_uvar(\"px\"))); \n\n return Pred::Begin(vec![Effect::Set(new_var.clone(), arg1)], mk_box!(relop_snd_arg(op, new_var, arg2)));\n\n }\n\n}\n\n\n", "file_path": "src/select_instructions.rs", "rank": 87, "score": 154291.34435054072 }, { "content": "fn relop_snd_arg(op: Relop, arg1 : Triv, arg2 : Triv) -> Pred {\n\n if ((arg1.is_uvar() || arg1.is_reg()) && ((arg2.is_int() && !arg2.is_int32()) || arg2.is_label())) ||\n\n (arg1.is_fvar() && ((arg2.is_int() && !arg2.is_int32()) || arg2.is_label() || arg2.is_fvar())) {\n\n\n\n let new_var = Triv::Var(Variable::UVar(mk_uvar(\"py\"))); \n\n return Pred::Begin(vec![Effect::Set(new_var.clone(), arg2)], mk_box!(relop_snd_arg(op, arg1, new_var)));\n\n\n\n } else {\n\n Pred::Op(op, arg1, arg2)\n\n }\n\n}\n\n\n", "file_path": "src/select_instructions.rs", "rank": 88, "score": 154291.34435054072 }, { "content": "fn mk_loc_triv(l : Location) -> Triv {\n\n return as_var_triv(loc_as_var(l));\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 89, "score": 153152.90984910232 }, { "content": "fn pred(input : Pred, var_map : &HashMap<Ident, Location>) -> Pred {\n\n match input\n\n { Pred::True => Pred::True\n\n , Pred::False => Pred::False\n\n , Pred::Op(op, triv1, triv2) => Pred::Op(op, triv(triv1, var_map), triv(triv2, var_map))\n\n , Pred::If(test, conseq, alt) => Pred::If(mk_box!(pred(*test, var_map)), mk_box!(pred(*conseq, var_map)), mk_box!(pred(*alt, var_map)))\n\n , Pred::Begin(effs, test) => Pred::Begin(effs.into_iter().map(|e| effect(e, var_map)).collect(), mk_box!(pred(*test, var_map)))\n\n }\n\n}\n\n\n", "file_path": "src/finalize_alloc_locations.rs", "rank": 90, "score": 151622.07473437427 }, { "content": "fn effect(input : Effect) 
-> X86LangStmt {\n\n return match input \n\n { Effect::SetOp(location, (binop, t1, t2)) => X86LangStmt::SetOp(loc(location), (binop, triv(t1), triv(t2)))\n\n , Effect::Set(location, trivial) => X86LangStmt::SetLoad(loc(location),triv(trivial))\n\n }\n\n}\n\n\n", "file_path": "src/flatten_program.rs", "rank": 91, "score": 151332.0087241614 }, { "content": "fn exp(input : Exp, var_map : &HashMap<Ident, Location>) -> Exp { \n\n match input\n\n { Exp::Call(target, lives) => Exp::Call(triv(target, var_map), lives) \n\n , Exp::If(test, con, alt) => Exp::If(pred(test, var_map), mk_box!(exp(*con, var_map)), mk_box!(exp(*alt, var_map)))\n\n , Exp::Begin(effs, tail) => Exp::Begin(effs.into_iter().map(|e| effect(e, var_map)).collect(), mk_box!(exp(*tail, var_map)))\n\n }\n\n}\n\n\n", "file_path": "src/finalize_alloc_locations.rs", "rank": 92, "score": 151136.91584519172 }, { "content": "fn effect(input : Effect, var_map : &HashMap<Ident, Location>) -> Effect {\n\n match input\n\n { Effect::SetOp(dest, (op, arg1, arg2)) => Effect::SetOp(triv(dest, var_map), (op, triv(arg1, var_map), triv(arg2, var_map)))\n\n , Effect::Set(dest, src) => Effect::Set(triv(dest, var_map), triv(src, var_map)) \n\n , Effect::Nop => Effect::Nop\n\n , Effect::MSet(dest, offset, src) => Effect::MSet(triv(dest, var_map), triv(offset, var_map), triv(src, var_map)) \n\n , Effect::ReturnPoint(lbl, body, size) => Effect::ReturnPoint(lbl, exp(body, var_map), size)\n\n , Effect::If(test, conseq, alt) => Effect::If(pred(test, var_map), mk_box!(effect(*conseq, var_map)), mk_box!(effect(*alt, var_map)))\n\n , Effect::Begin(effs) => Effect::Begin(mk_box!((*effs).into_iter().map(|e| effect(e, var_map)).collect()))\n\n }\n\n} \n\n\n", "file_path": "src/finalize_alloc_locations.rs", "rank": 93, "score": 150558.22535517125 }, { "content": "fn triv(input : Triv, var_map : &HashMap<Ident, Location>) -> Triv {\n\n match input \n\n { Triv::Var(v) => try_var_lookup(v, var_map)\n\n , Triv::Num(_) => input\n\n , 
Triv::Label(_) => input\n\n , Triv::MRef(t1, t2) => Triv::MRef(mk_box!(triv(*t1, var_map)), mk_box!(triv(*t2, var_map)))\n\n }\n\n}\n\n\n\n\n\n// ---------------------------------------------------------------------------\n\n\n\n// ---------------------------------------------------------------------------\n\n// TESTING\n\n// ---------------------------------------------------------------------------\n\n\n\npub mod test {\n\n\n\n macro_rules! mk_box {\n\n ($e:expr) => [Box::new($e)]\n", "file_path": "src/finalize_alloc_locations.rs", "rank": 94, "score": 148953.09954804662 }, { "content": "fn triv_to_offset(input : Triv) -> DAIOffset {\n\n match input\n\n { Triv::Var(Variable::UVar(uv)) => DAIOffset::UVar(uv)\n\n , Triv::Var(Variable::Loc(Location::Reg(s))) => DAIOffset::Reg(s)\n\n , Triv::Num(n) => DAIOffset::Num(n)\n\n , _ => panic!(\"Instruction selection has left a non-offset triv in an offset-only place!\")\n\n }\n\n}\n\n\n", "file_path": "src/finalize_instruction_selection.rs", "rank": 95, "score": 148828.4912251126 }, { "content": "fn pred(input : Pred, map: &HashMap<Ident, Location>) -> EFVPred {\n\n return match input \n\n { Pred::True => EFVPred::True\n\n , Pred::False => EFVPred::False\n\n , Pred::Op(op,t1,t2) => EFVPred::Op(op, triv(t1, map), triv(t2, map))\n\n , Pred::If(test, conseq, alt) => EFVPred::If(mk_box!(pred(*test, map)),\n\n mk_box!(pred(*conseq, map)),\n\n mk_box!(pred(*alt, map)))\n\n , Pred::Begin(effs, body) => EFVPred::Begin( effs.into_iter().map(|e| effect(e, map)).collect()\n\n , mk_box!(pred(*body, map)))\n\n }\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 96, "score": 148371.64257389252 }, { "content": "fn offset(input: Offset, map: &HashMap<Ident, Location>) -> EFVOffset {\n\n return match input\n\n { Offset::UVar(uv) => if let Location::Reg(s) = uvar(uv, map) {\n\n EFVOffset::Reg(s)\n\n } else {\n\n panic!(\"Tried to place an offset variable on the stack.\")\n\n }\n\n , Offset::Reg(s) => EFVOffset::Reg(s)\n\n , 
Offset::Num(n) => EFVOffset::Num(n)\n\n }\n\n}\n\n\n\n// ---------------------------------------------------------------------------\n\n// TESTING\n\n// ---------------------------------------------------------------------------\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 97, "score": 148317.82738288757 }, { "content": "fn exp(input : Exp, map: &HashMap<Ident, Location>) -> EFVExp {\n\n return match input \n\n { Exp::Call(t) => EFVExp::Call(triv(t, map))\n\n , Exp::If(test, conseq, alt) => EFVExp::If( pred(test, map)\n\n , mk_box!(exp(*conseq, map))\n\n , mk_box!(exp(*alt, map)))\n\n , Exp::Begin(effs, body) => EFVExp::Begin( effs.into_iter().map(|e| effect(e, map)).collect()\n\n , mk_box!(exp(*body, map)))\n\n }\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 98, "score": 147907.85476969683 }, { "content": "fn effect(input: Effect, map: &HashMap<Ident, Location>) -> EFVEffect {\n\n return match input \n\n { Effect::SetOp(l, (op, t1, t2)) => EFVEffect::SetOp(var(l, map), (op, triv(t1, map), triv(t2, map)))\n\n , Effect::Set(l, t) => EFVEffect::Set(var(l, map), triv(t, map))\n\n , Effect::Nop => EFVEffect::Nop\n\n , Effect::MSet(base, off, val) => \n\n { let new_base = var(base, map);\n\n if let Location::Reg(s) = new_base {\n\n EFVEffect::MSet(s, offset(off, map), triv(val, map))\n\n } else {\n\n panic!(\"Tried to place an ref base on the stack.\");\n\n }\n\n }\n\n , Effect::ReturnPoint(lbl, body, off) => EFVEffect::ReturnPoint(lbl, exp(body, map), off)\n\n , Effect::If(test, conseq, alt) => EFVEffect::If( pred(test, map)\n\n , mk_box!(effect(*conseq, map))\n\n , mk_box!(effect(*alt, map)))\n\n , Effect::Begin(effs, body) => EFVEffect::Begin( mk_box!((*effs).into_iter().map(|e| effect(e, map)).collect())\n\n , mk_box!(effect(*body, map)))\n\n }\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 99, "score": 147392.70754615313 } ]
Rust
src/lib.rs
rust-cv/cv-geom
645b4894a3c056f9043b4be7755a3a4dd0e41ae6
#![no_std] use cv_core::nalgebra::{zero, Matrix3x4, Matrix4, RowVector4}; use cv_core::{ Bearing, CameraPoint, CameraToCamera, Pose, TriangulatorObservances, TriangulatorRelative, WorldPoint, WorldToCamera, }; #[derive(Copy, Clone, Debug)] pub struct MinSquaresTriangulator { epsilon: f64, max_iterations: usize, } impl MinSquaresTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for MinSquaresTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl TriangulatorObservances for MinSquaresTriangulator { fn triangulate_observances<B: Bearing>( &self, pairs: impl IntoIterator<Item = (WorldToCamera, B)>, ) -> Option<WorldPoint> { let mut a: Matrix4<f64> = zero(); for (pose, bearing) in pairs { let bearing = bearing.bearing().into_inner(); let rot = pose.0.rotation.matrix(); let trans = pose.0.translation.vector; let pose = Matrix3x4::<f64>::from_columns(&[ rot.column(0), rot.column(1), rot.column(2), trans.column(0), ]); let term = pose - bearing * bearing.transpose() * pose; a += term.transpose() * term; } let se = a.try_symmetric_eigen(self.epsilon, self.max_iterations)?; se.eigenvalues .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| se.eigenvectors.column(ix).into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) } } #[derive(Copy, Clone, Debug)] pub struct RelativeDltTriangulator { epsilon: f64, max_iterations: usize, } impl RelativeDltTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for RelativeDltTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl 
TriangulatorRelative for RelativeDltTriangulator { fn triangulate_relative<A: Bearing, B: Bearing>( &self, relative_pose: CameraToCamera, a: A, b: B, ) -> Option<CameraPoint> { let pose = relative_pose.homogeneous(); let a = a.bearing_unnormalized(); let b = b.bearing_unnormalized(); let mut design = Matrix4::zeros(); design .row_mut(0) .copy_from(&RowVector4::new(-a.z, 0.0, a.x, 0.0)); design .row_mut(1) .copy_from(&RowVector4::new(0.0, -a.z, a.y, 0.0)); design .row_mut(2) .copy_from(&(b.x * pose.row(2) - b.z * pose.row(0))); design .row_mut(3) .copy_from(&(b.y * pose.row(2) - b.z * pose.row(1))); let svd = design.try_svd(false, true, self.epsilon, self.max_iterations)?; svd.singular_values .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| svd.v_t.unwrap().row(ix).transpose().into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) } }
#![no_std] use cv_core::nalgebra::{zero, Matrix3x4, Matrix4, RowVector4}; use cv_core::{ Bearing, CameraPoint, CameraToCamera, Pose, TriangulatorObservances, TriangulatorRelative, WorldPoint, WorldToCamera, }; #[derive(Copy, Clone, Debug)] pub struct MinSquaresTriangulator { epsilon: f64, max_iterations: usize, } impl MinSquaresTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for MinSquaresTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl TriangulatorObservances for MinSquaresTriangulator { fn triangulate_observances<B: Bearing>( &self, pairs: impl IntoIterator<Item = (WorldToCamera, B)>, ) -> Option<WorldPoint> { let mut a: Matrix4<f64> = zero(); for (pose, bearing) in pairs { let bearing = bearing.bearing().into_inner(); let rot = pose.0.rotation.matrix(); let trans = pose.0.translation.vector; let pose = Matrix3x4::<f64>::from_columns(&[ rot.column(0), rot.column(1), rot.column(2), trans.column(0), ]); let term = pose - bearing * bearing.transpose() * pose; a += term.transpose() * term; } let se = a.try_symmetric_eigen(self.epsilon, self.max_iterations)?; se.eigenvalues .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| se.eigenvectors.column(ix).into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) } } #[derive(Copy, Clone, Debug)] pub struct RelativeDltTriangulator { epsilon: f64, max_iterations: usize, } impl RelativeDltTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for RelativeDltTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl 
TriangulatorRelative for RelativeDltTriangulator { fn triangulate_relative<A: Bearing, B: Bearing>( &self, relative_pose: CameraToCamera, a: A, b: B, ) -> Option<CameraPoint> { let pose = relative_pose.homogeneous(); let a = a.bearing_unnormalized(); let b = b.bearing_unnormalized(); let mut design = Matrix4::zeros(); design .row_mut(0) .copy_from(&RowVector4::new(-a.z, 0.0, a.x, 0.0)); design .row_mut(1) .copy_from(&RowVector4::new(0.0, -a.z, a.y, 0.0)); design .row_mut(2) .copy_from(&(b.x * pose.row(2) - b.z * pose.row(0))); design .row_mut(3) .copy_from(&(b.y * pose.row(2) - b.z * pose.row(1))); let svd = design.try_svd(false, true, self.epsilon, self.max_iteration
}
s)?; svd.singular_values .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| svd.v_t.unwrap().row(ix).transpose().into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) }
function_block-function_prefixed
[ { "content": "#[panic_handler]\n\nfn panic(_info: &PanicInfo) -> ! {\n\n loop {}\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn _start() -> ! {\n\n loop {}\n\n}\n", "file_path": "ensure_no_std/src/main.rs", "rank": 0, "score": 12230.500703461832 }, { "content": "// ensure_no_std/src/main.rs\n\n#![no_std]\n\n#![no_main]\n\n\n\nuse core::panic::PanicInfo;\n\n\n\n/// This function is called on panic.\n\n#[panic_handler]\n", "file_path": "ensure_no_std/src/main.rs", "rank": 12, "score": 1.2679460634321342 }, { "content": "# cv-geom\n\n\n\n[![Discord][dci]][dcl] [![Crates.io][ci]][cl] ![MIT/Apache][li] [![docs.rs][di]][dl] ![LoC][lo] ![Tests][btl] ![Lints][bll] ![no_std][bnl]\n\n\n\n[ci]: https://img.shields.io/crates/v/cv-geom.svg\n\n[cl]: https://crates.io/crates/cv-geom/\n\n\n\n[li]: https://img.shields.io/crates/l/specs.svg?maxAge=2592000\n\n\n\n[di]: https://docs.rs/cv-geom/badge.svg\n\n[dl]: https://docs.rs/cv-geom/\n\n\n\n[lo]: https://tokei.rs/b1/github/rust-cv/cv-geom?category=code\n\n\n\n[dci]: https://img.shields.io/discord/550706294311485440.svg?logo=discord&colorB=7289DA\n\n[dcl]: https://discord.gg/d32jaam\n\n\n\n[btl]: https://github.com/rust-cv/cv-geom/workflows/unit%20tests/badge.svg\n\n[bll]: https://github.com/rust-cv/cv-geom/workflows/lints/badge.svg\n\n[bnl]: https://github.com/rust-cv/cv-geom/workflows/no-std/badge.svg\n\n\n\nCollection of computational geometry algorithms for Rust CV\n", "file_path": "README.md", "rank": 13, "score": 0.4077759394894551 } ]
Rust
tests/src/vector.rs
sndels/yuki
bac8c1530ecc03b2c9657cd4cbde91112fbf82a8
#[cfg(test)] mod tests { use approx::{assert_abs_diff_eq, assert_abs_diff_ne, assert_relative_eq, assert_relative_ne}; use std::panic; use yuki::math::{Normal, Point3, Vec2, Vec3, Vec4}; #[test] fn new() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(Vec2::new(0.0, 1.0), v); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(v.z, 2.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0), v); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, 0.0f32); assert_eq!(v.y, 1.0f32); assert_eq!(v.z, 2.0f32); assert_eq!(v.w, 3.0f32); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0), v); } #[test] fn zeros() { assert_eq!(Vec2::zeros(), Vec2::new(0, 0)); assert_eq!(Vec3::zeros(), Vec3::new(0, 0, 0)); assert_eq!(Vec4::zeros(), Vec4::new(0, 0, 0, 0)); } #[test] fn ones() { assert_eq!(Vec2::ones(), Vec2::new(1, 1)); assert_eq!(Vec3::ones(), Vec3::new(1, 1, 1)); assert_eq!(Vec4::ones(), Vec4::new(1, 1, 1, 1)); } #[test] fn has_nans() { let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); } #[test] fn dot() { assert_eq!(Vec2::new(2, 3).dot(Vec2::new(4, 5)), 2 * 4 + 3 * 5); assert_eq!( Vec3::new(2, 3, 4).dot(Vec3::new(5, 6, 7)), 2 * 5 + 3 * 6 + 4 * 7 ); assert_eq!( Vec4::new(2, 3, 4, 5).dot(Vec4::new(6, 7, 8, 9)), 2 * 6 + 3 * 7 + 4 * 8 + 5 * 9 ); 
assert_eq!( Vec3::new(2.0, 3.0, 4.0).dot_n(Normal::new(5.0, 6.0, 7.0)), 2.0 * 5.0 + 3.0 * 6.0 + 4.0 * 7.0 ); } #[test] fn cross() { assert_eq!( Vec3::new(2.0, 3.0, 4.0).cross(Vec3::new(5.0, 6.0, -7.0)), Vec3::new(-45.0, 34.0, -3.0) ); } #[test] fn len_sqr() { assert_eq!(Vec2::new(2, 3).len_sqr(), 2 * 2 + 3 * 3); assert_eq!(Vec3::new(2, 3, 4).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4); assert_eq!( Vec4::new(2, 3, 4, 5).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4 + 5 * 5 ); } #[test] fn len() { assert_abs_diff_eq!( Vec2::new(2.0, 3.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32).sqrt() ); assert_abs_diff_eq!( Vec3::new(2.0, 3.0, 4.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32).sqrt() ); assert_abs_diff_eq!( Vec4::new(2.0, 3.0, 4.0, 5.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32 + 5.0f32 * 5.0f32).sqrt() ); } #[test] fn normalized() { assert_abs_diff_eq!(Vec2::new(1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec3::new(1.0, 1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec4::new(1.0, 1.0, 1.0, 1.0).normalized().len(), 1.0); } #[test] fn min() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.min(b), Vec2::new(0, 1)); assert_eq!(a.min(b), b.min(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.min(b), Vec3::new(0, 1, 4)); assert_eq!(a.min(b), b.min(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.min(b), Vec4::new(0, 1, 4, 6)); assert_eq!(a.min(b), b.min(a)); } #[test] fn max() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.max(b), Vec2::new(3, 2)); assert_eq!(a.max(b), b.max(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.max(b), Vec3::new(3, 2, 5)); assert_eq!(a.max(b), b.max(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.max(b), Vec4::new(3, 2, 5, 7)); assert_eq!(a.max(b), b.max(a)); } #[test] fn min_comp() { assert_eq!(Vec2::new(0.0, 1.0).min_comp(), 0.0); assert_eq!(Vec2::new(1.0, 
0.0).min_comp(), 0.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).min_comp(), 0.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).min_comp(), 0.0); } #[test] fn max_comp() { assert_eq!(Vec2::new(0.0, 1.0).max_comp(), 1.0); assert_eq!(Vec2::new(1.0, 0.0).max_comp(), 1.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_comp(), 2.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_comp(), 3.0); } #[test] fn max_dimension() { assert_eq!(Vec2::new(0.0, 1.0).max_dimension(), 1); assert_eq!(Vec2::new(1.0, 0.0).max_dimension(), 0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_dimension(), 2); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_dimension(), 3); } #[test] fn permutation() { assert_eq!(Vec2::new(2.0, 3.0).permuted(1, 0), Vec2::new(3.0, 2.0)); assert_eq!( Vec3::new(3.0, 4.0, 5.0).permuted(1, 2, 0), Vec3::new(4.0, 5.0, 3.0) ); assert_eq!( Vec4::new(4.0, 5.0, 6.0, 7.0).permuted(1, 2, 3, 0), Vec4::new(5.0, 6.0, 7.0, 4.0) ); } #[test] fn from() { assert_eq!(Vec2::from(2), Vec2::new(2, 2)); assert_eq!(Vec3::from(2), Vec3::new(2, 2, 2)); assert_eq!(Vec4::from(2), Vec4::new(2, 2, 2, 2)); assert_eq!( Vec3::from(Normal::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); assert_eq!( Vec3::from(Point3::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); } #[test] fn index() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, v[0]); assert_eq!(v.y, v[1]); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, v[0]); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, v[0]); let mut v = Vec2::zeros(); v[0] = 1.0; v[1] = 2.0; assert_eq!(v[0], 1.0); assert_eq!(v[1], 2.0); let mut v = Vec3::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); let mut v = Vec4::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); } #[test] fn neg() { assert_eq!(-Vec2::new(1, 2), Vec2::new(-1, -2)); assert_eq!(-Vec3::new(1, 2, 3), Vec3::new(-1, -2, -3)); assert_eq!(-Vec4::new(1, 2, 3, 4), Vec4::new(-1, -2, -3, -4)); } #[test] fn add() { assert_eq!(Vec2::new(1, 2) + Vec2::new(4, 6), Vec2::new(5, 8)); assert_eq!(Vec3::new(1, 2, 3) + Vec3::new(4, 6, 7), 
Vec3::new(5, 8, 10)); assert_eq!( Vec4::new(1, 2, 3, 4) + Vec4::new(5, 7, 9, 10), Vec4::new(6, 9, 12, 14) ); assert_eq!(Vec2::new(1, 2) + 3, Vec2::new(4, 5)); assert_eq!(Vec3::new(1, 2, 3) + 4, Vec3::new(5, 6, 7)); assert_eq!(Vec4::new(1, 2, 3, 4) + 5, Vec4::new(6, 7, 8, 9)); } #[test] fn sub() { assert_eq!(Vec2::new(5, 5) - Vec2::new(1, 2), Vec2::new(4, 3)); assert_eq!(Vec3::new(7, 7, 7) - Vec3::new(1, 2, 3), Vec3::new(6, 5, 4)); assert_eq!( Vec4::new(9, 9, 9, 9) - Vec4::new(1, 2, 3, 4), Vec4::new(8, 7, 6, 5) ); assert_eq!(Vec2::new(3, 2) - 2, Vec2::new(1, 0)); assert_eq!(Vec3::new(7, 6, 5) - 4, Vec3::new(3, 2, 1)); assert_eq!(Vec4::new(9, 8, 7, 6) - 5, Vec4::new(4, 3, 2, 1)); } #[test] fn mul() { assert_eq!(Vec2::new(2, 3) * 4, Vec2::new(8, 12)); assert_eq!(Vec3::new(2, 3, 4) * 5, Vec3::new(10, 15, 20)); assert_eq!(Vec4::new(2, 3, 4, 5) * 6, Vec4::new(12, 18, 24, 30)); } #[test] fn div() { assert_eq!(Vec2::new(8, 12) / 4, Vec2::new(2, 3)); assert_eq!(Vec3::new(10, 15, 20) / 5, Vec3::new(2, 3, 4)); assert_eq!(Vec4::new(12, 18, 24, 30) / 6, Vec4::new(2, 3, 4, 5)); } #[test] fn add_assign() { let mut v = Vec2::new(1, 2); v += Vec2::new(4, 6); assert_eq!(v, Vec2::new(5, 8)); let mut v = Vec3::new(1, 2, 3); v += Vec3::new(4, 6, 7); assert_eq!(v, Vec3::new(5, 8, 10)); let mut v = Vec4::new(1, 2, 3, 4); v += Vec4::new(5, 7, 9, 10); assert_eq!(v, Vec4::new(6, 9, 12, 14)); let mut v = Vec2::new(1, 2); v += 3; assert_eq!(v, Vec2::new(4, 5)); let mut v = Vec3::new(1, 2, 3); v += 4; assert_eq!(v, Vec3::new(5, 6, 7)); let mut v = Vec4::new(1, 2, 3, 4); v += 5; assert_eq!(v, Vec4::new(6, 7, 8, 9)); } #[test] fn sub_assign() { let mut v = Vec2::new(5, 5); v -= Vec2::new(1, 2); assert_eq!(v, Vec2::new(4, 3)); let mut v = Vec3::new(7, 7, 7); v -= Vec3::new(1, 2, 3); assert_eq!(v, Vec3::new(6, 5, 4)); let mut v = Vec4::new(9, 9, 9, 9); v -= Vec4::new(1, 2, 3, 4); assert_eq!(v, Vec4::new(8, 7, 6, 5)); let mut v = Vec2::new(3, 2); v -= 2; assert_eq!(v, Vec2::new(1, 0)); let mut v = 
Vec3::new(7, 6, 5); v -= 4; assert_eq!(v, Vec3::new(3, 2, 1)); let mut v = Vec4::new(9, 8, 7, 6); v -= 5; assert_eq!(v, Vec4::new(4, 3, 2, 1)); } #[test] fn mul_assign() { let mut v = Vec2::new(2, 3); v *= 4; assert_eq!(v, Vec2::new(8, 12)); let mut v = Vec3::new(2, 3, 4); v *= 5; assert_eq!(v, Vec3::new(10, 15, 20)); let mut v = Vec4::new(2, 3, 4, 5); v *= 6; assert_eq!(v, Vec4::new(12, 18, 24, 30)); } #[test] fn div_assign() { let mut v = Vec2::new(8, 12); v /= 4; assert_eq!(v, Vec2::new(2, 3)); let mut v = Vec3::new(10, 15, 20); v /= 5; assert_eq!(v, Vec3::new(2, 3, 4)); let mut v = Vec4::new(12, 18, 24, 30); v /= 6; assert_eq!(v, Vec4::new(2, 3, 4, 5)); } #[test] fn abs_diff_eq() { assert_abs_diff_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_abs_diff_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0); } #[test] fn relative_eq() { assert_relative_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_relative_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0,); assert_relative_eq!( &Vec2::new(2.0, 2.0), &Vec2::ones(), epsilon = 0.0, max_relative = 0.5 ); } }
#[cfg(test)] mod tests { use approx::{assert_abs_diff_eq, assert_abs_diff_ne, assert_relative_eq, assert_relative_ne}; use std::panic; use yuki::math::{Normal, Point3, Vec2, Vec3, Vec4}; #[test] fn new() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(Vec2::new(0.0, 1.0), v); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(v.z, 2.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0), v); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, 0.0f32); assert_eq!(v.y, 1.0f32); assert_eq!(v.z, 2.0f32); assert_eq!(v.w, 3.0f32); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0), v); } #[test] fn zeros() { assert_eq!(Vec2::zeros(), Vec2::new(0, 0)); assert_eq!(Vec3::zeros(), Vec3::new(0, 0, 0)); assert_eq!(Vec4::zeros(), Vec4::new(0, 0, 0, 0)); } #[test] fn ones() { assert_eq!(Vec2::ones(), Vec2::new(1, 1)); assert_eq!(Vec3::ones(), Vec3::new(1, 1, 1)); assert_eq!(Vec4::ones(), Vec4::new(1, 1, 1, 1)); } #[test] fn has_nans() { let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); } #[test] fn dot() { assert_eq!(Vec2::new(2, 3).dot(Vec2::new(4, 5)), 2 * 4 + 3 * 5); assert_eq!( Vec3::new(2, 3, 4).dot(Vec3::new(5, 6, 7)), 2 * 5 + 3 * 6 + 4 * 7 ); assert_eq!( Vec4::new(2, 3, 4, 5).dot(Vec4::new(6, 7, 8, 9)), 2 * 6 + 3 * 7 + 4 * 8 + 5 * 9 ); 
assert_eq!( Vec3::new(2.0, 3.0, 4.0).dot_n(Normal::new(5.0, 6.0, 7.0)), 2.0 * 5.0 + 3.0 * 6.0 + 4.0 * 7.0 ); } #[test] fn cross() { assert_eq!( Vec3::new(2.0, 3.0, 4.0).cross(Vec3::new(5.0, 6.0, -7.0)), Vec3::new(-45.0, 34.0, -3.0) ); } #[test] fn len_sqr() { assert_eq!(Vec2::new(2, 3).len_sqr(), 2 * 2 + 3 * 3); assert_eq!(Vec3::new(2, 3, 4).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4); assert_eq!( Vec4::new(2, 3, 4, 5).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4 + 5 * 5 ); } #[test] fn len() { assert_abs_diff_eq!( Vec2::new(2.0, 3.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32).sqrt() ); assert_abs_diff_eq!( Vec3::new(2.0, 3.0, 4.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32).sqrt() ); assert_abs_diff_eq!( Vec4::new(2.0, 3.0, 4.0, 5.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32 + 5.0f32 * 5.0f32).sqrt() ); } #[test] fn normalized() { assert_abs_diff_eq!(Vec2::new(1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec3::new(1.0, 1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec4::new(1.0, 1.0, 1.0, 1.0).normalized().len(), 1.0); } #[test] fn min() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.min(b), Vec2::new(0, 1)); assert_eq!(a.min(b), b.min(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.min(b), Vec3::new(0, 1, 4)); assert_eq!(a.min(b), b.min(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.min(b), Vec4::new(0, 1, 4, 6)); assert_eq!(a.min(b), b.min(a)); } #[test] fn max() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.max(b), Vec2::new(3, 2)); assert_eq!(a.max(b), b.max(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.max(b), Vec3::new(3, 2, 5)); assert_eq!(a.max(b), b.max(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.max(b), Vec4::new(3, 2, 5, 7)); assert_eq!(a.max(b), b.max(a)); } #[test] fn min_comp() { assert_eq!(Vec2::new(0.0, 1.0).min_comp(), 0.0); assert_eq!(Vec2::new(1.0, 
0.0).min_comp(), 0.0);
#[test] fn max_comp() { assert_eq!(Vec2::new(0.0, 1.0).max_comp(), 1.0); assert_eq!(Vec2::new(1.0, 0.0).max_comp(), 1.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_comp(), 2.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_comp(), 3.0); } #[test] fn max_dimension() { assert_eq!(Vec2::new(0.0, 1.0).max_dimension(), 1); assert_eq!(Vec2::new(1.0, 0.0).max_dimension(), 0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_dimension(), 2); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_dimension(), 3); } #[test] fn permutation() { assert_eq!(Vec2::new(2.0, 3.0).permuted(1, 0), Vec2::new(3.0, 2.0)); assert_eq!( Vec3::new(3.0, 4.0, 5.0).permuted(1, 2, 0), Vec3::new(4.0, 5.0, 3.0) ); assert_eq!( Vec4::new(4.0, 5.0, 6.0, 7.0).permuted(1, 2, 3, 0), Vec4::new(5.0, 6.0, 7.0, 4.0) ); } #[test] fn from() { assert_eq!(Vec2::from(2), Vec2::new(2, 2)); assert_eq!(Vec3::from(2), Vec3::new(2, 2, 2)); assert_eq!(Vec4::from(2), Vec4::new(2, 2, 2, 2)); assert_eq!( Vec3::from(Normal::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); assert_eq!( Vec3::from(Point3::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); } #[test] fn index() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, v[0]); assert_eq!(v.y, v[1]); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, v[0]); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, v[0]); let mut v = Vec2::zeros(); v[0] = 1.0; v[1] = 2.0; assert_eq!(v[0], 1.0); assert_eq!(v[1], 2.0); let mut v = Vec3::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); let mut v = Vec4::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); } #[test] fn neg() { assert_eq!(-Vec2::new(1, 2), Vec2::new(-1, -2)); assert_eq!(-Vec3::new(1, 2, 3), Vec3::new(-1, -2, -3)); assert_eq!(-Vec4::new(1, 2, 3, 4), Vec4::new(-1, -2, -3, -4)); } #[test] fn add() { assert_eq!(Vec2::new(1, 2) + Vec2::new(4, 6), Vec2::new(5, 8)); assert_eq!(Vec3::new(1, 2, 3) + Vec3::new(4, 6, 7), Vec3::new(5, 8, 10)); assert_eq!( Vec4::new(1, 2, 3, 4) + Vec4::new(5, 7, 9, 10), Vec4::new(6, 9, 12, 14) ); assert_eq!(Vec2::new(1, 2) + 3, 
Vec2::new(4, 5)); assert_eq!(Vec3::new(1, 2, 3) + 4, Vec3::new(5, 6, 7)); assert_eq!(Vec4::new(1, 2, 3, 4) + 5, Vec4::new(6, 7, 8, 9)); } #[test] fn sub() { assert_eq!(Vec2::new(5, 5) - Vec2::new(1, 2), Vec2::new(4, 3)); assert_eq!(Vec3::new(7, 7, 7) - Vec3::new(1, 2, 3), Vec3::new(6, 5, 4)); assert_eq!( Vec4::new(9, 9, 9, 9) - Vec4::new(1, 2, 3, 4), Vec4::new(8, 7, 6, 5) ); assert_eq!(Vec2::new(3, 2) - 2, Vec2::new(1, 0)); assert_eq!(Vec3::new(7, 6, 5) - 4, Vec3::new(3, 2, 1)); assert_eq!(Vec4::new(9, 8, 7, 6) - 5, Vec4::new(4, 3, 2, 1)); } #[test] fn mul() { assert_eq!(Vec2::new(2, 3) * 4, Vec2::new(8, 12)); assert_eq!(Vec3::new(2, 3, 4) * 5, Vec3::new(10, 15, 20)); assert_eq!(Vec4::new(2, 3, 4, 5) * 6, Vec4::new(12, 18, 24, 30)); } #[test] fn div() { assert_eq!(Vec2::new(8, 12) / 4, Vec2::new(2, 3)); assert_eq!(Vec3::new(10, 15, 20) / 5, Vec3::new(2, 3, 4)); assert_eq!(Vec4::new(12, 18, 24, 30) / 6, Vec4::new(2, 3, 4, 5)); } #[test] fn add_assign() { let mut v = Vec2::new(1, 2); v += Vec2::new(4, 6); assert_eq!(v, Vec2::new(5, 8)); let mut v = Vec3::new(1, 2, 3); v += Vec3::new(4, 6, 7); assert_eq!(v, Vec3::new(5, 8, 10)); let mut v = Vec4::new(1, 2, 3, 4); v += Vec4::new(5, 7, 9, 10); assert_eq!(v, Vec4::new(6, 9, 12, 14)); let mut v = Vec2::new(1, 2); v += 3; assert_eq!(v, Vec2::new(4, 5)); let mut v = Vec3::new(1, 2, 3); v += 4; assert_eq!(v, Vec3::new(5, 6, 7)); let mut v = Vec4::new(1, 2, 3, 4); v += 5; assert_eq!(v, Vec4::new(6, 7, 8, 9)); } #[test] fn sub_assign() { let mut v = Vec2::new(5, 5); v -= Vec2::new(1, 2); assert_eq!(v, Vec2::new(4, 3)); let mut v = Vec3::new(7, 7, 7); v -= Vec3::new(1, 2, 3); assert_eq!(v, Vec3::new(6, 5, 4)); let mut v = Vec4::new(9, 9, 9, 9); v -= Vec4::new(1, 2, 3, 4); assert_eq!(v, Vec4::new(8, 7, 6, 5)); let mut v = Vec2::new(3, 2); v -= 2; assert_eq!(v, Vec2::new(1, 0)); let mut v = Vec3::new(7, 6, 5); v -= 4; assert_eq!(v, Vec3::new(3, 2, 1)); let mut v = Vec4::new(9, 8, 7, 6); v -= 5; assert_eq!(v, Vec4::new(4, 3, 2, 
1)); } #[test] fn mul_assign() { let mut v = Vec2::new(2, 3); v *= 4; assert_eq!(v, Vec2::new(8, 12)); let mut v = Vec3::new(2, 3, 4); v *= 5; assert_eq!(v, Vec3::new(10, 15, 20)); let mut v = Vec4::new(2, 3, 4, 5); v *= 6; assert_eq!(v, Vec4::new(12, 18, 24, 30)); } #[test] fn div_assign() { let mut v = Vec2::new(8, 12); v /= 4; assert_eq!(v, Vec2::new(2, 3)); let mut v = Vec3::new(10, 15, 20); v /= 5; assert_eq!(v, Vec3::new(2, 3, 4)); let mut v = Vec4::new(12, 18, 24, 30); v /= 6; assert_eq!(v, Vec4::new(2, 3, 4, 5)); } #[test] fn abs_diff_eq() { assert_abs_diff_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_abs_diff_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0); } #[test] fn relative_eq() { assert_relative_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_relative_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0,); assert_relative_eq!( &Vec2::new(2.0, 2.0), &Vec2::ones(), epsilon = 0.0, max_relative = 0.5 ); } }
assert_eq!(Vec3::new(0.0, 1.0, 2.0).min_comp(), 0.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).min_comp(), 0.0); }
function_block-function_prefixed
[ { "content": "// Returns the refracted direction for `wi` and `n` or `None` if total internal reflection happens.\n\nfn refract(wi: Vec3<f32>, n: Normal<f32>, eta: f32) -> Option<Vec3<f32>> {\n\n let cos_theta_i = n.dot_v(wi);\n\n let sin_2_theta_i = (1.0 - cos_theta_i * cos_theta_i).max(0.0);\n\n let sin_2_theta_t = eta * eta * sin_2_theta_i;\n\n\n\n let total_internal_reflection = sin_2_theta_t >= 1.0;\n\n if total_internal_reflection {\n\n return None;\n\n }\n\n\n\n let cos_theta_t = (1.0 - sin_2_theta_t).sqrt();\n\n Some(-wi * eta + Vec3::from(n) * (eta * cos_theta_i - cos_theta_t))\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 0, "score": 176125.6128062465 }, { "content": "// From https://www.pbr-book.org/3ed-2018/Geometry_and_Transformations/Vectors#CoordinateSystemfromaVector\n\n/// Creates perpendicular vectors for `v`.\n\n///\n\n/// `v` is expecte to be normalized.\n\npub fn coordinate_system<T: common::FloatValueType>(v: Vec3<T>) -> (Vec3<T>, Vec3<T>) {\n\n let v1 = if v.x.abs() > v.y.abs() {\n\n Vec3::new(-v.z, T::zero(), v.x) / (v.x * v.x + v.z * v.z).sqrt()\n\n } else {\n\n Vec3::new(T::zero(), v.z, -v.y) / (v.y * v.y + v.z + v.z)\n\n };\n\n let v2 = v.cross(v1);\n\n (v1, v2)\n\n}\n", "file_path": "yuki/src/math/mod.rs", "rank": 1, "score": 156598.22327066818 }, { "content": "fn reflect(wo: Vec3<f32>, n: Vec3<f32>) -> Vec3<f32> {\n\n -wo + n * 2.0 * wo.dot(n)\n\n}\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 2, "score": 154125.25610237243 }, { "content": "fn same_hemisphere(w: Vec3<f32>, wp: Vec3<f32>) -> bool {\n\n w.z * wp.z > 0.0\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 3, "score": 147587.28493781475 }, { "content": "/// Creates a world-to-camera [`Transform`] with the camera at `pos` looking at `target` with `up` as the up vector.\n\npub fn look_at<T>(pos: Point3<T>, target: Point3<T>, up: Vec3<T>) -> Transform<T>\n\nwhere\n\n T: FloatValueType,\n\n{\n\n let dir = (target - 
pos).normalized();\n\n let right = up.normalized().cross(dir).normalized();\n\n let new_up = dir.cross(right);\n\n let camera_to_world = Matrix4x4::new([\n\n [right.x, new_up.x, dir.x, pos.x],\n\n [right.y, new_up.y, dir.y, pos.y],\n\n [right.z, new_up.z, dir.z, pos.z],\n\n [T::zero(), T::zero(), T::zero(), T::one()],\n\n ]);\n\n\n\n Transform::new_full(camera_to_world.inverted(), camera_to_world)\n\n}\n", "file_path": "yuki/src/math/transforms.rs", "rank": 4, "score": 143914.65042188126 }, { "content": "fn sin_2_theta(w: Vec3<f32>) -> f32 {\n\n (1.0 - cos_2_theta(w)).max(0.0)\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 5, "score": 140070.09028989257 }, { "content": "fn sin_phi(w: Vec3<f32>) -> f32 {\n\n let sin_theta = sin_theta(w);\n\n if sin_theta == 0.0 {\n\n 1.0\n\n } else {\n\n (w.y / sin_theta).clamp(-1.0, 1.0)\n\n }\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 6, "score": 140070.09028989257 }, { "content": "fn tan_theta(w: Vec3<f32>) -> f32 {\n\n sin_theta(w) / cos_theta(w)\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 7, "score": 140070.09028989257 }, { "content": "fn cos_phi(w: Vec3<f32>) -> f32 {\n\n let sin_theta = sin_theta(w);\n\n if sin_theta == 0.0 {\n\n 1.0\n\n } else {\n\n (w.x / sin_theta).clamp(-1.0, 1.0)\n\n }\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 8, "score": 140070.09028989257 }, { "content": "fn cos_2_phi(w: Vec3<f32>) -> f32 {\n\n cos_phi(w) * cos_phi(w)\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 9, "score": 140070.09028989257 }, { "content": "fn sin_2_phi(w: Vec3<f32>) -> f32 {\n\n sin_phi(w) * sin_phi(w)\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 10, "score": 140070.09028989257 }, { "content": "fn cos_2_theta(w: Vec3<f32>) -> f32 {\n\n w.z * w.z\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 11, "score": 140070.09028989257 }, { "content": "fn tan_2_theta(w: 
Vec3<f32>) -> f32 {\n\n sin_2_theta(w) / cos_2_theta(w)\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 12, "score": 140070.09028989257 }, { "content": "fn cos_theta(w: Vec3<f32>) -> f32 {\n\n w.z\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 13, "score": 140070.09028989257 }, { "content": "fn sin_theta(w: Vec3<f32>) -> f32 {\n\n sin_2_theta(w).sqrt()\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 14, "score": 140070.09028989257 }, { "content": "pub fn find_min_max(film: &Mutex<Film>, channel: HeatmapChannel) -> Result<(f32, f32), DrawError> {\n\n yuki_trace!(\"find_min_max: Waiting for lock on film\");\n\n let film = film.lock().map_err(DrawError::FilmPoison)?;\n\n yuki_trace!(\"find_min_max: Acquired film\");\n\n\n\n let px_accessor: Box<dyn Fn(Spectrum<f32>) -> f32> = match &channel {\n\n HeatmapChannel::Red | HeatmapChannel::Green | HeatmapChannel::Blue => {\n\n Box::new(|px: Spectrum<f32>| px[channel as usize])\n\n }\n\n HeatmapChannel::Luminance => {\n\n Box::new(|px: Spectrum<f32>| 0.2126 * px.r + 0.7152 * px.g + 0.0722 * px.b)\n\n }\n\n };\n\n\n\n // TODO: This is slow for large films. 
Do we care?\n\n let ret = film\n\n .pixels()\n\n .iter()\n\n .fold((f32::MAX, f32::MIN), |(min, max), &px| {\n\n let v = px_accessor(px);\n\n (min.min(v), max.max(v))\n\n });\n\n\n\n yuki_trace!(\"find_min_max: Releasing film\");\n\n Ok(ret)\n\n}\n", "file_path": "yuki/src/app/renderpasses/tonemap.rs", "rank": 15, "score": 130566.49159212077 }, { "content": "pub fn cosine_sample_hemisphere(u: Point2<f32>) -> Vec3<f32> {\n\n let d = concentric_sample_disk(u);\n\n let z = (1.0 - d.x * d.x - d.y * d.y).max(0.0).sqrt();\n\n Vec3::new(d.x, d.y, z)\n\n}\n\n\n", "file_path": "yuki/src/sampling/mod.rs", "rank": 16, "score": 129554.44257463129 }, { "content": "/// Returns `true` if the value was changed.\n\nfn u32_picker(ui: &imgui::Ui, label: &ImStr, v: &mut u32, min: u32, max: u32, speed: f32) -> bool {\n\n let mut vi = *v as i64;\n\n\n\n let value_changed = imgui::Drag::new(label)\n\n .range((min as i64)..=(max as i64))\n\n .flags(imgui::SliderFlags::ALWAYS_CLAMP)\n\n .speed(speed)\n\n .build(ui, &mut vi);\n\n\n\n *v = u32::try_from(vi).unwrap();\n\n\n\n value_changed\n\n}\n\n\n", "file_path": "yuki/src/app/ui.rs", "rank": 17, "score": 128748.94221036689 }, { "content": "/// Returns `true` if the value was changed.\n\nfn u16_picker(ui: &imgui::Ui, label: &ImStr, v: &mut u16, min: u16, max: u16, speed: f32) -> bool {\n\n let mut vi = *v as i32;\n\n\n\n let value_changed = imgui::Drag::new(label)\n\n .range((min as i32)..=(max as i32))\n\n .flags(imgui::SliderFlags::ALWAYS_CLAMP)\n\n .speed(speed)\n\n .build(ui, &mut vi);\n\n\n\n *v = u16::try_from(vi).unwrap();\n\n\n\n value_changed\n\n}\n\n\n", "file_path": "yuki/src/app/ui.rs", "rank": 18, "score": 128748.94221036689 }, { "content": "pub fn load(settings: &SceneLoadSettings) -> Result<(Scene, CameraParameters, FilmSettings)> {\n\n let dir_path = settings.path.parent().unwrap().to_path_buf();\n\n let file = std::fs::File::open(settings.path.to_str().unwrap())?;\n\n let file_buf = std::io::BufReader::new(file);\n\n\n\n 
let mut meshes = Vec::new();\n\n let mut shapes = Vec::new();\n\n let mut materials: HashMap<String, Arc<dyn Material>> = HashMap::new();\n\n let mut lights: Vec<Arc<dyn Light>> = Vec::new();\n\n let mut background = Spectrum::zeros();\n\n let mut camera_params = CameraParameters::default();\n\n let mut film_settings = FilmSettings::default();\n\n let mut parser = EventReader::new(file_buf);\n\n let mut indent = String::new();\n\n let mut ignore_level: Option<u32> = None;\n\n loop {\n\n match parser.next() {\n\n Ok(evt) => match evt {\n\n XmlEvent::StartDocument {\n\n version,\n", "file_path": "yuki/src/scene/mitsuba/mod.rs", "rank": 19, "score": 116329.47120029958 }, { "content": "fn setup_logger() -> Result<(), fern::InitError> {\n\n fern::Dispatch::new()\n\n .format(|out, message, record| {\n\n out.finish(format_args!(\n\n \"{}[{}][{}:{}] {}\",\n\n chrono::Local::now().format(\"[%Y-%m-%d][%H:%M:%S:%3f]\"),\n\n record.level(),\n\n record.target(),\n\n record.line().unwrap_or(0),\n\n message\n\n ));\n\n })\n\n // .level(log::LevelFilter::Trace)\n\n // .level(log::LevelFilter::Debug)\n\n .level(log::LevelFilter::Info)\n\n // .level(log::LevelFilter::Warn)\n\n // .level(log::LevelFilter::Error)\n\n .filter(|meta| meta.target().starts_with(\"yuki\"))\n\n .chain(std::io::stdout())\n\n .chain(std::fs::File::create(\"yuki.log\")?)\n\n .apply()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "yuki/src/main.rs", "rank": 20, "score": 114136.2797273196 }, { "content": "fn spherical_direction(sin_theta: f32, cos_theta: f32, phi: f32) -> Vec3<f32> {\n\n Vec3::new(sin_theta * phi.cos(), sin_theta * phi.sin(), cos_theta)\n\n}\n\n\n", "file_path": "yuki/src/materials/bsdfs/mod.rs", "rank": 21, "score": 114073.85507760587 }, { "content": "fn load_settings() -> Result<app::InitialSettings, serde_yaml::Error> {\n\n match File::open(\"settings.yaml\") {\n\n Ok(file) => {\n\n let reader = BufReader::new(file);\n\n let settings = serde_yaml::from_reader(reader)?;\n\n yuki_info!(\"Found 
settings\");\n\n Ok(settings)\n\n }\n\n Err(why) => {\n\n yuki_info!(\"Could not load settings: {}\", why);\n\n Ok(app::InitialSettings::default())\n\n }\n\n }\n\n}\n", "file_path": "yuki/src/main.rs", "rank": 22, "score": 102750.49774363507 }, { "content": "pub fn exr_path(scene: &Scene) -> Result<PathBuf, String> {\n\n match std::env::current_dir() {\n\n Ok(mut path) => {\n\n let now = chrono::Local::now();\n\n let timestamp = format!(\n\n \"{:04}{:02}{:02}_{:02}{:02}{:02}\",\n\n now.year(),\n\n now.month(),\n\n now.day(),\n\n now.hour(),\n\n now.minute(),\n\n now.second()\n\n );\n\n let filename = format!(\"{}_{}.exr\", scene.name, timestamp);\n\n path.push(filename);\n\n\n\n Ok(path)\n\n }\n\n Err(why) => Err(format!(\n\n \"Error getting current working directory: {:?}\",\n\n why\n\n )),\n\n }\n\n}\n\n\n", "file_path": "yuki/src/app/util.rs", "rank": 23, "score": 98707.14793177624 }, { "content": "/// Creates a new `Transform` that is a translation by `delta`.\n\npub fn translation<T>(delta: Vec3<T>) -> Transform<T>\n\nwhere\n\n T: FloatValueType,\n\n{\n\n let m = Matrix4x4::new([\n\n [T::one(), T::zero(), T::zero(), delta.x],\n\n [T::zero(), T::one(), T::zero(), delta.y],\n\n [T::zero(), T::zero(), T::one(), delta.z],\n\n [T::zero(), T::zero(), T::zero(), T::one()],\n\n ]);\n\n let m_inv = Matrix4x4::new([\n\n [T::one(), T::zero(), T::zero(), -delta.x],\n\n [T::zero(), T::one(), T::zero(), -delta.y],\n\n [T::zero(), T::zero(), T::one(), -delta.z],\n\n [T::zero(), T::zero(), T::zero(), T::one()],\n\n ]);\n\n\n\n Transform::new_full(m, m_inv)\n\n}\n\n\n", "file_path": "yuki/src/math/transforms.rs", "rank": 24, "score": 97956.32417959801 }, { "content": "/// Returns `true` if the value was changed.\n\nfn vec2_u16_picker(\n\n ui: &imgui::Ui,\n\n label: &ImStr,\n\n v: &mut Vec2<u16>,\n\n min: u16,\n\n max: u16,\n\n speed: f32,\n\n) -> bool {\n\n let mut vi = [v.x as i32, v.y as i32];\n\n\n\n let value_changed = imgui::Drag::new(label)\n\n .range((min as i32)..=(max 
as i32))\n\n .flags(imgui::SliderFlags::ALWAYS_CLAMP)\n\n .speed(speed)\n\n .build_array(ui, &mut vi);\n\n\n\n v.x = u16::try_from(vi[0]).unwrap();\n\n v.y = u16::try_from(vi[1]).unwrap();\n\n\n\n value_changed\n\n}\n\n\n", "file_path": "yuki/src/app/ui.rs", "rank": 25, "score": 97309.99617700661 }, { "content": "/// Creates a new `Transform` that is a rotation of euler angles `theta`.\n\npub fn rotation_euler<T>(theta: Vec3<T>) -> Transform<T>\n\nwhere\n\n T: FloatValueType,\n\n{\n\n &rotation_x(theta.x) * &(&rotation_y(theta.y) * &rotation_z(theta.z))\n\n}\n\n\n", "file_path": "yuki/src/math/transforms.rs", "rank": 26, "score": 96230.1487501789 }, { "content": "pub fn load(\n\n settings: &SceneLoadSettings,\n\n) -> Result<(Scene, CameraParameters, FilmSettings), LoadError> {\n\n // TODO: Very large files should be read on the fly, not as a whole\n\n let input: Vec<u8> = std::fs::read_to_string(&settings.path)\n\n .map_err(LoadError::Io)?\n\n .bytes()\n\n .collect();\n\n\n\n let parent_path = settings.path.parent().unwrap();\n\n\n\n let mut lexer = Lexer::new(&input);\n\n\n\n let mut render_options = RenderOptions::default();\n\n\n\n let mut graphics_state = GraphicsState::default();\n\n let mut current_transform: Transform<f32> = Transform::default();\n\n let mut active_transform_bits = TransformBits::all();\n\n\n\n let mut transform_stack = Vec::new();\n", "file_path": "yuki/src/scene/pbrt/mod.rs", "rank": 27, "score": 94496.39549679741 }, { "content": "/// Creates a new `Transform` that is a rotation of `theta` radians around `axis`.\n\npub fn rotation<T>(theta: T, axis: Vec3<T>) -> Transform<T>\n\nwhere\n\n T: FloatValueType,\n\n{\n\n let a = axis.normalized();\n\n let cos_theta = theta.cos();\n\n let sin_theta = theta.sin();\n\n let m = Matrix4x4::new([\n\n [\n\n a.x * a.x + (T::one() - a.x * a.x) * cos_theta,\n\n a.x * a.y * (T::one() - cos_theta) - a.z * sin_theta,\n\n a.x * a.z * (T::one() - cos_theta) + a.y * sin_theta,\n\n T::zero(),\n\n ],\n\n [\n\n a.x 
* a.y * (T::one() - cos_theta) + a.z * sin_theta,\n\n a.y * a.y + (T::one() - a.y * a.y) * cos_theta,\n\n a.y * a.z * (T::one() - cos_theta) - a.x * sin_theta,\n\n T::zero(),\n\n ],\n", "file_path": "yuki/src/math/transforms.rs", "rank": 28, "score": 91054.58083978307 }, { "content": "pub fn normal_impl(item: &DeriveInput) -> TokenStream {\n\n let vec_type = &item.ident;\n\n\n\n let parsed_generics = match parse_generics(&item.generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(\"Impl Normal\", item.ident.span(), errors).to_compile_error();\n\n }\n\n };\n\n\n\n let member_ops = vec_normal_members_impl(&item.data, vec_type, &parsed_generics.generic_param);\n\n\n\n vec_like_impl(\n\n &item.data,\n\n vec_type,\n\n parsed_generics,\n\n Some(member_ops),\n\n None,\n\n )\n\n}\n", "file_path": "yuki_derive/src/impl_normal.rs", "rank": 29, "score": 89975.71186599755 }, { "content": "pub fn parse_rgb(attributes: &[OwnedAttribute], expected_name: &str) -> Result<Spectrum<f32>> {\n\n let mut v = Spectrum::zeros();\n\n let name = find_attr!(attributes, \"name\").as_str();\n\n if name != expected_name {\n\n return Err(format!(\"Expected rgb to be '{}', got '{}'\", expected_name, name).into());\n\n }\n\n for (i, c) in find_attr!(attributes, \"value\")\n\n .split(' ')\n\n .map(|c| c.parse::<f32>().unwrap())\n\n .enumerate()\n\n {\n\n v[i] = c;\n\n }\n\n Ok(v)\n\n}\n", "file_path": "yuki/src/scene/mitsuba/common.rs", "rank": 30, "score": 88735.7733671776 }, { "content": "pub fn parse_generics(generics: &Generics) -> Result<ParsedGenerics, Vec<(&str, Option<Span>)>> {\n\n // We expect a struct with the form\n\n // struct Type<T>\n\n // where\n\n // T: Bounds\n\n let mut generic_param = None;\n\n let mut errors = vec![];\n\n for g in generics.params.iter() {\n\n match g {\n\n GenericParam::Type(t) => {\n\n if generic_param.is_some() {\n\n errors.push((\"Only one generic type param supported\", Some(t.span())));\n\n }\n\n generic_param = 
Some(t.ident.clone());\n\n }\n\n GenericParam::Lifetime(l) => {\n\n errors.push((\"Lifetimes not supported\", Some(l.span())));\n\n }\n\n GenericParam::Const(c) => {\n\n errors.push((\"Consts not supported\", Some(c.span())));\n", "file_path": "yuki_derive/src/common.rs", "rank": 31, "score": 88418.20986659077 }, { "content": "pub fn vec_normal_members_impl(\n\n data: &Data,\n\n vec_type: &Ident,\n\n generic_param: &Ident,\n\n) -> TokenStream {\n\n let dot_ret = per_component_tokens(\n\n data,\n\n &|c: &Option<Ident>, f: &Field| quote_spanned!(f.span() => self.#c * other.#c),\n\n &|recurse| quote!( #generic_param::zero() #(+ #recurse)*),\n\n );\n\n\n\n let str_type = vec_type.to_string();\n\n let dot_doc = format!(\n\n \"Calculates the dot product of this `{0}` and another `{0}`.\",\n\n str_type\n\n );\n\n let len_sqr_doc = format!(\"Calculates the squared length of this `{0}`.\", str_type);\n\n let len_doc = format!(\"Calculates the length of this `{0}`.\", str_type);\n\n let normalized_doc = format!(\"Returns a new `{0}` with this `{0}` normalized.\", str_type);\n\n\n", "file_path": "yuki_derive/src/impl_vec_like.rs", "rank": 32, "score": 86575.5135156335 }, { "content": " #[test]\n\n fn zeros() {\n\n assert_eq!(Normal::zeros(), Normal::new(0.0, 0.0, 0.0));\n\n }\n\n\n\n #[test]\n\n fn ones() {\n\n assert_eq!(Normal::ones(), Normal::new(1.0, 1.0, 1.0));\n\n }\n\n\n\n #[test]\n\n fn has_nans() {\n\n // Test constructor as it should panic\n\n let result = panic::catch_unwind(|| Normal::new(f32::NAN, 0.0, 0.0));\n\n assert!(result.is_err());\n\n\n\n // Test shorthand constructor\n\n let result = panic::catch_unwind(|| Normal::new(f32::NAN, 0.0, 0.0));\n\n assert!(result.is_err());\n\n }\n", "file_path": "tests/src/normal.rs", "rank": 33, "score": 81722.67086168552 }, { "content": " #[test]\n\n fn dot() {\n\n assert_eq!(\n\n Normal::new(2.0, 3.0, 4.0).dot(Normal::new(5.0, 6.0, 7.0)),\n\n 2.0 * 5.0 + 3.0 * 6.0 + 4.0 * 7.0\n\n );\n\n assert_eq!(\n\n Normal::new(2.0, 
3.0, 4.0).dot_v(Vec3::new(5.0, 6.0, 7.0)),\n\n 2.0 * 5.0 + 3.0 * 6.0 + 4.0 * 7.0\n\n );\n\n }\n\n\n\n #[test]\n\n fn len_sqr() {\n\n assert_eq!(\n\n Normal::new(2.0, 3.0, 4.0).len_sqr(),\n\n 2.0 * 2.0 + 3.0 * 3.0 + 4.0 * 4.0\n\n );\n\n }\n\n\n", "file_path": "tests/src/normal.rs", "rank": 34, "score": 81718.296583551 }, { "content": "\n\n #[test]\n\n fn from() {\n\n assert_eq!(\n\n Normal::from(Vec3::new(1.0, 2.0, 3.0)),\n\n Normal::new(1.0, 2.0, 3.0)\n\n );\n\n }\n\n\n\n #[test]\n\n fn index() {\n\n let v = Normal::new(0.0, 1.0, 2.0);\n\n assert_eq!(v.x, v[0]);\n\n\n\n let mut v = Normal::zeros();\n\n v[0] = 1.0;\n\n assert_eq!(v[0], 1.0);\n\n }\n\n\n\n #[test]\n", "file_path": "tests/src/normal.rs", "rank": 35, "score": 81717.65477531966 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use approx::{abs_diff_eq, assert_abs_diff_eq, relative_eq};\n\n use std::panic;\n\n\n\n use yuki::math::{Normal, Vec3};\n\n\n\n // Test the Normal specific methods and merely the existence of methods shared\n\n // with Vec* since vector tests already validate permutations for those\n\n // Aim is to check everything we expect is implemented and works as expected.\n\n\n\n #[test]\n\n fn new() {\n\n let v = Normal::new(0.0, 1.0, 2.0);\n\n assert_eq!(v.x, 0.0);\n\n assert_eq!(v.y, 1.0);\n\n assert_eq!(v.z, 2.0);\n\n assert_eq!(Normal::new(0.0, 1.0, 2.0), v);\n\n }\n\n\n", "file_path": "tests/src/normal.rs", "rank": 36, "score": 81717.27953451283 }, { "content": " #[test]\n\n fn len() {\n\n assert_abs_diff_eq!(\n\n Normal::new(2.0, 3.0, 4.0).len(),\n\n (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32).sqrt()\n\n );\n\n }\n\n\n\n #[test]\n\n fn normalized() {\n\n assert_abs_diff_eq!(Normal::new(1.0, 1.0, 1.0).normalized().len(), 1.0);\n\n }\n\n\n\n #[test]\n\n fn permutation() {\n\n assert_eq!(\n\n Normal::new(3.0, 4.0, 5.0).permuted(1, 2, 0),\n\n Normal::new(4.0, 5.0, 3.0)\n\n );\n\n }\n", "file_path": "tests/src/normal.rs", "rank": 37, "score": 81714.62842528561 }, { "content": " v 
/= 5.0;\n\n assert_eq!(v, Normal::new(2.0, 3.0, 4.0));\n\n }\n\n\n\n #[test]\n\n fn abs_diff_eq() {\n\n assert!(abs_diff_eq!(\n\n &Normal::<f32>::zeros(),\n\n &Normal::<f32>::zeros()\n\n ));\n\n }\n\n\n\n #[test]\n\n fn relative_eq() {\n\n assert!(relative_eq!(\n\n &Normal::<f32>::zeros(),\n\n &Normal::<f32>::zeros()\n\n ));\n\n }\n\n}\n", "file_path": "tests/src/normal.rs", "rank": 38, "score": 81712.93732161606 }, { "content": " #[test]\n\n fn mul() {\n\n assert_eq!(\n\n Normal::new(2.0, 3.0, 4.0) * 5.0,\n\n Normal::new(10.0, 15.0, 20.0)\n\n );\n\n }\n\n\n\n #[test]\n\n fn div() {\n\n assert_eq!(\n\n Normal::new(10.0, 15.0, 20.0) / 5.0,\n\n Normal::new(2.0, 3.0, 4.0)\n\n );\n\n }\n\n\n\n #[test]\n\n fn add_assign() {\n\n let mut v = Normal::new(1.0, 2.0, 3.0);\n\n v += Normal::new(4.0, 6.0, 7.0);\n", "file_path": "tests/src/normal.rs", "rank": 39, "score": 81710.91670496497 }, { "content": " assert_eq!(v, Normal::new(5.0, 8.0, 10.0));\n\n }\n\n\n\n #[test]\n\n fn sub_assign() {\n\n let mut v = Normal::new(7.0, 7.0, 7.0);\n\n v -= Normal::new(1.0, 2.0, 3.0);\n\n assert_eq!(v, Normal::new(6.0, 5.0, 4.0));\n\n }\n\n\n\n #[test]\n\n fn mul_assign() {\n\n let mut v = Normal::new(2.0, 3.0, 4.0);\n\n v *= 5.0;\n\n assert_eq!(v, Normal::new(10.0, 15.0, 20.0));\n\n }\n\n\n\n #[test]\n\n fn div_assign() {\n\n let mut v = Normal::new(10.0, 15.0, 20.0);\n", "file_path": "tests/src/normal.rs", "rank": 40, "score": 81710.63528239739 }, { "content": " fn neg() {\n\n assert_eq!(-Normal::new(1.0, 2.0, 3.0), Normal::new(-1.0, -2.0, -3.0));\n\n }\n\n\n\n #[test]\n\n fn add() {\n\n assert_eq!(\n\n Normal::new(1.0, 2.0, 3.0) + Normal::new(4.0, 6.0, 7.0),\n\n Normal::new(5.0, 8.0, 10.0)\n\n );\n\n }\n\n\n\n #[test]\n\n fn sub() {\n\n assert_eq!(\n\n Normal::new(7.0, 7.0, 7.0) - Normal::new(1.0, 2.0, 3.0),\n\n Normal::new(6.0, 5.0, 4.0)\n\n );\n\n }\n\n\n", "file_path": "tests/src/normal.rs", "rank": 41, "score": 81710.5756281928 }, { "content": "pub fn concentric_sample_disk(u: 
Point2<f32>) -> Point2<f32> {\n\n let offset = u * 2.0 - Vec2::new(1.0, 1.0);\n\n if offset == Point2::zeros() {\n\n return Point2::zeros();\n\n }\n\n\n\n let (theta, r) = if offset.x.abs() > offset.y.abs() {\n\n (\n\n std::f32::consts::FRAC_PI_4 * (offset.y / offset.x),\n\n offset.x,\n\n )\n\n } else {\n\n (\n\n std::f32::consts::FRAC_PI_2 - std::f32::consts::FRAC_PI_4 * (offset.x / offset.y),\n\n offset.y,\n\n )\n\n };\n\n\n\n Point2::new(theta.cos(), theta.sin()) * r\n\n}\n", "file_path": "yuki/src/sampling/mod.rs", "rank": 42, "score": 77419.44489100858 }, { "content": "fn try_split_param_def(def: &str) -> Option<(String, String)> {\n\n let mut split = def.split_whitespace();\n\n match split.next_tuple() {\n\n Some((type_name, param_name)) => {\n\n if split.count() == 0 {\n\n Some((type_name.into(), param_name.into()))\n\n } else {\n\n None\n\n }\n\n }\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "yuki/src/scene/pbrt/mod.rs", "rank": 43, "score": 75138.41039012902 }, { "content": "fn sampled_spectrum_into_rgb(lambda: &[f32], samples: &[f32]) -> Spectrum<f32> {\n\n assert!(\n\n lambda.len() == samples.len(),\n\n \"Sample count doesn't match the number of wavelengths\"\n\n );\n\n if !lambda.is_sorted() {\n\n let mut sorted_pairs: Vec<(&f32, &f32)> = lambda.iter().zip(samples.iter()).collect();\n\n sorted_pairs.sort_unstable_by(|p0, p1| p0.0.partial_cmp(p1.0).unwrap());\n\n\n\n let mut sorted_lambda = vec![0.0; lambda.len()];\n\n let mut sorted_samples = vec![0.0; samples.len()];\n\n for (i, p) in sorted_pairs.iter().enumerate() {\n\n sorted_lambda[i] = *p.0;\n\n sorted_samples[i] = *p.1;\n\n }\n\n\n\n sampled_spectrum_into_rgb(&sorted_lambda, &sorted_samples);\n\n };\n\n\n\n // Riemann sum\n", "file_path": "yuki/src/scene/pbrt/mod.rs", "rank": 44, "score": 74097.62257021407 }, { "content": "pub fn film_or_new(film: &Arc<Mutex<Film>>, settings: FilmSettings) -> Arc<Mutex<Film>> {\n\n yuki_trace!(\"film_or_new: Waiting for lock on film (res)\");\n\n let 
film_res = film.lock().unwrap().res();\n\n yuki_trace!(\"film_or_new: Acquired and released film (res)\");\n\n\n\n if settings.clear || film_res != settings.res {\n\n assert!(\n\n settings.res.x >= settings.tile_dim && settings.res.y >= settings.tile_dim,\n\n \"Film resolution is smaller than tile size\"\n\n );\n\n\n\n yuki_trace!(\"film_or_new: Creating new film\");\n\n\n\n let new_film = Arc::new(Mutex::new(Film::new(settings.res)));\n\n yuki_trace!(\"film_or_new: Releasing film\");\n\n new_film\n\n } else {\n\n yuki_trace!(\"film_or_new: Waiting for lock on film (move_generation)\");\n\n film.lock().unwrap().move_generation();\n\n yuki_trace!(\"film_or_new: Acquired and released film (move_generation)\");\n\n\n\n yuki_debug!(\n\n \"film_or_new: New film generation {}\",\n\n film.lock().unwrap().generation\n\n );\n\n\n\n Arc::clone(film)\n\n }\n\n}\n\n\n", "file_path": "yuki/src/film.rs", "rank": 45, "score": 68513.34011629553 }, { "content": "fn main() {\n\n let s = Matrix4x4::new([\n\n [2.0, 0.0, 0.0, 0.0],\n\n [0.0, 3.0, 0.0, 0.0],\n\n [0.0, 0.0, 4.0, 0.0],\n\n [0.0, 0.0, 0.0, 1.0],\n\n ]);\n\n let r = Matrix4x4::new([\n\n [-0.6024969, 0.6975837, -0.3877816, 0.0],\n\n [-0.1818856, -0.5930915, -0.7843214, 0.0],\n\n [-0.7771198, -0.4020193, 0.4842162, 0.0],\n\n [0.0, 0.0, 0.0, 1.0],\n\n ]);\n\n let t = Matrix4x4::new([\n\n [1.0, 0.0, 0.0, 2.0],\n\n [0.0, 1.0, 0.0, 3.0],\n\n [0.0, 0.0, 1.0, 4.0],\n\n [0.0, 0.0, 0.0, 1.0],\n\n ]);\n\n let p = Matrix4x4::new([\n", "file_path": "bench/src/main.rs", "rank": 46, "score": 65861.13758670363 }, { "content": "fn main() {\n\n if let Err(why) = setup_logger() {\n\n win_dbg_logger::output_debug_string(&format!(\"{}\", why));\n\n panic!(\"{}\", why);\n\n };\n\n\n\n // Let's catch panic messages ourselves and output everywhere\n\n std::panic::set_hook(Box::new(|info| {\n\n let location_str = if let Some(location) = info.location() {\n\n format!(\"{}:{}\", location.file(), location.line())\n\n } else {\n\n yuki_error!(\"No 
location for panic!\");\n\n \"\".into()\n\n };\n\n let payload = match info.payload().downcast_ref::<&'static str>() {\n\n Some(s) => s,\n\n None => match info.payload().downcast_ref::<String>() {\n\n Some(s) => s,\n\n None => \"Panic payload is not &'static str or String\",\n\n },\n", "file_path": "yuki/src/main.rs", "rank": 47, "score": 65861.13758670363 }, { "content": "fn launch_manager(\n\n to_parent: Sender<RenderManagerMessage>,\n\n from_parent: Receiver<Option<RenderManagerPayload>>,\n\n) -> JoinHandle<()> {\n\n std::thread::spawn(move || {\n\n yuki_trace!(\"Render manager: Launch threads\");\n\n // TODO: Keep track of how physical vs logical behaves with optimizations\n\n let thread_count = num_cpus::get();\n\n let (child_send, from_children) = channel();\n\n let children: HashMap<usize, (Sender<Option<RenderThreadPayload>>, JoinHandle<_>)> = (0\n\n ..thread_count)\n\n .map(|thread| {\n\n let (to_child, child_receive) = channel();\n\n let child_send = child_send.clone();\n\n (\n\n thread,\n\n (\n\n to_child,\n\n std::thread::spawn(move || {\n\n launch_worker(thread, &child_send, &child_receive);\n", "file_path": "yuki/src/renderer.rs", "rank": 48, "score": 64400.9970284774 }, { "content": "fn generate_tiles(\n\n res: Vec2<u16>,\n\n tile_dim: u16,\n\n film_gen: u64,\n\n film_id: u32,\n\n) -> HashMap<(u16, u16), FilmTile> {\n\n // Collect tiles spanning the whole image hashed by their tile coordinates\n\n let mut tiles = HashMap::new();\n\n let dim = tile_dim;\n\n yuki_trace!(\"generate_tiles: Generating tiles\");\n\n for j in (0..res.y).step_by(dim as usize) {\n\n for i in (0..res.x).step_by(dim as usize) {\n\n // Limit tiles to film dimensions\n\n let max_x = (i + dim).min(res.x);\n\n let max_y = (j + dim).min(res.y);\n\n\n\n tiles.insert(\n\n (i / dim, j / dim),\n\n FilmTile::new(\n\n Bounds2::new(Point2::new(i, j), Point2::new(max_x, max_y)),\n", "file_path": "yuki/src/film.rs", "rank": 49, "score": 64400.9970284774 }, { "content": "fn launch_worker(\n\n 
thread_id: usize,\n\n to_parent: &Sender<RenderThreadMessage>,\n\n from_parent: &Receiver<Option<RenderThreadPayload>>,\n\n) {\n\n yuki_debug!(\"Render thread {}: Begin\", thread_id);\n\n\n\n 'thread: loop {\n\n let mut thread_info = ThreadInfo {\n\n render_id: 0,\n\n thread_id,\n\n };\n\n let mut payload: Option<RenderThreadPayload> = None;\n\n\n\n // Blocking recv to avoid spinlock when there is no need to message to parent\n\n let mut newest_msg = match from_parent.recv() {\n\n Ok(msg) => Some(Ok(msg)),\n\n Err(RecvError {}) => {\n\n panic!(\"Render thread {}: Receive channel disconnected\", thread_id)\n\n }\n", "file_path": "yuki/src/renderer.rs", "rank": 50, "score": 64400.9970284774 }, { "content": "fn outward_spiral(\n\n mut tiles: HashMap<(u16, u16), FilmTile>,\n\n res: Vec2<u16>,\n\n tile_dim: u16,\n\n) -> VecDeque<FilmTile> {\n\n // Algo adapted from https://stackoverflow.com/a/398302\n\n\n\n let h_tiles = ((res.x as f32) / (tile_dim as f32)).ceil() as i32;\n\n let v_tiles = ((res.y as f32) / (tile_dim as f32)).ceil() as i32;\n\n let center_x = (h_tiles / 2) - (1 - h_tiles % 2);\n\n let center_y = (v_tiles / 2) - (1 - v_tiles % 2);\n\n let max_dim = h_tiles.max(v_tiles);\n\n\n\n let mut x = 0;\n\n let mut y = 0;\n\n let mut dx = 0;\n\n let mut dy = -1;\n\n let mut tile_queue = VecDeque::new();\n\n yuki_trace!(\"outward_spiral: Collecting queue\");\n\n for _ in 0..(max_dim * max_dim) {\n", "file_path": "yuki/src/film.rs", "rank": 51, "score": 64400.9970284774 }, { "content": "fn stratified_sample_2d(\n\n samples: &mut [Point2<f32>],\n\n n_samples: Vec2<u16>,\n\n jitter: bool,\n\n rng: &mut Pcg32,\n\n) {\n\n let d = Vec2::new(1.0 / (n_samples.x as f32), 1.0 / (n_samples.y as f32));\n\n for y in 0..n_samples.y as usize {\n\n let row_index = y * (n_samples.x as usize);\n\n for x in 0..n_samples.x as usize {\n\n let (jx, jy) = if jitter {\n\n (rng.sample(Standard), rng.sample(Standard))\n\n } else {\n\n (0.5, 0.5)\n\n };\n\n let index = row_index + x;\n\n 
samples[index] = Point2::new(\n\n (((x as f32) + jx) * d.x).min(ONE_MINUS_EPSILON),\n\n (((y as f32) + jy) * d.y).min(ONE_MINUS_EPSILON),\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "yuki/src/sampling/stratified.rs", "rank": 52, "score": 63046.580250816995 }, { "content": "#[must_use]\n\nfn launch_debug_ray(\n\n cursor_state: &CursorState,\n\n display: &glium::Display,\n\n film: &Arc<Mutex<Film>>,\n\n film_settings: FilmSettings,\n\n scene: &Arc<Scene>,\n\n camera_params: CameraParameters,\n\n scene_integrator: IntegratorType,\n\n sampler: SamplerType,\n\n) -> Option<Vec<IntegratorRay>> {\n\n let window_px = cursor_state.position;\n\n yuki_info!(\n\n \"main_loop: Debug ray initiated at window px ({},{})\",\n\n window_px.x,\n\n window_px.y\n\n );\n\n\n\n let (film_w, film_h) = {\n\n yuki_trace!(\"get_film_res: Waiting for lock on film\");\n\n let film = film.lock().unwrap();\n", "file_path": "yuki/src/app/window.rs", "rank": 53, "score": 61791.53626384263 }, { "content": "fn apply_tone_map(\n\n mut tone_map: ToneMapType,\n\n film: &Mutex<Film>,\n\n film_settings: FilmSettings,\n\n) -> (usize, usize, Vec<Spectrum<f32>>) {\n\n let event_loop = EventLoop::new();\n\n let context = expect!(\n\n ContextBuilder::new().build_headless(\n\n &event_loop,\n\n PhysicalSize::new(film_settings.res.x as u32, film_settings.res.y as u32)\n\n ),\n\n \"Failed to create headless context\"\n\n );\n\n let backend = expect!(Headless::new(context), \"Failed to create headless backend\");\n\n\n\n let mut tone_map_film = expect!(\n\n ToneMapFilm::new(&backend),\n\n \"Failed to create tone map render pass\"\n\n );\n\n\n", "file_path": "yuki/src/app/headless.rs", "rank": 54, "score": 61786.80579335716 }, { "content": "/// Returns `true` if camera settings were changed.\n\nfn generate_scene_settings(\n\n ui: &imgui::Ui<'_>,\n\n scene: &Scene,\n\n camera_params: &mut CameraParameters,\n\n load_settings: &mut SceneLoadSettings,\n\n) -> bool {\n\n let mut changed = false;\n\n 
imgui::TreeNode::new(im_str!(\"Scene\"))\n\n .default_open(true)\n\n .build(ui, || {\n\n imgui::TreeNode::new(im_str!(\"Camera\"))\n\n .default_open(true)\n\n .build(ui, || {\n\n changed |= imgui::Drag::new(im_str!(\"Position\"))\n\n .speed(0.1)\n\n .display_format(im_str!(\"%.1f\"))\n\n .build_array(ui, camera_params.position.array_mut());\n\n\n\n changed |= imgui::Drag::new(im_str!(\"Target\"))\n\n .speed(0.1)\n", "file_path": "yuki/src/app/ui.rs", "rank": 55, "score": 61786.80579335716 }, { "content": "fn handle_mouse_gestures(\n\n window_size: glutin::dpi::PhysicalSize<u32>,\n\n camera_params: &mut CameraParameters,\n\n mouse_gesture: &mut Option<MouseGesture>,\n\n camera_offset: &mut Option<CameraOffset>,\n\n) -> bool {\n\n match &mouse_gesture {\n\n Some(MouseGesture {\n\n start_position,\n\n current_position,\n\n gesture,\n\n }) => {\n\n match gesture {\n\n MouseGestureType::TrackBall => {\n\n // Adapted from Max Liani\n\n // https://maxliani.wordpress.com/2021/06/08/offline-to-realtime-camera-manipulation/\n\n let drag_scale = 1.0 / 400.0;\n\n let drag = (*current_position - *start_position) * drag_scale;\n\n\n\n let from_target = camera_params.position - camera_params.target;\n", "file_path": "yuki/src/app/window.rs", "rank": 56, "score": 61786.80579335716 }, { "content": "fn handle_scroll_event(\n\n delta: MouseScrollDelta,\n\n camera_params: CameraParameters,\n\n camera_offset: &mut Option<CameraOffset>,\n\n) {\n\n if camera_offset.is_none() {\n\n let to_target = camera_params.target - camera_params.position;\n\n let dist_target = to_target.len();\n\n let fwd = to_target / dist_target;\n\n\n\n let scroll_scale = dist_target * 0.1;\n\n let scroll = match delta {\n\n MouseScrollDelta::LineDelta(_, y) => y,\n\n MouseScrollDelta::PixelDelta(delta) => delta.y as f32,\n\n };\n\n\n\n let offset = CameraOffset {\n\n position: fwd * scroll * scroll_scale,\n\n ..CameraOffset::default()\n\n };\n", "file_path": "yuki/src/app/window.rs", "rank": 57, "score": 
61786.80579335716 }, { "content": "pub fn load(\n\n path: &Path,\n\n material: &Arc<dyn Material>,\n\n transform: Option<Transform<f32>>,\n\n) -> Result<PlyResult> {\n\n let file = match std::fs::File::open(path.to_str().unwrap()) {\n\n Ok(f) => f,\n\n Err(e) => {\n\n yuki_error!(\"Could not open '{}'\", path.to_string_lossy());\n\n return Err(e.into());\n\n }\n\n };\n\n let mut file_buf = std::io::BufReader::new(file);\n\n\n\n let header =\n\n ply_rs::parser::Parser::<ply_rs::ply::DefaultElement>::new().read_header(&mut file_buf)?;\n\n\n\n if !is_valid(&header) {\n\n return Err(\"PLY: Unsupported content\".into());\n\n }\n", "file_path": "yuki/src/scene/ply.rs", "rank": 58, "score": 61053.17164934641 }, { "content": "fn abs_diff_eq(\n\n data: Data,\n\n vec_type: &Ident,\n\n generic_param: &Ident,\n\n impl_generics: ImplGenerics,\n\n type_generics: TypeGenerics,\n\n where_clause: Option<&WhereClause>,\n\n) -> TokenStream {\n\n let abs_diff_eq_tokens = per_component_tokens(\n\n &data,\n\n &|c: &Option<Ident>, f: &Field| {\n\n quote_spanned! { f.span() =>\n\n self.#c.abs_diff_eq(&other.#c, epsilon)\n\n }\n\n },\n\n &|recurse| quote!(#(#recurse)&&*),\n\n );\n\n\n\n quote! 
{\n\n impl #impl_generics approx::AbsDiffEq for #vec_type #type_generics\n", "file_path": "yuki_derive/src/derive_trait.rs", "rank": 59, "score": 60612.08858830633 }, { "content": "fn per_component_tokens(\n\n component_count: u32,\n\n component_tokens: &dyn Fn(&Ident) -> TokenStream,\n\n combined_tokens: &dyn Fn(ComponentStreams) -> TokenStream,\n\n) -> TokenStream {\n\n let components = [\n\n Ident::new(\"x\", Span::call_site()),\n\n Ident::new(\"y\", Span::call_site()),\n\n Ident::new(\"z\", Span::call_site()),\n\n ];\n\n combined_tokens((0..component_count).map(&(|c| component_tokens(&components[c as usize]))))\n\n}\n", "file_path": "yuki_derive/src/impl_bounds.rs", "rank": 60, "score": 60612.08858830633 }, { "content": "/// Expects attrs ('Trait' 'Other' 'Output')\n\npub fn vec_op(\n\n attr: proc_macro::TokenStream,\n\n item: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let VecOpAttr {\n\n op_trait,\n\n other,\n\n output,\n\n } = parse_macro_input!(attr as VecOpAttr);\n\n let item = parse_macro_input!(item as DeriveInput);\n\n\n\n let impl_tokens = impl_vec_op::vec_op(op_trait, other, Some(&output), &item);\n\n let tokens = quote! 
{\n\n #item\n\n #impl_tokens\n\n };\n\n\n\n // Can be used to print the tokens\n\n // panic!(impl_tokens.to_string());\n\n // panic!(tokens.to_string());\n\n\n\n proc_macro::TokenStream::from(tokens)\n\n}\n\n\n", "file_path": "yuki_derive/src/lib.rs", "rank": 61, "score": 59793.39719188658 }, { "content": "pub fn combined_error(\n\n prefix: &str,\n\n default_span: Span,\n\n errors: Vec<(&str, Option<Span>)>,\n\n) -> syn::Error {\n\n return errors\n\n .iter()\n\n .map(|&(err, span)| {\n\n syn::Error::new(span.unwrap_or(default_span), format!(\"{}: {}\", prefix, err))\n\n })\n\n .reduce(|mut acc, err| {\n\n acc.combine(err);\n\n acc\n\n })\n\n .unwrap();\n\n}\n\n\n", "file_path": "yuki_derive/src/common.rs", "rank": 62, "score": 59793.39719188658 }, { "content": "pub fn write_exr(\n\n width: usize,\n\n height: usize,\n\n pixels: &[Spectrum<f32>],\n\n path: &Path,\n\n) -> Result<(), String> {\n\n yuki_info!(\"Writing out EXR\");\n\n match exr::prelude::write_rgb_file(&path, width, height, |x, y| {\n\n let px = pixels[y * width + x];\n\n (px.r, px.g, px.b)\n\n }) {\n\n Ok(_) => {\n\n yuki_info!(\"EXR written to '{}'\", path.to_string_lossy());\n\n Ok(())\n\n }\n\n Err(why) => Err(format!(\n\n \"Error writing EXR to '{}': {:?}\",\n\n path.to_string_lossy(),\n\n why\n\n )),\n\n }\n\n}\n", "file_path": "yuki/src/app/util.rs", "rank": 63, "score": 59793.39719188658 }, { "content": "pub fn relative_eq(\n\n data: Data,\n\n vec_type: &Ident,\n\n generic_param: &Ident,\n\n impl_generics: ImplGenerics,\n\n type_generics: TypeGenerics,\n\n where_clause: Option<&WhereClause>,\n\n) -> TokenStream {\n\n let relative_eq_tokens = per_component_tokens(\n\n &data,\n\n &|c: &Option<Ident>, f: &Field| {\n\n quote_spanned! { f.span() =>\n\n self.#c.relative_eq(&other.#c, epsilon, max_relative)\n\n }\n\n },\n\n &|recurse| quote!(#(#recurse)&&*),\n\n );\n\n\n\n quote! 
{\n\n impl #impl_generics approx::RelativeEq for #vec_type #type_generics\n", "file_path": "yuki_derive/src/derive_trait.rs", "rank": 64, "score": 58618.67998683575 }, { "content": "/// Expects attrs ('Trait' 'Other')\n\npub fn vec_assign_op(\n\n attr: proc_macro::TokenStream,\n\n item: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let VecAssignOpAttr { op_trait, other } = parse_macro_input!(attr as VecAssignOpAttr);\n\n let item = parse_macro_input!(item as DeriveInput);\n\n\n\n let impl_tokens = impl_vec_op::vec_op(op_trait, other, None, &item);\n\n let tokens = quote! {\n\n #item\n\n #impl_tokens\n\n };\n\n\n\n // Can be used to print the tokens\n\n // panic!(impl_tokens.to_string());\n\n // panic!(tokens.to_string());\n\n\n\n proc_macro::TokenStream::from(tokens)\n\n}\n\n\n", "file_path": "yuki_derive/src/lib.rs", "rank": 65, "score": 58618.67998683575 }, { "content": "pub fn per_component_tokens(\n\n data: &Data,\n\n component_tokens: &dyn Fn(&Option<Ident>, &Field) -> TokenStream,\n\n combined_tokens: &dyn Fn(ComponentStreams) -> TokenStream,\n\n) -> TokenStream {\n\n match data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => {\n\n combined_tokens(fields.named.iter().map(\n\n &(|f: &syn::Field| {\n\n let name = &f.ident;\n\n // Use correct field span to get potential error on correct line\n\n component_tokens(name, f)\n\n }),\n\n ))\n\n }\n\n _ => unimplemented!(),\n\n },\n\n Data::Enum(_) | Data::Union(_) => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "yuki_derive/src/common.rs", "rank": 66, "score": 58618.67998683575 }, { "content": "pub fn try_load_scene(\n\n settings: &SceneLoadSettings,\n\n) -> Result<(Arc<Scene>, CameraParameters, FilmSettings, f32), String> {\n\n if settings.path.exists() {\n\n match settings.path.extension() {\n\n Some(ext) => match ext.to_str().unwrap() {\n\n \"ply\" => match Scene::ply(settings) {\n\n Ok((scene, camera_params, film_settings, total_secs)) => {\n\n 
yuki_info!(\n\n \"PLY loaded from {}\",\n\n settings.path.file_name().unwrap().to_str().unwrap()\n\n );\n\n Ok((Arc::new(scene), camera_params, film_settings, total_secs))\n\n }\n\n Err(why) => Err(format!(\"Loading PLY failed: {}\", why)),\n\n },\n\n \"xml\" => match Scene::mitsuba(settings) {\n\n Ok((scene, camera_params, film_settings, total_secs)) => {\n\n yuki_info!(\n\n \"Mitsuba 2.0 scene loaded from {}\",\n", "file_path": "yuki/src/app/util.rs", "rank": 67, "score": 58618.67998683575 }, { "content": "pub fn vec_op(\n\n op_trait: Ident,\n\n other: Ident,\n\n output: Option<&Ident>,\n\n item: &DeriveInput,\n\n) -> TokenStream {\n\n let trait_info = TraitInfo::new(&op_trait.to_string());\n\n let TraitInfo {\n\n ident: trait_ident,\n\n is_scalar_op,\n\n is_assign_op,\n\n ..\n\n } = &trait_info;\n\n\n\n if *is_scalar_op {\n\n return syn::Error::new(op_trait.span(), \"Scalar ops not supported\").to_compile_error();\n\n }\n\n assert!(*is_assign_op == output.is_none());\n\n\n\n let generics = add_trait_bound(&item.generics, quote!(#trait_ident));\n", "file_path": "yuki_derive/src/impl_vec_op.rs", "rank": 68, "score": 57520.6870244536 }, { "content": "pub fn impl_vec_op_tokens(\n\n item_data: &Data,\n\n type_ident: &Ident,\n\n other_tokens: TokenStream,\n\n output_ident: Option<&Ident>,\n\n parsed_generics: ParsedGenerics,\n\n trait_info: TraitInfo,\n\n) -> TokenStream {\n\n let TraitInfo {\n\n ident: trait_ident,\n\n op_ident,\n\n is_scalar_op,\n\n ..\n\n } = trait_info;\n\n\n\n let component_tokens = |c: &Option<Ident>, f: &Field| {\n\n if is_scalar_op {\n\n {\n\n quote_spanned! 
{f.span() =>\n\n self.#c.#op_ident(other)\n", "file_path": "yuki_derive/src/common.rs", "rank": 69, "score": 57520.6870244536 }, { "content": "fn bench_mul(m: &Matrix4x4<f32>) {\n\n let mut m = m.clone();\n\n let start = Instant::now();\n\n for _ in 0..ITERATIONS {\n\n m = &m * &m;\n\n if m.m[0][0] == 0.0 {\n\n panic!(\"We only wanted to force the loop to be executed!\")\n\n }\n\n }\n\n let elapsed_ns = start.elapsed().as_nanos();\n\n let elapsed_ms = (elapsed_ns as f64) * 1e-6;\n\n let us_per_invert = (elapsed_ns as f64) * 1e-3 / (ITERATIONS as f64);\n\n println!(\n\n \"Full took {:4.1} ms total, {:0.4} us per invert\",\n\n elapsed_ms, us_per_invert\n\n );\n\n}\n\n\n", "file_path": "bench/src/main.rs", "rank": 70, "score": 56576.26525564502 }, { "content": "fn bench_full(m: &Matrix4x4<f32>) {\n\n let mut m = m.clone();\n\n let start = Instant::now();\n\n for _ in 0..ITERATIONS {\n\n m = m.inverted();\n\n if m.m[0][0].is_nan() {\n\n panic!(\"We only wanted to force the loop to be executed!\")\n\n }\n\n }\n\n let elapsed_ns = start.elapsed().as_nanos();\n\n let elapsed_ms = (elapsed_ns as f64) * 1e-6;\n\n let us_per_invert = (elapsed_ns as f64) * 1e-3 / (ITERATIONS as f64);\n\n println!(\n\n \"Full took {:4.1} ms total, {:0.4} us per invert\",\n\n elapsed_ms, us_per_invert\n\n );\n\n}\n\n\n", "file_path": "bench/src/main.rs", "rank": 71, "score": 56576.26525564502 }, { "content": "pub fn vec_like_impl(\n\n data: &Data,\n\n vec_type: &Ident,\n\n parsed_generics: ParsedGenerics,\n\n member_ops: Option<TokenStream>,\n\n post_impl: Option<TokenStream>,\n\n) -> TokenStream {\n\n let ParsedGenerics {\n\n generic_param,\n\n impl_generics,\n\n type_generics,\n\n where_clause,\n\n } = parsed_generics;\n\n\n\n let new_args = per_component_tokens(\n\n data,\n\n &|c: &Option<Ident>, f: &Field| quote_spanned!(f.span() => #c: #generic_param),\n\n &|recurse| quote!(#(#recurse),*),\n\n );\n\n let new_init = per_component_tokens(\n", "file_path": "yuki_derive/src/impl_vec_like.rs", 
"rank": 72, "score": 56492.13937917641 }, { "content": "fn find_param_values<'a, T: Clone>(\n\n name: &str,\n\n params: &'a [ParamSetItem<T>],\n\n default: &'a [T],\n\n) -> &'a [T] {\n\n for param in params {\n\n if param.name.as_str() == name {\n\n return &param.values;\n\n }\n\n }\n\n default\n\n}\n", "file_path": "yuki/src/scene/pbrt/param_set.rs", "rank": 73, "score": 52309.518712061006 }, { "content": "pub fn x_fit_1931(lambda: f32) -> f32 {\n\n let t1 = (lambda - 442.0) * if lambda < 442.0 { 0.0624 } else { 0.0374 };\n\n let t2 = (lambda - 599.8) * if lambda < 599.8 { 0.0264 } else { 0.0323 };\n\n let t3 = (lambda - 501.1) * if lambda < 501.1 { 0.0490 } else { 0.0382 };\n\n 0.362 * (-0.5 * t1 * t1).exp() + 1.056 * (-0.5 * t2 * t2).exp() - 0.065 * (-0.5 * t3 * t3).exp()\n\n}\n\n\n", "file_path": "yuki/src/scene/pbrt/cie.rs", "rank": 74, "score": 51958.211203203435 }, { "content": "pub fn y_fit_1931(lambda: f32) -> f32 {\n\n let t1 = (lambda - 568.8) * if lambda < 568.8 { 0.0213 } else { 0.0247 };\n\n let t2 = (lambda - 530.9) * if lambda < 530.9 { 0.0613 } else { 0.0322 };\n\n 0.821 * (-0.5 * t1 * t1).exp() + 0.286 * (-0.5 * t2 * t2).exp()\n\n}\n\n\n", "file_path": "yuki/src/scene/pbrt/cie.rs", "rank": 75, "score": 51958.211203203435 }, { "content": "pub fn z_fit_1931(lambda: f32) -> f32 {\n\n let t1 = (lambda - 437.0) * if lambda < 437.0 { 0.0845 } else { 0.0278 };\n\n let t2 = (lambda - 459.0) * if lambda < 459.0 { 0.0385 } else { 0.0725 };\n\n 1.217 * (-0.5 * t1 * t1).exp() + 0.681 * (-0.5 * t2 * t2).exp()\n\n}\n", "file_path": "yuki/src/scene/pbrt/cie.rs", "rank": 76, "score": 51958.211203203435 }, { "content": "fn parse_constant_emitter<T: std::io::Read>(\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<Spectrum<f32>> {\n\n let mut radiance = Spectrum::zeros();\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n _: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n\n let data_type 
= name.local_name.as_str();\n\n match data_type {\n\n \"rgb\" => {\n\n radiance = parse_rgb(&attributes, \"radiance\")?;\n\n }\n\n _ => return Err(format!(\"Unknown constant emitter data type '{}'\", data_type).into()),\n\n }\n\n Ok(())\n\n });\n\n\n\n Ok(radiance)\n\n}\n\n\n", "file_path": "yuki/src/scene/mitsuba/emitter.rs", "rank": 77, "score": 50084.66604623357 }, { "content": "fn parse_point_light<T: std::io::Read>(\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<Arc<PointLight>> {\n\n let mut position = Point3::<f32>::zeros();\n\n let mut intensity = Spectrum::zeros();\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n _: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n\n let data_type = name.local_name.as_str();\n\n match data_type {\n\n \"point\" => {\n\n if find_attr!(&attributes, \"name\").as_str() != \"position\" {\n\n return Err(\n\n \"Expected 'name': 'filename' as first mesh 'string' attribute\".into(),\n\n );\n\n }\n", "file_path": "yuki/src/scene/mitsuba/emitter.rs", "rank": 78, "score": 50084.66604623357 }, { "content": "fn parse_spot_light<T: std::io::Read>(\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<Arc<SpotLight>> {\n\n let mut light_to_world = Transform::default();\n\n let mut intensity = Spectrum::zeros();\n\n let mut total_width_degrees = 0.0;\n\n let mut falloff_start_degrees = 0.0;\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n level: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n\n let data_type = name.local_name.as_str();\n\n match data_type {\n\n \"float\" => match find_attr!(&attributes, \"name\").as_str() {\n\n \"cutoff_angle\" => {\n\n total_width_degrees = find_attr!(&attributes, \"value\").parse()?;\n\n }\n", "file_path": "yuki/src/scene/mitsuba/emitter.rs", "rank": 79, "score": 50084.66604623357 }, { "content": "pub fn parse<T: std::io::Read>(\n\n parser: &mut 
EventReader<T>,\n\n mut indent: String,\n\n) -> Result<CameraParameters> {\n\n let mut fov_axis = String::new();\n\n let mut fov_angle = 0.0;\n\n let mut transform = Transform::default();\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n level: &mut i32,\n\n ignore_level: &mut Option<u32>|\n\n -> Result<()> {\n\n let data_type = name.local_name.as_str();\n\n match data_type {\n\n \"string\" => {\n\n let (attr_name, attr_value) = (\n\n find_attr!(&attributes, \"name\").as_str(),\n\n find_attr!(&attributes, \"value\"),\n\n );\n", "file_path": "yuki/src/scene/mitsuba/sensor.rs", "rank": 80, "score": 49826.079124138385 }, { "content": "fn is_valid(header: &ply_rs::ply::Header) -> bool {\n\n let mut content = PlyContent::new();\n\n for (name, element) in &header.elements {\n\n match name.as_str() {\n\n \"vertex\" => {\n\n let mut props = HashSet::new();\n\n for (name, _) in &element.properties {\n\n props.insert(name.clone());\n\n }\n\n content.vertex = Some(props);\n\n }\n\n \"face\" => {\n\n let mut props = HashSet::new();\n\n for (name, _) in &element.properties {\n\n props.insert(name.clone());\n\n }\n\n content.face = Some(props);\n\n }\n\n _ => yuki_info!(\"PLY: Unknown element '{}'\", name),\n\n }\n", "file_path": "yuki/src/scene/ply.rs", "rank": 81, "score": 49826.079124138385 }, { "content": "pub fn parse<T: std::io::Read>(\n\n dir_path: &Path,\n\n materials: &HashMap<String, Arc<dyn Material>>,\n\n attributes: &[OwnedAttribute],\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<ply::PlyResult> {\n\n let data_type = find_attr!(attributes, \"type\").as_str();\n\n if data_type != \"ply\" {\n\n return Err(format!(\"Unexpected shape type '{}'!\", data_type).into());\n\n }\n\n let mut transform = Transform::default();\n\n let mut ply_abspath = None;\n\n let mut material_id = None;\n\n // TODO: Parse whole shape first, load with constructed material after\n\n parse_element!(parser, indent, |name: 
&OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n level: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n", "file_path": "yuki/src/scene/mitsuba/shape.rs", "rank": 82, "score": 49826.079124138385 }, { "content": "pub fn parse<T: std::io::Read>(\n\n attributes: &[OwnedAttribute],\n\n parser: &mut EventReader<T>,\n\n indent: String,\n\n) -> Result<Option<Emitter>> {\n\n let attr_type = find_attr!(attributes, \"type\");\n\n let ret = match attr_type.as_str() {\n\n \"constant\" => Some(Emitter::Background {\n\n color: parse_constant_emitter(parser, indent)?,\n\n }),\n\n \"point\" => Some(Emitter::Light {\n\n light: parse_point_light(parser, indent)?,\n\n }),\n\n \"spot\" => Some(Emitter::Light {\n\n light: parse_spot_light(parser, indent)?,\n\n }),\n\n _ => None,\n\n };\n\n Ok(ret)\n\n}\n\n\n", "file_path": "yuki/src/scene/mitsuba/emitter.rs", "rank": 83, "score": 49826.079124138385 }, { "content": "pub fn parse<T: std::io::Read>(\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<Transform<f32>> {\n\n let mut transform = Transform::default();\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n _: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n\n let data_type = name.local_name.as_str();\n\n match data_type {\n\n \"rotate\" => {\n\n let axis = {\n\n let mut axis = Vec3::new(0.0, 0.0, 0.0);\n\n if let Some(v) = try_find_attr!(&attributes, \"x\") {\n\n axis.x = v.parse()?;\n\n }\n\n if let Some(v) = try_find_attr!(&attributes, \"y\") {\n", "file_path": "yuki/src/scene/mitsuba/transform.rs", "rank": 84, "score": 49826.079124138385 }, { "content": "pub fn spectrum_impl(item: &DeriveInput) -> TokenStream {\n\n let vec_type = &item.ident;\n\n\n\n let parsed_generics = match parse_generics(&item.generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(\"Impl Spectrum\", item.ident.span(), errors).to_compile_error();\n\n }\n\n };\n\n\n\n let member_ops = {\n\n let data = 
&item.data;\n\n let generic_param = &parsed_generics.generic_param;\n\n\n\n let is_black_ret = per_component_tokens(\n\n data,\n\n &|c: &Option<Ident>, f: &Field| quote_spanned!(f.span() => self.#c == #generic_param::zero()),\n\n &|recurse| quote!(#(#recurse)&&*),\n\n );\n\n\n", "file_path": "yuki_derive/src/impl_spectrum.rs", "rank": 85, "score": 49229.06538081323 }, { "content": "pub fn point_impl(item: &DeriveInput) -> TokenStream {\n\n let point_type = &item.ident;\n\n\n\n let parsed_generics = match parse_generics(&item.generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(\"Impl Point\", item.ident.span(), errors).to_compile_error();\n\n }\n\n };\n\n let ParsedGenerics {\n\n generic_param,\n\n impl_generics,\n\n type_generics,\n\n where_clause,\n\n } = &parsed_generics;\n\n\n\n let str_type = point_type.to_string();\n\n let dist_doc = format!(\n\n \"Calculates the distance between this `{0}` and another `{0}`.\",\n\n str_type\n", "file_path": "yuki_derive/src/impl_point.rs", "rank": 86, "score": 49229.06538081323 }, { "content": "pub fn vec_impl(item: &DeriveInput) -> TokenStream {\n\n let vec_type = &item.ident;\n\n\n\n let parsed_generics = match parse_generics(&item.generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(\"Impl Vec\", item.ident.span(), errors).to_compile_error();\n\n }\n\n };\n\n\n\n let member_ops = vec_normal_members_impl(&item.data, vec_type, &parsed_generics.generic_param);\n\n\n\n let from_args = per_component_tokens(\n\n &item.data,\n\n &|_c: &Option<Ident>, f: &Field| quote_spanned!(f.span() => v),\n\n &|recurse| quote!(#(#recurse),*),\n\n );\n\n\n\n let signed_abs_impl = abs_impl(vec_type, item);\n\n let ParsedGenerics {\n", "file_path": "yuki_derive/src/impl_vec.rs", "rank": 87, "score": 49229.06538081323 }, { "content": "pub fn bounds_impl(item: &DeriveInput) -> TokenStream {\n\n let bounds_ident = &item.ident;\n\n\n\n let generics = add_trait_bound(&item.generics, 
quote!(num::cast::ToPrimitive));\n\n let ParsedGenerics {\n\n generic_param,\n\n impl_generics,\n\n type_generics,\n\n where_clause,\n\n } = match parse_generics(&generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(\"Impl Point\", item.ident.span(), errors).to_compile_error();\n\n }\n\n };\n\n\n\n let bounds_str = bounds_ident.to_string();\n\n let component_count = match bounds_str.chars().last().unwrap().to_digit(10) {\n\n Some(c) => c,\n\n None => {\n", "file_path": "yuki_derive/src/impl_bounds.rs", "rank": 88, "score": 49229.06538081323 }, { "content": "// TODO: This does not support glass now. Make \"bsdf\" -parsing generic, move it out of mod.rs\n\npub fn parse_twosided<T: std::io::Read>(\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<Arc<dyn Material>> {\n\n let mut material: Arc<dyn Material> = Arc::new(Matte::new(\n\n Arc::new(ConstantTexture::new(Spectrum::ones())),\n\n Arc::new(ConstantTexture::new(0.0)),\n\n ));\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n level: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n\n let data_type = name.local_name.as_str();\n\n match data_type {\n\n \"bsdf\" => {\n\n material = parse_diffuse(parser, indent.clone())?;\n\n *level -= 1;\n\n indent.truncate(indent.len() - 2);\n", "file_path": "yuki/src/scene/mitsuba/material.rs", "rank": 89, "score": 48922.231072158436 }, { "content": "pub fn parse_dielectric<T: std::io::Read>(\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<Arc<dyn Material>> {\n\n let mut int_ior = BK7_GLASS_IOR;\n\n let mut ext_ior = AIR_IOR;\n\n let mut specular_reflectance = Arc::new(ConstantTexture::new(Spectrum::ones()));\n\n let mut specular_transmittance = Arc::new(ConstantTexture::new(Spectrum::ones()));\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n _: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n\n let 
data_type = name.local_name.as_str();\n\n match data_type {\n\n \"rgb\" => {\n\n if let Ok(v) = parse_rgb(&attributes, \"specular_reflectance\") {\n\n specular_reflectance = Arc::new(ConstantTexture::new(v));\n\n } else if let Ok(v) = parse_rgb(&attributes, \"specular_transmittance\") {\n", "file_path": "yuki/src/scene/mitsuba/material.rs", "rank": 90, "score": 48918.022698042376 }, { "content": "pub fn parse_diffuse<T: std::io::Read>(\n\n parser: &mut EventReader<T>,\n\n mut indent: String,\n\n) -> Result<Arc<dyn Material>> {\n\n let mut reflectance = Arc::new(ConstantTexture::new(Spectrum::new(0.5, 0.5, 0.5)));\n\n\n\n parse_element!(parser, indent, |name: &OwnedName,\n\n attributes: Vec<OwnedAttribute>,\n\n _: &mut i32,\n\n _: &mut Option<u32>|\n\n -> Result<()> {\n\n let data_type = name.local_name.as_str();\n\n match data_type {\n\n \"rgb\" => {\n\n reflectance =\n\n Arc::new(ConstantTexture::new(parse_rgb(&attributes, \"reflectance\")?));\n\n }\n\n _ => return Err(format!(\"Unknown light data type '{}'\", data_type).into()),\n\n }\n\n Ok(())\n", "file_path": "yuki/src/scene/mitsuba/material.rs", "rank": 91, "score": 48918.022698042376 }, { "content": "pub fn render(exr_path: &Path, settings: InitialSettings) {\n\n let load_settings = settings.load_settings.unwrap_or_default();\n\n\n\n let (scene, camera_params, scene_film_settings, _) =\n\n expect!(try_load_scene(&load_settings), \"Scene loading failed\");\n\n\n\n let film_settings = settings.film_settings.unwrap_or(scene_film_settings);\n\n let render_settings = settings.render_settings.unwrap_or_default();\n\n let sampler = settings.sampler.unwrap_or_default();\n\n let scene_integrator = settings.scene_integrator.unwrap_or_default();\n\n let tone_map = settings.tone_map.unwrap_or_default();\n\n\n\n let film = Arc::new(Mutex::new(Film::new(film_settings.res)));\n\n let mut renderer = Renderer::new();\n\n\n\n let render_start = Instant::now();\n\n renderer.launch(\n\n scene,\n\n camera_params,\n\n 
Arc::clone(&film),\n", "file_path": "yuki/src/app/headless.rs", "rank": 92, "score": 48918.022698042376 }, { "content": "/// Creates a new `Transform` that is a rotation of `theta` radians around the x-axis.\n\npub fn rotation_x<T>(theta: T) -> Transform<T>\n\nwhere\n\n T: FloatValueType,\n\n{\n\n let cos_theta = theta.cos();\n\n let sin_theta = theta.sin();\n\n let m = Matrix4x4::new([\n\n [T::one(), T::zero(), T::zero(), T::zero()],\n\n [T::zero(), cos_theta, -sin_theta, T::zero()],\n\n [T::zero(), sin_theta, cos_theta, T::zero()],\n\n [T::zero(), T::zero(), T::zero(), T::one()],\n\n ]);\n\n\n\n Transform::new_full(m, m.transposed())\n\n}\n\n\n", "file_path": "yuki/src/math/transforms.rs", "rank": 93, "score": 48789.30679272453 }, { "content": "/// Creates a new `Transform` that is a rotation of `theta` radians around the y-axis.\n\npub fn rotation_y<T>(theta: T) -> Transform<T>\n\nwhere\n\n T: FloatValueType,\n\n{\n\n let cos_theta = theta.cos();\n\n let sin_theta = theta.sin();\n\n let m = Matrix4x4::new([\n\n [cos_theta, T::zero(), sin_theta, T::zero()],\n\n [T::zero(), T::one(), T::zero(), T::zero()],\n\n [-sin_theta, T::zero(), cos_theta, T::zero()],\n\n [T::zero(), T::zero(), T::zero(), T::one()],\n\n ]);\n\n\n\n Transform::new_full(m, m.transposed())\n\n}\n\n\n", "file_path": "yuki/src/math/transforms.rs", "rank": 94, "score": 48789.30679272453 }, { "content": "/// Creates a new `Transform` that is a rotation of `theta` radians around the z-axis.\n\npub fn rotation_z<T>(theta: T) -> Transform<T>\n\nwhere\n\n T: FloatValueType,\n\n{\n\n let cos_theta = theta.cos();\n\n let sin_theta = theta.sin();\n\n let m = Matrix4x4::new([\n\n [cos_theta, -sin_theta, T::zero(), T::zero()],\n\n [sin_theta, cos_theta, T::zero(), T::zero()],\n\n [T::zero(), T::zero(), T::one(), T::zero()],\n\n [T::zero(), T::zero(), T::zero(), T::one()],\n\n ]);\n\n\n\n Transform::new_full(m, m.transposed())\n\n}\n\n\n", "file_path": "yuki/src/math/transforms.rs", "rank": 95, "score": 
48789.30679272453 }, { "content": "pub fn approx(input: DeriveInput, name: &str) -> TokenStream {\n\n let trait_ident = Ident::new(name, Span::call_site());\n\n\n\n let ParsedGenerics { generic_param, .. } = match parse_generics(&input.generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(&format!(\"Derive '{}'\", name), input.ident.span(), errors)\n\n .to_compile_error();\n\n }\n\n };\n\n\n\n let generics = add_trait_bound(&input.generics, quote! { #trait_ident });\n\n let generics = add_trait_bound(&generics, quote! { AbsDiffEq<Epsilon = #generic_param> });\n\n let ParsedGenerics {\n\n generic_param,\n\n impl_generics,\n\n type_generics,\n\n where_clause,\n\n } = match parse_generics(&generics) {\n\n Ok(v) => v,\n", "file_path": "yuki_derive/src/derive_trait.rs", "rank": 96, "score": 46214.07158392093 }, { "content": "pub fn neg(input: DeriveInput, _: &str) -> TokenStream {\n\n let generics = add_trait_bound(&input.generics, quote!(num::traits::Signed));\n\n\n\n let ParsedGenerics {\n\n impl_generics,\n\n type_generics,\n\n where_clause,\n\n ..\n\n } = match parse_generics(&generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(\"Derive 'Neg'\", input.ident.span(), errors).to_compile_error();\n\n }\n\n };\n\n let type_ident = &input.ident;\n\n\n\n let negated_components = per_component_tokens(\n\n &input.data,\n\n &|c: &Option<Ident>, f: &Field| quote_spanned! 
{f.span() => #c: -self.#c },\n\n &|recurse| quote!(#(#recurse,)*),\n", "file_path": "yuki_derive/src/derive_math_op.rs", "rank": 97, "score": 46214.07158392093 }, { "content": "pub fn index(input: DeriveInput, name: &str) -> TokenStream {\n\n let TraitInfo {\n\n ident: trait_ident,\n\n op_ident,\n\n ..\n\n } = TraitInfo::new(name);\n\n\n\n let ParsedGenerics {\n\n generic_param,\n\n impl_generics,\n\n type_generics,\n\n where_clause,\n\n } = match parse_generics(&input.generics) {\n\n Ok(v) => v,\n\n Err(errors) => {\n\n return combined_error(\"Derive'Index'\", input.ident.span(), errors).to_compile_error();\n\n }\n\n };\n\n let type_ident = &input.ident;\n\n\n", "file_path": "yuki_derive/src/derive_trait.rs", "rank": 98, "score": 46214.07158392093 }, { "content": "fn render_status_messages(status: &RenderStatus, render_start: Instant) -> Vec<String> {\n\n let elapsed_s = render_start.elapsed().as_secs_f32();\n\n\n\n match *status {\n\n RenderStatus::Finished { ray_count } => {\n\n vec![\n\n format!(\"Render finished in {:.2}s\", elapsed_s),\n\n format!(\"{:.2} Mrays/s\", ((ray_count as f32) / elapsed_s) * 1e-6),\n\n ]\n\n }\n\n RenderStatus::Progress {\n\n active_threads,\n\n tiles_done,\n\n tiles_total,\n\n approx_remaining_s,\n\n current_rays_per_s,\n\n } => {\n\n vec![\n\n format!(\"Render threads running: {}\", active_threads),\n\n format!(\n\n \"{:.1}s elapsed, ~{:.0}s remaining\",\n\n elapsed_s, approx_remaining_s\n\n ),\n\n format!(\"{}/{} tiles\", tiles_done, tiles_total),\n\n format!(\"{:.2} Mrays/s\", current_rays_per_s * 1e-6),\n\n ]\n\n }\n\n }\n\n}\n", "file_path": "yuki/src/app/window.rs", "rank": 99, "score": 45450.6117916939 } ]
Rust
wiz/wiz/src/build.rs
ChanTsune/wiz
199d0f4698822a177ede8015bf8e04f190f39934
use crate::core::dep::{resolve_manifest_dependencies, ResolvedDependencyTree}; use crate::core::error::CliError; use crate::core::load_project; use crate::core::workspace::Workspace; use clap::ArgMatches; use std::collections::{BTreeSet, HashMap, HashSet}; use std::env; use std::error::Error; use std::fs::create_dir_all; use std::path::PathBuf; use wiz_utils::topological_sort::topological_sort; pub(crate) const COMMAND_NAME: &str = "build"; pub(crate) fn command(_: &str, options: &ArgMatches) -> Result<(), Box<dyn Error>> { let manifest_path = options.value_of("manifest-path"); let another_std = options.value_of("std"); let ws = load_project(manifest_path)?; let resolved_dependencies = resolve_manifest_dependencies(&ws.cws, &ws.get_manifest()?, another_std)?; println!("{:?}", resolved_dependencies); let target_dir = if let Some(target_dir) = options.value_of("target-dir") { let d = PathBuf::from(target_dir); if d.exists() && !d.is_dir() { return Err(Box::from(CliError::from(format!( "{} is not directory", d.display() )))); } else { d } } else { env::current_dir()?.join("target") }; create_dir_all(&target_dir)?; let wlib_paths = compile_dependencies(&ws, resolved_dependencies, target_dir.to_str().unwrap())?; let mut args = vec![ws.cws.to_str().unwrap()]; args.extend(["--out-dir", target_dir.to_str().unwrap()]); args.extend([ "--name", ws.cws.file_name().and_then(|p| p.to_str()).unwrap(), ]); args.extend(["--type", "bin"]); for wlib_path in wlib_paths.iter() { args.extend(["--library", wlib_path]); } if let Some(target_triple) = options.value_of("target-triple") { args.extend(["--target-triple", target_triple]); }; super::subcommand::execute("wizc", &args) } #[derive(Clone, Debug, Eq, PartialEq, Hash)] struct Task { name: String, version: String, src_path: String, } fn dependency_list(dependencies: ResolvedDependencyTree) -> HashMap<Task, HashSet<Task>> { fn dependency_list( result: &mut HashMap<Task, HashSet<Task>>, dep: ResolvedDependencyTree, ) -> Task { let 
ResolvedDependencyTree { name, version, src_path, dependencies, } = dep; let task = Task { name, version, src_path, }; let dependencies = dependencies .into_iter() .map(|d| dependency_list(result, d)) .collect(); result.insert(task.clone(), dependencies); task } let mut result = HashMap::new(); for dependency in dependencies.dependencies { dependency_list(&mut result, dependency); } result } fn compile_dependencies( ws: &Workspace, dependencies: ResolvedDependencyTree, target_dir: &str, ) -> Result<BTreeSet<String>, Box<dyn Error>> { let mut wlib_paths = BTreeSet::new(); let dependen_list = dependency_list(dependencies); let dep_list = topological_sort(dependen_list.clone())?; for dep in dep_list.into_iter().flatten() { let dep_wlib_paths = dependen_list .get(&dep) .unwrap() .iter() .map(|d| format!("{}/{}.wlib", target_dir, d.name)) .collect::<Vec<_>>(); let mut args = vec![dep.src_path.as_str()]; args.extend(["--out-dir", target_dir]); args.extend(["--name", dep.name.as_str()]); args.extend(["--type", "lib"]); for wlib_path in dep_wlib_paths.iter() { args.extend(["--library", wlib_path]); } let output = super::subcommand::output("wizc", &args)?; println!("{}", String::from_utf8_lossy(&output.stdout)); if !output.stderr.is_empty() { eprintln!("{}", String::from_utf8_lossy(&output.stderr)); } if !output.status.success() { return Err(Box::new(CliError::from(format!( "compile failed {:?}", dep.name )))); } wlib_paths.extend(dep_wlib_paths); wlib_paths.insert(format!("{}/{}.wlib", target_dir, dep.name)); } Ok(wlib_paths) }
use crate::core::dep::{resolve_manifest_dependencies, ResolvedDependencyTree}; use crate::core::error::CliError; use crate::core::load_project; use crate::core::workspace::Workspace; use clap::ArgMatches; use std::collections::{BTreeSet, HashMap, HashSet}; use std::env; use std::error::Error; use std::fs::create_dir_all; use std::path::PathBuf; use wiz_utils::topological_sort::topological_sort; pub(crate) const COMMAND_NAME: &str = "build"; pub(crate) fn command(_: &str, options: &ArgMatches) -> Result<(), Box<dyn Error>> { let manifest_path = options.value_of("manifest-path"); let another_std = options.value_of("std"); let ws = load_project(manifest_path)?; let resolved_dependencies = resolve_manifest_dependencies(&ws.cws, &ws.get_manifest()?, another_std)?; println!("{:?}", resolved_dependencies); let target_dir = if let Some(target_dir) = options.value_of("target-dir") { let d = PathBuf::from(target_dir); if d.exists() && !d.is_dir() { return Err(Box::from(CliError::from(format!( "{} is not directory", d.display() )))); } else { d } } else { env::current_dir()?.join("target") }; create_dir_all(&target_dir)?; let wlib_paths = compile_dependencies(&ws, resolved_dependencies, target_dir.to_str().unwrap())?; let mut args = vec![ws.cws.to_str().unwrap()]; args.extend(["--out-dir", target_dir.to_str().unwrap()]); args.extend([ "--name", ws.cws.file_name().and_then(|p| p.to_str()).unwrap(), ]); args.extend(["--type", "bin"]); for wlib_path in wlib_paths.iter() { args.extend(["--library", wlib_path]); } if let Some(target_triple) = options.value_of("target-triple") { args.extend(["--target-triple", target_triple]); }; super::subcommand::execute("wizc", &args) } #[derive(Clone, Debug, Eq, PartialEq, Hash)] struct Task { name: String, version: String, src_path: String, } fn dependency_list(dependencies: ResolvedDependencyTree) -> HashMap<Task, HashSet<Task>> { fn dependency_list( result: &mut HashMap<Task, HashSet<T
let mut result = HashMap::new(); for dependency in dependencies.dependencies { dependency_list(&mut result, dependency); } result } fn compile_dependencies( ws: &Workspace, dependencies: ResolvedDependencyTree, target_dir: &str, ) -> Result<BTreeSet<String>, Box<dyn Error>> { let mut wlib_paths = BTreeSet::new(); let dependen_list = dependency_list(dependencies); let dep_list = topological_sort(dependen_list.clone())?; for dep in dep_list.into_iter().flatten() { let dep_wlib_paths = dependen_list .get(&dep) .unwrap() .iter() .map(|d| format!("{}/{}.wlib", target_dir, d.name)) .collect::<Vec<_>>(); let mut args = vec![dep.src_path.as_str()]; args.extend(["--out-dir", target_dir]); args.extend(["--name", dep.name.as_str()]); args.extend(["--type", "lib"]); for wlib_path in dep_wlib_paths.iter() { args.extend(["--library", wlib_path]); } let output = super::subcommand::output("wizc", &args)?; println!("{}", String::from_utf8_lossy(&output.stdout)); if !output.stderr.is_empty() { eprintln!("{}", String::from_utf8_lossy(&output.stderr)); } if !output.status.success() { return Err(Box::new(CliError::from(format!( "compile failed {:?}", dep.name )))); } wlib_paths.extend(dep_wlib_paths); wlib_paths.insert(format!("{}/{}.wlib", target_dir, dep.name)); } Ok(wlib_paths) }
ask>>, dep: ResolvedDependencyTree, ) -> Task { let ResolvedDependencyTree { name, version, src_path, dependencies, } = dep; let task = Task { name, version, src_path, }; let dependencies = dependencies .into_iter() .map(|d| dependency_list(result, d)) .collect(); result.insert(task.clone(), dependencies); task }
function_block-function_prefixed
[ { "content": "pub fn parse_from_string(src: &str, name: Option<&str>) -> Result<WizFile> {\n\n match file(Span::from(src)) {\n\n Ok((s, f)) => {\n\n if !s.is_empty() {\n\n let location = Location::new(s.location_offset(), s.location_line());\n\n Err(ParseError::from(get_error_location_src(src, &location)))\n\n } else {\n\n Ok(WizFile {\n\n name: name.unwrap_or_default().to_string(),\n\n syntax: f,\n\n })\n\n }\n\n }\n\n Err(_) => Err(ParseError::from(String::new())),\n\n }\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz.rs", "rank": 0, "score": 387062.35235361016 }, { "content": "pub fn read_package_from_path(path: &Path, name: Option<&str>) -> Result<SourceSet> {\n\n let dir = fs::read_dir(path)?;\n\n for item in dir {\n\n let dir_entry = item.unwrap();\n\n if let Some(\"src\") = dir_entry.file_name().to_str() {\n\n return Ok(SourceSet::Dir {\n\n name: name\n\n .or_else(|| path.file_name().and_then(|p| p.to_str()))\n\n .unwrap_or_default()\n\n .to_string(),\n\n items: match read_package_files(dir_entry.path().as_path())? 
{\n\n SourceSet::File(_) => unreachable!(),\n\n SourceSet::Dir { name: _, items } => items,\n\n },\n\n });\n\n }\n\n println!(\"{}\", dir_entry.path().to_str().unwrap());\n\n }\n\n Ok(SourceSet::Dir {\n\n name: path\n\n .file_name()\n\n .and_then(|p| p.to_str())\n\n .unwrap()\n\n .to_string(),\n\n items: vec![],\n\n })\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz.rs", "rank": 2, "score": 325700.73002048326 }, { "content": "fn position(name: &str) -> Arg {\n\n Arg::new(name)\n\n}\n\n\n", "file_path": "wiz/wizc_cli/src/lib.rs", "rank": 3, "score": 299559.10040680086 }, { "content": "fn long(name: &str) -> Arg {\n\n Arg::new(name).long(name)\n\n}\n\n\n", "file_path": "wiz/wizc_cli/src/lib.rs", "rank": 4, "score": 299559.10040680086 }, { "content": "fn short(name: &str, s: char) -> Arg {\n\n Arg::new(name).short(s)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::PathBuf;\n\n\n\n #[test]\n\n fn test_parse_arg() {\n\n let app = super::app(\"test\");\n\n let matches = app.get_matches_from(&[\"test\", \"main.wiz\"]);\n\n let config = super::Config::from(&matches);\n\n assert_eq!(config.input().to_path_buf(), PathBuf::from(\"main.wiz\"));\n\n }\n\n}\n", "file_path": "wiz/wizc_cli/src/lib.rs", "rank": 5, "score": 289293.8322826198 }, { "content": "fn check_raw_str(s: &str, expected_hashes: u16, expected_err: Option<RawStrError>) {\n\n let s = &format!(\"r{}\", s);\n\n let mut cursor = Cursor::new(s);\n\n cursor.bump();\n\n let (n_hashes, err) = cursor.raw_double_quoted_string(0);\n\n assert_eq!(n_hashes, expected_hashes);\n\n assert_eq!(err, expected_err);\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/tests.rs", "rank": 6, "score": 284970.8446405674 }, { "content": "fn run_compiler(session: &mut Session, config: Config) -> result::Result<(), Box<dyn Error>> {\n\n let output = config.output();\n\n let out_dir = config.out_dir();\n\n let paths = config.paths();\n\n let input = config.input();\n\n let out_dir = out_dir\n\n .map(PathBuf::from)\n\n 
.unwrap_or_else(|| env::current_dir().unwrap());\n\n let build_type = config.type_().unwrap_or(BuildType::Binary);\n\n\n\n let mlir_out_dir = out_dir.join(\"mlir\");\n\n\n\n let id_parse_files = \"parse files\";\n\n session.start(id_parse_files);\n\n\n\n let input_source = if input.is_dir() {\n\n read_package_from_path(input, config.name())?\n\n } else {\n\n SourceSet::File(parse_from_file_path(input)?)\n\n };\n", "file_path": "wiz/wizc/src/main.rs", "rank": 7, "score": 278114.90292822616 }, { "content": "fn get_error_location_src(src: &str, location: &Location) -> String {\n\n let line_offset = get_line_offset(src, location);\n\n let error_line = src\n\n .lines()\n\n .nth(location.line() as usize - 1)\n\n .unwrap_or_default();\n\n format!(\n\n \"{} | {}\\n{}^\",\n\n location.line(),\n\n error_line,\n\n \" \".repeat(location.line().to_string().len() + 3 + line_offset)\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_from_string() {\n\n let result = parse_from_string(\"unknown_token\", None);\n\n if let Err(e) = result {\n\n assert_eq!(e.to_string(), \"1 | unknown_token\\n ^\");\n\n } else {\n\n unreachable!();\n\n }\n\n }\n\n}\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz.rs", "rank": 8, "score": 238690.30440014607 }, { "content": "/// Takes a contents of a char literal (without quotes), and returns an\n\n/// unescaped char or an error\n\npub fn unescape_char(literal_text: &str) -> Result<char, (usize, EscapeError)> {\n\n let mut chars = literal_text.chars();\n\n unescape_char_or_byte(&mut chars, Mode::Char)\n\n .map_err(|err| (literal_text.len() - chars.as_str().len(), err))\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 9, "score": 233233.33874456555 }, { "content": "/// Takes a contents of a byte literal (without quotes), and returns an\n\n/// unescaped byte or an error.\n\npub fn unescape_byte(literal_text: &str) -> Result<u8, (usize, EscapeError)> {\n\n let mut chars = 
literal_text.chars();\n\n unescape_char_or_byte(&mut chars, Mode::Byte)\n\n .map(byte_from_char)\n\n .map_err(|err| (literal_text.len() - chars.as_str().len(), err))\n\n}\n\n\n\n/// What kind of literal do we parse.\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Mode {\n\n Char,\n\n Str,\n\n Byte,\n\n ByteStr,\n\n RawStr,\n\n RawByteStr,\n\n}\n\n\n\nimpl Mode {\n\n pub fn in_single_quotes(self) -> bool {\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 10, "score": 233233.33874456555 }, { "content": "fn main() -> result::Result<(), Box<dyn Error>> {\n\n println!(\"{:?}\", env::args());\n\n let app = wizc_cli::app(\"wizc\");\n\n let matches = app.get_matches();\n\n let config = Config::from(&matches);\n\n\n\n let mut session = Session::new();\n\n session.timer(\"compile\", |s| run_compiler(s, config))\n\n}\n\n\n", "file_path": "wiz/wizc/src/main.rs", "rank": 11, "score": 229157.63140649904 }, { "content": "fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {\n\n let first_char = chars.next().ok_or(EscapeError::ZeroChars)?;\n\n let res = scan_escape(first_char, chars, mode)?;\n\n if chars.next().is_some() {\n\n return Err(EscapeError::MoreThanOneChar);\n\n }\n\n Ok(res)\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 12, "score": 227225.4248542469 }, { "content": "fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {\n\n if first_char != '\\\\' {\n\n // Previous character was not a slash, and we don't expect it to be\n\n // an escape-only character.\n\n return match first_char {\n\n '\\t' | '\\n' => Err(EscapeError::EscapeOnlyChar),\n\n '\\r' => Err(EscapeError::BareCarriageReturn),\n\n '\\'' if mode.in_single_quotes() => Err(EscapeError::EscapeOnlyChar),\n\n '\"' if mode.in_double_quotes() => Err(EscapeError::EscapeOnlyChar),\n\n _ => {\n\n if mode.is_bytes() && !first_char.is_ascii() {\n\n // Byte literal can't be a non-ascii character.\n\n return 
Err(EscapeError::NonAsciiCharInByte);\n\n }\n\n Ok(first_char)\n\n }\n\n };\n\n }\n\n\n\n // Previous character is '\\\\', try to unescape it.\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 14, "score": 219020.5186688563 }, { "content": "fn cli() -> Result<(), Box<dyn Error>> {\n\n let app = Command::new(\"wiz\")\n\n .version(crate_version!())\n\n .about(\"Wiz's package manager\")\n\n .arg_required_else_help(true)\n\n .allow_external_subcommands(true)\n\n .subcommand(\n\n Command::new(new::COMMAND_NAME)\n\n .about(\"Create a new wiz package at <path>\")\n\n .arg(Arg::new(\"path\").required(true)),\n\n )\n\n .subcommand(\n\n Command::new(init::COMMAND_NAME)\n\n .about(\"Create a new wiz package in an current directory\")\n\n .arg(\n\n Arg::new(\"overwrite\")\n\n .long(\"overwrite\")\n\n .help(\"Overwrite files for target Directory\"),\n\n ),\n\n )\n", "file_path": "wiz/wiz/src/main.rs", "rank": 15, "score": 215311.7177031958 }, { "content": "/// The passed string is lexically an identifier.\n\npub fn is_ident(string: &str) -> bool {\n\n let mut chars = string.chars();\n\n if let Some(start) = chars.next() {\n\n is_id_start(start) && chars.all(is_id_continue)\n\n } else {\n\n false\n\n }\n\n}\n\n\n\nimpl Cursor<'_> {\n\n /// Parses a token from the input string.\n\n fn advance_token(&mut self) -> Token {\n\n let first_char = self.bump().unwrap();\n\n let token_kind = match first_char {\n\n // Slash, comment or block comment.\n\n '/' => match self.first() {\n\n '/' => self.line_comment(),\n\n '*' => self.block_comment(),\n\n _ => TokenKind::Slash,\n\n },\n", "file_path": "wiz/wiz_lexar/src/lib.rs", "rank": 16, "score": 213649.87695319977 }, { "content": "pub fn app(name: &str) -> Command {\n\n Command::new(name)\n\n .arg(position(\"input\").required(true))\n\n .arg(long(\"name\").takes_value(true))\n\n .arg(\n\n long(\"type\")\n\n .takes_value(true)\n\n .possible_values(BuildType::all_str()),\n\n )\n\n .arg(short(\"output\", 'o').takes_value(true))\n\n 
.arg(long(\"out-dir\").takes_value(true))\n\n .arg(long(\"target-triple\").takes_value(true))\n\n .arg(\n\n short(\"path\", 'p')\n\n .takes_value(true)\n\n .multiple_occurrences(true),\n\n )\n\n .arg(short(\"L\", 'L').takes_value(true).multiple_occurrences(true))\n\n .arg(long(\"library\").takes_value(true).multiple_occurrences(true))\n\n .arg(\n\n long(\"emit\")\n\n .takes_value(true)\n\n .possible_values(&[\"llvm-ir\", \"object\"]),\n\n )\n\n}\n\n\n", "file_path": "wiz/wizc_cli/src/lib.rs", "rank": 17, "score": 213612.69774306967 }, { "content": "fn check_lexing(src: &str, expect: String) {\n\n let actual: String = tokenize(src)\n\n .map(|token| format!(\"{:?}\\n\", token))\n\n .collect();\n\n assert_eq!(actual, expect)\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/tests.rs", "rank": 18, "score": 211489.86711942081 }, { "content": "pub fn parse_from_file_path_str(path: &str) -> Result<WizFile> {\n\n let p = Path::new(path);\n\n parse_from_file_path(p)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz.rs", "rank": 19, "score": 207907.7897929278 }, { "content": "/// Takes a contents of a string literal (without quotes) and produces a\n\n/// sequence of escaped characters or errors.\n\nfn unescape_str_or_byte_str<F>(src: &str, mode: Mode, callback: &mut F)\n\nwhere\n\n F: FnMut(Range<usize>, Result<char, EscapeError>),\n\n{\n\n assert!(mode.in_double_quotes());\n\n let initial_len = src.len();\n\n let mut chars = src.chars();\n\n while let Some(first_char) = chars.next() {\n\n let start = initial_len - chars.as_str().len() - first_char.len_utf8();\n\n\n\n let unescaped_char = match first_char {\n\n '\\\\' => {\n\n let second_char = chars.clone().next();\n\n match second_char {\n\n Some('\\n') => {\n\n skip_ascii_whitespace(&mut chars, start, callback);\n\n continue;\n\n }\n\n _ => scan_escape(first_char, &mut chars, mode),\n\n }\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 20, "score": 207708.48854347802 }, { "content": "/// `wizc` allows 
files to have a shebang, e.g. \"#!/usr/bin/wizrun\",\n\n/// but shebang isn't a part of wiz syntax.\n\npub fn strip_shebang(input: &str) -> Option<usize> {\n\n // Shebang must start with `#!` literally, without any preceding whitespace.\n\n // For simplicity we consider any line starting with `#!` a shebang,\n\n // regardless of restrictions put on shebangs by specific platforms.\n\n if let Some(input_tail) = input.strip_prefix(\"#!\") {\n\n // Ok, this is a shebang but if the next non-whitespace token is `[`,\n\n // then it may be valid Wiz code, so consider it Wiz code.\n\n let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok| {\n\n !matches!(\n\n tok,\n\n TokenKind::Whitespace\n\n | TokenKind::LineComment { doc_style: None }\n\n | TokenKind::BlockComment {\n\n doc_style: None,\n\n ..\n\n }\n\n )\n\n });\n\n if next_non_whitespace_token != Some(TokenKind::OpenBracket) {\n\n // No other choice than to consider this a shebang.\n\n return Some(2 + input_tail.lines().next().unwrap_or_default().len());\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/lib.rs", "rank": 21, "score": 207149.84247946815 }, { "content": "pub fn function_value_name<I>(s: I) -> IResult<I, String>\n\nwhere\n\n I: Slice<RangeFrom<usize>> + InputIter + InputTake + InputLength + Clone,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n identifier(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 22, "score": 205835.2205769866 }, { "content": "/// Takes a contents of a string literal (without quotes) and produces a\n\n/// sequence of characters or errors.\n\n/// NOTE: Raw strings do not perform any explicit character escaping, here we\n\n/// only translate CRLF to LF and produce errors on bare CR.\n\nfn unescape_raw_str_or_byte_str<F>(literal_text: &str, mode: Mode, callback: &mut F)\n\nwhere\n\n F: FnMut(Range<usize>, Result<char, EscapeError>),\n\n{\n\n assert!(mode.in_double_quotes());\n\n let initial_len = 
literal_text.len();\n\n\n\n let mut chars = literal_text.chars();\n\n while let Some(curr) = chars.next() {\n\n let start = initial_len - chars.as_str().len() - curr.len_utf8();\n\n\n\n let result = match curr {\n\n '\\r' => Err(EscapeError::BareCarriageReturnInRawString),\n\n c if mode.is_bytes() && !c.is_ascii() => Err(EscapeError::NonAsciiCharInByteString),\n\n c => Ok(c),\n\n };\n\n let end = initial_len - chars.as_str().len();\n\n\n\n callback(start..end, result);\n\n }\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 23, "score": 204659.99280293108 }, { "content": "pub trait Syntax: Debug + Eq + PartialEq + Clone {\n\n fn with_leading_trivia(self, trivia: Trivia) -> Self;\n\n fn with_trailing_trivia(self, trivia: Trivia) -> Self;\n\n fn span(&self) -> Location {\n\n Location::default()\n\n }\n\n fn id(&self) -> NodeId {\n\n NodeId::DUMMY\n\n }\n\n}\n", "file_path": "wiz/wiz_syntax/src/syntax.rs", "rank": 24, "score": 199448.64686955302 }, { "content": "pub fn read(path: &Path) -> Result<Manifest, Box<dyn Error>> {\n\n let file = std::fs::read_to_string(path)?;\n\n let manifest = toml::from_str(&file)?;\n\n Ok(manifest)\n\n}\n\n\n", "file_path": "wiz/wiz/src/core/manifest.rs", "rank": 25, "score": 195779.44941107306 }, { "content": "/// Creates an iterator that produces tokens from the input string.\n\npub fn tokenize(mut input: &str) -> impl Iterator<Item = Token> + '_ {\n\n std::iter::from_fn(move || {\n\n if input.is_empty() {\n\n return None;\n\n }\n\n let token = first_token(input);\n\n input = &input[token.len..];\n\n Some(token)\n\n })\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/lib.rs", "rank": 26, "score": 194437.57204879884 }, { "content": "pub fn write(path: &Path, manifest: &Manifest) -> Result<(), Box<dyn Error>> {\n\n let file = toml::to_string(manifest)?;\n\n std::fs::write(path, file)?;\n\n Ok(())\n\n}\n", "file_path": "wiz/wiz/src/core/manifest.rs", "rank": 27, "score": 191910.22904527435 }, { "content": "/// Takes a 
contents of a literal (without quotes) and produces a\n\n/// sequence of escaped characters or errors.\n\n/// Values are returned through invoking of the provided callback.\n\npub fn unescape_literal<F>(literal_text: &str, mode: Mode, callback: &mut F)\n\nwhere\n\n F: FnMut(Range<usize>, Result<char, EscapeError>),\n\n{\n\n match mode {\n\n Mode::Char | Mode::Byte => {\n\n let mut chars = literal_text.chars();\n\n let result = unescape_char_or_byte(&mut chars, mode);\n\n // The Chars iterator moved forward.\n\n callback(0..(literal_text.len() - chars.as_str().len()), result);\n\n }\n\n Mode::Str | Mode::ByteStr => unescape_str_or_byte_str(literal_text, mode, callback),\n\n // NOTE: Raw strings do not perform any explicit character escaping, here we\n\n // only translate CRLF to LF and produce errors on bare CR.\n\n Mode::RawStr | Mode::RawByteStr => {\n\n unescape_raw_str_or_byte_str(literal_text, mode, callback)\n\n }\n\n }\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 28, "score": 188937.42086962127 }, { "content": "/// Takes a contents of a byte, byte string or raw byte string (without quotes)\n\n/// and produces a sequence of bytes or errors.\n\n/// Values are returned through invoking of the provided callback.\n\npub fn unescape_byte_literal<F>(literal_text: &str, mode: Mode, callback: &mut F)\n\nwhere\n\n F: FnMut(Range<usize>, Result<u8, EscapeError>),\n\n{\n\n assert!(mode.is_bytes());\n\n unescape_literal(literal_text, mode, &mut |range, result| {\n\n callback(range, result.map(byte_from_char));\n\n })\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape.rs", "rank": 29, "score": 187217.21513769682 }, { "content": "fn get_executable_path<P: AsRef<Path>>(executable: P) -> Result<PathBuf, Box<dyn Error>> {\n\n let mut path = env::current_exe()?;\n\n path.pop();\n\n path.push(&executable);\n\n if !path.exists() {\n\n return Err(Box::new(CliError::from(format!(\n\n \"command `{}` could not find\",\n\n executable.as_ref().display()\n\n 
))));\n\n }\n\n Ok(path)\n\n}\n\n\n\npub(crate) fn execute(executable: &str, args: &[&str]) -> Result<(), Box<dyn Error>> {\n\n let executable_path = get_executable_path(executable)?;\n\n let mut command = Command::new(executable_path);\n\n command.args(args);\n\n let err = command.exec();\n\n let error = anyhow::Error::from(err).context(ProcessError::new(None));\n\n if let Some(perr) = error.downcast_ref::<ProcessError>() {\n", "file_path": "wiz/wiz/src/subcommand.rs", "rank": 30, "score": 184722.6981798173 }, { "content": "pub fn identifier<I>(s: I) -> IResult<I, String>\n\nwhere\n\n I: Slice<RangeFrom<usize>> + InputIter + InputTake + InputLength + Clone,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n alt((\n\n map(\n\n tuple((\n\n map(backticks, |r| r.to_string()),\n\n map(identifier_head, |r| r.to_string()),\n\n identifier_characters,\n\n map(backticks, |r| r.to_string()),\n\n )),\n\n |(a, b, c, d)| a + &*b + &*c + &*d,\n\n ),\n\n map(tuple((identifier_head, identifier_characters)), |(a, b)| {\n\n a.to_string() + &*b\n\n }),\n\n ))(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 31, "score": 167180.7887096532 }, { "content": "fn _block_comment<I>(input: I) -> IResult<I, String>\n\nwhere\n\n I: InputTake + FindSubstring<&'static str> + Compare<&'static str> + ToString + Clone,\n\n{\n\n map(\n\n permutation((\n\n block_comment_start,\n\n take_until_block_comment_end,\n\n block_comment_end,\n\n )),\n\n |(a, b, c): (I, I, I)| a.to_string() + &*b.to_string() + &*c.to_string(),\n\n )(input)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 32, "score": 166531.92548856582 }, { "content": "fn _line_comment<I>(input: I) -> IResult<I, String>\n\nwhere\n\n I: InputTake\n\n + Compare<&'static str>\n\n + Clone\n\n + InputLength\n\n + InputIter\n\n + Slice<RangeFrom<usize>>\n\n + ToString,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((line_comment_start, 
many0(none_of_newline), opt(newline))),\n\n |(s, c, e): (I, _, _)| {\n\n s.to_string() + &*String::from_iter(c) + &*e.map(|c| c.to_string()).unwrap_or_default()\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 33, "score": 166531.92548856582 }, { "content": "pub fn identifier_characters<I>(s: I) -> IResult<I, String>\n\nwhere\n\n I: Slice<RangeFrom<usize>> + InputIter + InputTake + InputLength + Clone,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n map(many0(identifier_character), String::from_iter)(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 34, "score": 165241.10664259162 }, { "content": "pub fn function_value_label<I>(s: I) -> IResult<I, String>\n\nwhere\n\n I: Slice<RangeFrom<usize>> + InputIter + InputTake + InputLength + Clone,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n identifier(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 35, "score": 165241.10664259162 }, { "content": "fn _doc_block_comment<I>(input: I) -> IResult<I, String>\n\nwhere\n\n I: InputTake + FindSubstring<&'static str> + Compare<&'static str> + ToString + Clone,\n\n{\n\n map(\n\n permutation((\n\n doc_block_comment_start,\n\n take_until_block_comment_end,\n\n block_comment_end,\n\n )),\n\n |(a, b, c): (I, I, I)| a.to_string() + &*b.to_string() + &*c.to_string(),\n\n )(input)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 36, "score": 164653.03512867005 }, { "content": "fn _doc_line_comment<I>(input: I) -> IResult<I, String>\n\nwhere\n\n I: InputTake\n\n + Compare<&'static str>\n\n + Clone\n\n + InputLength\n\n + InputIter\n\n + Slice<RangeFrom<usize>>\n\n + ToString,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((doc_line_comment_start, many0(none_of_newline), opt(newline))),\n\n |(s, c, e): (I, _, _)| {\n\n s.to_string() + &*String::from_iter(c) + &*e.map(|c| 
c.to_string()).unwrap_or_default()\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 37, "score": 164653.03512867005 }, { "content": "fn arg_std() -> Arg<'static> {\n\n Arg::new(\"std\")\n\n .long(\"std\")\n\n .takes_value(true)\n\n .help(\"Use another std library\")\n\n}\n\n\n", "file_path": "wiz/wiz/src/main.rs", "rank": 38, "score": 151314.76441658504 }, { "content": "fn arg_manifest_path() -> Arg<'static> {\n\n Arg::new(\"manifest-path\")\n\n .long(\"manifest-path\")\n\n .takes_value(true)\n\n .help(\"Path to the manifest file\")\n\n}\n\n\n", "file_path": "wiz/wiz/src/main.rs", "rank": 39, "score": 149975.69986419406 }, { "content": "fn arg_target_triple() -> Arg<'static> {\n\n Arg::new(\"target-triple\")\n\n .long(\"target-triple\")\n\n .takes_value(true)\n\n .help(\"Build target platform\")\n\n}\n\n\n", "file_path": "wiz/wiz/src/main.rs", "rank": 40, "score": 149975.69986419406 }, { "content": "pub fn name_space<I>(s: I) -> IResult<I, NameSpaceSyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n map(many1(name_space_element), |elements| NameSpaceSyntax {\n\n leading_trivia: Default::default(),\n\n elements,\n\n trailing_trivia: Default::default(),\n\n })(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/name_space.rs", "rank": 41, "score": 145324.93475139193 }, { "content": "// <use> ::= \"use\" <package_name> (\"as\" <identifier>)?\n\npub fn use_syntax<I>(s: I) -> IResult<I, UseSyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((\n\n use_keyword,\n\n whitespace1,\n\n opt(package_name),\n\n alt((identifier, 
map(tag(\"*\"), |i: I| i.to_string()))),\n\n opt(tuple((whitespace1, as_keyword, whitespace1, identifier))),\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 42, "score": 143707.92259701746 }, { "content": "// <struct_decl> ::= \"struct\" <identifier> <type_parameters>? <struct_body>\n\npub fn struct_syntax<I>(s: I) -> IResult<I, StructSyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(\n\n tuple((\n\n alt((struct_keyword, protocol_keyword)),\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 43, "score": 143637.81770630702 }, { "content": "// <package_name> ::= (<identifier> \"::\")*\n\npub fn package_name<I>(s: I) -> IResult<I, PackageName>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n map(many1(tuple((identifier, token(\"::\")))), |i| PackageName {\n\n names: i\n\n .into_iter()\n\n .map(|(name, sep)| PackageNameElement {\n\n name: TokenSyntax::from(name),\n\n sep,\n\n })\n\n .collect(),\n\n })(s)\n\n}\n\n\n\n//endregion\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 44, "score": 143472.85487070587 }, { "content": "pub fn name_space_element<I>(s: I) -> IResult<I, NameSpaceElementSyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n map(tuple((identifier, token(\"::\"))), |(i, separator)| {\n\n NameSpaceElementSyntax {\n\n name: 
TokenSyntax::from(i),\n\n separator,\n\n }\n\n })(s)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/name_space.rs", "rank": 45, "score": 142222.82036377263 }, { "content": "// <struct_property> ::= <stored_property>\n\n// | <deinitializer>\n\n// | <member_function>\n\npub fn struct_property<I>(s: I) -> IResult<I, StructPropertySyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n alt((stored_property, deinitializer, member_function))(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 46, "score": 141877.42101340744 }, { "content": "// <arg_label> ::= <identifier> \":\"\n\npub fn arg_label_syntax<I>(s: I) -> IResult<I, ArgLabelSyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(\n\n tuple((identifier, whitespace0, token(\":\"))),\n\n |(label, ws, colon)| ArgLabelSyntax {\n\n label: TokenSyntax::from(label),\n\n colon: colon.with_leading_trivia(ws),\n\n },\n\n )(s)\n\n}\n\n\n\n/*\n\n<annotated_lambda> ::= <label>? 
<lambda_literal>\n\n*/\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/expression.rs", "rank": 47, "score": 140172.78816607667 }, { "content": "// <struct_body> ::= \"{\" <struct_properties> \"}\"\n\n// <struct_properties> ::= (<struct_property> (\"\\n\" <struct_property>)* \"\\n\"?)?\n\npub fn struct_body_syntax<I>(s: I) -> IResult<I, StructBodySyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(\n\n tuple((\n\n token(\"{\"),\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 48, "score": 140168.16131748687 }, { "content": "pub fn use_keyword<I>(s: I) -> IResult<I, I>\n\nwhere\n\n I: InputTake + Compare<&'static str>,\n\n{\n\n tag(\"use\")(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/keywords.rs", "rank": 49, "score": 136583.48644773304 }, { "content": "fn get_builtin_lib() -> &'static [&'static str] {\n\n &[\"core\", \"std\"]\n\n}\n\n\n", "file_path": "wiz/wizc/src/main.rs", "rank": 50, "score": 136070.58017302913 }, { "content": "pub fn return_expr<I>(s: I) -> IResult<I, Expr>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(\n\n tuple((return_keyword, opt(tuple((whitespace1, expr))))),\n\n |(r, e)| {\n\n Expr::Return(ReturnSyntax {\n\n return_keyword: r,\n\n value: e.map(|(ws, e)| 
Box::new(e.with_leading_trivia(ws))),\n\n })\n\n },\n\n )(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/expression.rs", "rank": 51, "score": 134541.00689898248 }, { "content": "pub fn token<T, Input, Error: ParseError<Input>>(\n\n tkn: T,\n\n) -> impl FnMut(Input) -> IResult<Input, TokenSyntax, Error>\n\nwhere\n\n Input: InputTake + Compare<T> + ToString,\n\n T: InputLength + Clone,\n\n{\n\n map(tag(tkn), TokenSyntax::from)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 52, "score": 134462.39498458098 }, { "content": "pub fn type_<I>(s: I) -> IResult<I, TypeName>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n alt((\n\n parenthesized_type,\n\n map(decorated_type, |t| TypeName::Decorated(Box::new(t))),\n\n type_reference,\n\n map(array_type_syntax, |a| TypeName::Array(Box::new(a))),\n\n // function_type,\n\n ))(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/type_.rs", "rank": 53, "score": 134395.75661352623 }, { "content": "pub fn name_expr<I>(s: I) -> IResult<I, Expr>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + Compare<&'static str>\n\n + Slice<Range<usize>>\n\n + FindSubstring<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((opt(name_space), identifier, opt(type_arguments))),\n\n |(name_space, name, type_arguments)| {\n\n Expr::Name(NameExprSyntax {\n\n name_space,\n\n name: TokenSyntax::from(name),\n\n type_arguments,\n\n })\n\n },\n\n )(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/expression.rs", "rank": 54, "score": 134395.75661352623 }, { "content": "/// Parses the first token from the provided input string.\n\npub fn 
first_token(input: &str) -> Token {\n\n debug_assert!(!input.is_empty());\n\n Cursor::new(input).advance_token()\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/lib.rs", "rank": 55, "score": 133118.08254346915 }, { "content": "//region use\n\npub fn use_decl<I>(s: I) -> IResult<I, DeclKind>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(use_syntax, DeclKind::Use)(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 56, "score": 132643.84708809358 }, { "content": "pub fn carriage_return<I>(s: I) -> IResult<I, TokenSyntax>\n\nwhere\n\n I: InputTake + Compare<&'static str> + ToString,\n\n{\n\n token(\"\\r\")(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/character.rs", "rank": 57, "score": 132601.1723220546 }, { "content": "pub fn return_keyword<I>(s: I) -> IResult<I, TokenSyntax>\n\nwhere\n\n I: InputTake + Compare<&'static str> + ToString,\n\n{\n\n token(\"return\")(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/keywords.rs", "rank": 58, "score": 132601.1723220546 }, { "content": "pub fn string_literal<I>(s: I) -> IResult<I, LiteralSyntax>\n\nwhere\n\n I: Clone\n\n + Offset\n\n + InputLength\n\n + InputTake\n\n + InputTakeAtPosition\n\n + Slice<RangeFrom<usize>>\n\n + InputIter\n\n + ToString\n\n + ExtendInto<Item = char, Extender = String>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((\n\n double_quote,\n\n opt(escaped_transform(\n\n not_double_quote_or_back_slash,\n\n '\\\\',\n\n alt((\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/expression.rs", "rank": 59, "score": 132591.74507182842 }, { "content": "pub fn value_argument<I>(s: I) -> IResult<I, CallArg>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + 
Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(\n\n tuple((opt(arg_label_syntax), whitespace0, opt(token(\"*\")), expr)),\n\n |(arg_label, ws, is_vararg, arg)| match is_vararg {\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/expression.rs", "rank": 60, "score": 132591.74507182842 }, { "content": "pub fn struct_decl<I>(s: I) -> IResult<I, DeclKind>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(struct_syntax, DeclKind::Struct)(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 61, "score": 132587.02663787818 }, { "content": "pub fn struct_keyword<I>(s: I) -> IResult<I, TokenSyntax>\n\nwhere\n\n I: InputTake + Compare<&'static str> + ToString,\n\n{\n\n token(\"struct\")(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/keywords.rs", "rank": 62, "score": 132587.02663787818 }, { "content": "// <deinitializer> =:: \"deinit\" <function_body>\n\npub fn deinitializer<I>(s: I) -> IResult<I, StructPropertySyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(\n\n 
tuple((deinit_keyword, whitespace0, function_body)),\n\n |(deinit, ws, body)| {\n\n StructPropertySyntax::Deinit(DeinitializerSyntax {\n\n deinit_keyword: deinit,\n\n body: body.with_leading_trivia(ws),\n\n })\n\n },\n\n )(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 63, "score": 132587.02663787818 }, { "content": "pub fn user_type<I>(s: I) -> IResult<I, TypeName>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((\n\n many0(tuple((simple_user_type, token(\"::\")))),\n\n simple_user_type,\n\n )),\n\n |(name_space, type_name)| {\n\n if name_space.is_empty() {\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/type_.rs", "rank": 64, "score": 132458.234422441 }, { "content": "pub fn parenthesized_type<I>(s: I) -> IResult<I, TypeName>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((token(\"(\"), whitespace0, type_, whitespace0, token(\")\"))),\n\n |(open_paren, ows, type_, cws, close_paren)| {\n\n TypeName::Parenthesized(ParenthesizedTypeName {\n\n open_paren,\n\n type_name: Box::new(type_.with_leading_trivia(ows)),\n\n close_paren: close_paren.with_trailing_trivia(cws),\n\n })\n\n },\n\n )(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/type_.rs", "rank": 65, "score": 132458.234422441 }, { "content": "pub fn type_reference<I>(s: I) -> IResult<I, TypeName>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static 
str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n user_type(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/type_.rs", "rank": 66, "score": 132458.234422441 }, { "content": "pub fn carriage_returns<I>(s: I) -> IResult<I, TriviaPiece>\n\nwhere\n\n I: InputTake + InputLength + Compare<&'static str> + ToString + Clone,\n\n{\n\n map(many1(carriage_return), |l| {\n\n TriviaPiece::CarriageReturns(l.len() as i64)\n\n })(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 67, "score": 130722.13423211462 }, { "content": "// <function_value_parameter> ::= (<function_value_label> <function_value_name> \":\" <type> (\"=\" <expr>)?) | \"self\"\n\npub fn function_value_parameter<I>(s: I) -> IResult<I, ArgDef>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n alt((\n\n map(\n\n tuple((\n\n opt(tuple((function_value_label, whitespace1))),\n\n function_value_name,\n\n whitespace0,\n\n token(\":\"),\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 68, "score": 130717.0770163348 }, { "content": "pub fn raw_string_literal<I>(s: I) -> IResult<I, LiteralSyntax>\n\nwhere\n\n I: InputTake\n\n + Compare<&'static str>\n\n + Clone\n\n + FindSubstring<&'static str>\n\n + Slice<RangeFrom<usize>>\n\n + InputIter\n\n + ToString,\n\n <I as InputIter>::Item: AsChar,\n\n{\n\n map(\n\n permutation((char('r'), double_quote, take_until(\"\\\"\"), double_quote)),\n\n |(r, a, b, c): (_, _, I, _)| LiteralSyntax::String {\n\n open_quote: TokenSyntax::from(r.to_string() + &*a.to_string()),\n\n value: b.to_string(),\n\n close_quote: TokenSyntax::from(c),\n\n },\n\n )(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/expression.rs", "rank": 69, "score": 130712.85471193265 }, { 
"content": "// <member_function> =:: <modifiers>? \"fun\" <identifier> <type_parameters>? <function_value_parameters> (\":\" <type>)? <type_constraints>? <function_body>?\n\npub fn member_function<I>(s: I) -> IResult<I, StructPropertySyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(function_syntax, StructPropertySyntax::Method)(s)\n\n}\n\n\n\n//endregion\n\n\n\n//region func\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 70, "score": 130708.21021836161 }, { "content": "// <stored_property> ::= (\"var\" | \"val\") <identifier> \":\" <type>\n\npub fn stored_property<I>(s: I) -> IResult<I, StructPropertySyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(stored_property_syntax, StructPropertySyntax::StoredProperty)(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 71, "score": 130708.21021836161 }, { "content": "pub fn decorated_type<I>(s: I) -> IResult<I, DecoratedTypeName>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n tuple((\n\n alt((token(\"*\"), ampersand)),\n\n alt((type_reference, parenthesized_type)),\n\n )),\n\n |(p, type_name)| DecoratedTypeName {\n\n decoration: p,\n\n type_: type_name,\n\n },\n\n 
)(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/type_.rs", "rank": 72, "score": 130581.4362457022 }, { "content": "fn check(source: &str, except: MLFile) {\n\n let ast = parse_from_string(source, Some(&except.name)).unwrap();\n\n\n\n let mut session = Session::new();\n\n\n\n let mut arena = ResolverArena::default();\n\n\n\n let mut ast2hlir = AstLowering::new(&mut session, &mut arena);\n\n\n\n let file = ast2hlir.file(ast);\n\n\n\n let mut resolver = TypeResolver::new(&mut session, &mut arena);\n\n let _ = resolver.preload_file(&file).unwrap();\n\n let hl_file = resolver.file(file).unwrap();\n\n\n\n let mut hlir2mlir = HLIR2MLIR::new(&mut arena);\n\n\n\n let f = hlir2mlir.convert_from_source_set(TypedSourceSet::File(hl_file));\n\n\n\n assert_eq!(f, except);\n\n}\n\n\n", "file_path": "wiz/wizc/src/middle_level_ir/tests.rs", "rank": 73, "score": 130396.05955947834 }, { "content": "pub fn value_arguments<I>(s: I) -> IResult<I, CallArgListSyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + Offset\n\n + InputTakeAtPosition\n\n + ExtendInto<Item = char, Extender = String>\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n <I as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n map(\n\n tuple((\n\n token(\"(\"),\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/expression.rs", "rank": 74, "score": 128891.94223776349 }, { "content": "pub fn simple_user_type<I>(s: I) -> IResult<I, SimpleTypeName>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + InputTake\n\n + InputLength\n\n + Clone\n\n + ToString\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(tuple((identifier, opt(type_arguments))), |(name, args)| {\n\n SimpleTypeName {\n\n name: TokenSyntax::from(name),\n\n type_args: args,\n\n 
}\n\n })(s)\n\n}\n\n\n\n// pub fn function_type(s: &str) -> IResult<&str, TypeName> {\n\n//\n\n// }\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/type_.rs", "rank": 75, "score": 128762.55139511873 }, { "content": "pub fn get_line_offset(s: &str, location: &Location) -> usize {\n\n let mut n = 1usize;\n\n let target_line = location.line() as usize;\n\n for (i, c) in s.char_indices() {\n\n if c == '\\n' {\n\n n += 1;\n\n continue;\n\n };\n\n if n == target_line {\n\n return location.offset() - i;\n\n };\n\n }\n\n return 0;\n\n}\n\n\n", "file_path": "wiz/wiz_span/src/lib.rs", "rank": 76, "score": 127598.71566044522 }, { "content": "fn read_package_files(path: &Path) -> Result<SourceSet> {\n\n Ok(if path.is_dir() {\n\n let dir = fs::read_dir(path)?;\n\n SourceSet::Dir {\n\n name: path\n\n .file_name()\n\n .and_then(|p| p.to_str())\n\n .unwrap()\n\n .to_string(),\n\n items: dir\n\n .into_iter()\n\n .map(|d| read_package_files(&*d.unwrap().path()))\n\n .collect::<Result<_>>()?,\n\n }\n\n } else {\n\n SourceSet::File(parse_from_file_path(path)?)\n\n })\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz.rs", "rank": 77, "score": 127289.80068241528 }, { "content": "pub fn carriage_return_line_feeds<I>(s: I) -> IResult<I, TriviaPiece>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + Clone\n\n + InputLength\n\n + InputIter\n\n + Compare<&'static str>,\n\n{\n\n map(many1(crlf), |l| {\n\n TriviaPiece::CarriageReturnLineFeeds(l.len() as i64)\n\n })(s)\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/lexical_structure.rs", "rank": 78, "score": 127135.36237242434 }, { "content": "pub fn function_value_parameters<I>(s: I) -> IResult<I, ArgDefListSyntax>\n\nwhere\n\n I: Slice<RangeFrom<usize>>\n\n + Slice<Range<usize>>\n\n + InputIter\n\n + Clone\n\n + InputLength\n\n + ToString\n\n + InputTake\n\n + FindSubstring<&'static str>\n\n + Compare<&'static str>,\n\n <I as InputIter>::Item: AsChar + Copy,\n\n{\n\n map(\n\n 
tuple((\n\n token(\"(\"),\n\n many0(tuple((\n\n whitespace0,\n\n function_value_parameter,\n\n whitespace0,\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz/declaration.rs", "rank": 79, "score": 127126.36484437884 }, { "content": "fn check(source: &str, typed_file: TypedFile) {\n\n let ast = parse_from_string(source, Some(&typed_file.name)).unwrap();\n\n\n\n let mut session = Session::new();\n\n\n\n let mut arena = ResolverArena::default();\n\n\n\n let mut ast2hlir = AstLowering::new(&mut session, &mut arena);\n\n\n\n let file = ast2hlir.file(ast);\n\n\n\n let mut resolver = TypeResolver::new(&mut session, &mut arena);\n\n let _ = resolver.preload_file(&file).unwrap();\n\n let f = resolver.file(file);\n\n\n\n assert_eq!(f, Ok(typed_file));\n\n}\n\n\n", "file_path": "wiz/wizc/src/high_level_ir/type_resolver/tests.rs", "rank": 80, "score": 126634.55662489342 }, { "content": "pub fn parse_from_file_path(path: &Path) -> Result<WizFile> {\n\n let s = read_to_string(path)?;\n\n parse_from_string(&*s, path.as_os_str().to_str())\n\n}\n\n\n", "file_path": "wiz/wiz_syntax_parser/src/parser/wiz.rs", "rank": 81, "score": 124641.06430409 }, { "content": "pub fn check<'a, T, P>(source: &'a str, parser: P, excepted: T)\n\nwhere\n\n P: Fn(Span<'a>) -> IResult<Span<'a>, T>,\n\n T: Debug + PartialEq,\n\n IResult<Span<'a>, T>: Debug,\n\n{\n\n let result = parser(Span::<'a>::from(source));\n\n if let Ok((_, actual)) = result {\n\n assert_eq!(actual, excepted);\n\n } else {\n\n panic!(\"{:?}\", result)\n\n }\n\n}\n", "file_path": "wiz/wiz_syntax_parser/src/parser/tests.rs", "rank": 82, "score": 116886.21293256394 }, { "content": "#[test]\n\nfn raw_string() {\n\n check_lexing(\n\n \"r###\\\"\\\"#a\\\\b\\x00c\\\"\\\"###\",\n\n r#\"\n\n Token { kind: Literal { kind: RawStr { n_hashes: 3, err: None }, suffix_start: 17 }, len: 17 }\n\n \"#.trim_indent(),\n\n )\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/tests.rs", "rank": 84, "score": 105808.65952269643 }, { "content": "#[test]\n\nfn 
test_return_integer() {\n\n type MainFunc = unsafe extern \"C\" fn() -> u8;\n\n let mlfile = MLFile {\n\n name: \"name\".to_string(),\n\n body: vec![MLDecl::Fun(MLFun {\n\n name: \"test\".to_string(),\n\n arg_defs: vec![],\n\n return_type: MLValueType::Primitive(MLPrimitiveType::UInt8),\n\n body: Some(MLFunBody {\n\n body: vec![\n\n MLStmt::Var(MLVar {\n\n is_mute: false,\n\n name: \"i\".to_string(),\n\n type_: MLType::Value(MLValueType::Primitive(MLPrimitiveType::UInt8)),\n\n value: MLExpr::Literal(MLLiteral {\n\n kind: MLLiteralKind::Integer(\"5\".to_string()),\n\n type_: MLValueType::Primitive(MLPrimitiveType::UInt8),\n\n }),\n\n }),\n\n MLStmt::Expr(MLExpr::Return(MLReturn {\n", "file_path": "wiz/wizc/src/llvm_ir/tests.rs", "rank": 85, "score": 102703.18340195266 }, { "content": "#[test]\n\nfn test_struct() {\n\n let source = r\"\n\n struct A {\n\n val a: Int64\n\n }\n\n \";\n\n\n\n check(\n\n source,\n\n MLFile {\n\n name: \"test\".to_string(),\n\n body: vec![MLDecl::Struct(MLStruct {\n\n name: \"test::A\".to_string(),\n\n fields: vec![MLField {\n\n name: \"a\".to_string(),\n\n type_: MLValueType::Primitive(MLPrimitiveType::Int64),\n\n }],\n\n })],\n\n },\n\n );\n\n}\n\n\n", "file_path": "wiz/wizc/src/middle_level_ir/tests.rs", "rank": 86, "score": 102687.81433973707 }, { "content": "#[test]\n\nfn test_naked_raw_str() {\n\n check_raw_str(r#\"\"abc\"\"#, 0, None);\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/tests.rs", "rank": 87, "score": 102631.1696443833 }, { "content": "#[test]\n\nfn test_return_global_constant() {\n\n type MainFunc = unsafe extern \"C\" fn() -> u8;\n\n let mlfile = MLFile {\n\n name: \"name\".to_string(),\n\n body: vec![\n\n MLDecl::Var(MLVar {\n\n is_mute: false,\n\n name: \"i\".to_string(),\n\n type_: MLType::Value(MLValueType::Primitive(MLPrimitiveType::UInt8)),\n\n value: MLExpr::Literal(MLLiteral {\n\n kind: MLLiteralKind::Integer(\"5\".to_string()),\n\n type_: MLValueType::Primitive(MLPrimitiveType::UInt8),\n\n }),\n\n }),\n\n 
MLDecl::Fun(MLFun {\n\n name: \"test\".to_string(),\n\n arg_defs: vec![],\n\n return_type: MLValueType::Primitive(MLPrimitiveType::UInt8),\n\n body: Some(MLFunBody {\n\n body: vec![MLStmt::Expr(MLExpr::Return(MLReturn {\n", "file_path": "wiz/wizc/src/llvm_ir/tests.rs", "rank": 88, "score": 101224.61356284571 }, { "content": "#[test]\n\nfn test_return_floating_point() {\n\n type MainFunc = unsafe extern \"C\" fn() -> f64;\n\n let mlfile = MLFile {\n\n name: \"name\".to_string(),\n\n body: vec![MLDecl::Fun(MLFun {\n\n name: \"test\".to_string(),\n\n arg_defs: vec![],\n\n return_type: MLValueType::Primitive(MLPrimitiveType::Double),\n\n body: Some(MLFunBody {\n\n body: vec![\n\n MLStmt::Var(MLVar {\n\n is_mute: false,\n\n name: \"d\".to_string(),\n\n type_: MLType::Value(MLValueType::Primitive(MLPrimitiveType::Double)),\n\n value: MLExpr::Literal(MLLiteral {\n\n kind: MLLiteralKind::FloatingPoint(\"5.1\".to_string()),\n\n type_: MLValueType::Primitive(MLPrimitiveType::Double),\n\n }),\n\n }),\n\n MLStmt::Expr(MLExpr::Return(MLReturn {\n", "file_path": "wiz/wizc/src/llvm_ir/tests.rs", "rank": 89, "score": 101224.61356284571 }, { "content": "#[test]\n\nfn test_return_integer_literal() {\n\n type MainFunc = unsafe extern \"C\" fn() -> u8;\n\n let mlfile = MLFile {\n\n name: \"name\".to_string(),\n\n body: vec![MLDecl::Fun(MLFun {\n\n name: \"test\".to_string(),\n\n arg_defs: vec![],\n\n return_type: MLValueType::Primitive(MLPrimitiveType::UInt8),\n\n body: Some(MLFunBody {\n\n body: vec![MLStmt::Expr(MLExpr::Return(MLReturn {\n\n value: Some(Box::new(MLExpr::Literal(MLLiteral {\n\n kind: MLLiteralKind::Integer(\"5\".to_string()),\n\n type_: MLValueType::Primitive(MLPrimitiveType::UInt8),\n\n }))),\n\n }))],\n\n }),\n\n })],\n\n };\n\n let module_name = &mlfile.name;\n\n let context = Context::create();\n", "file_path": "wiz/wizc/src/llvm_ir/tests.rs", "rank": 90, "score": 101224.61356284571 }, { "content": "#[test]\n\nfn test_struct_init() {\n\n let source = r\"\n\n 
struct A {\n\n val a: Int64\n\n }\n\n fun initA(): A {\n\n return A(a: 1)\n\n }\n\n \";\n\n\n\n check(\n\n source,\n\n MLFile {\n\n name: \"test\".to_string(),\n\n body: vec![\n\n MLDecl::Struct(MLStruct {\n\n name: \"test::A\".to_string(),\n\n fields: vec![MLField {\n\n name: \"a\".to_string(),\n\n type_: MLValueType::Primitive(MLPrimitiveType::Int64),\n", "file_path": "wiz/wizc/src/middle_level_ir/tests.rs", "rank": 91, "score": 101209.50581686516 }, { "content": "#[test]\n\nfn test_unescape_str_warn() {\n\n fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)]) {\n\n let mut unescaped = Vec::with_capacity(literal.len());\n\n unescape_literal(literal, Mode::Str, &mut |range, res| {\n\n unescaped.push((range, res))\n\n });\n\n assert_eq!(unescaped, expected);\n\n }\n\n\n\n // Check we can handle escaped newlines at the end of a file.\n\n check(\"\\\\\\n\", &[]);\n\n check(\"\\\\\\n \", &[]);\n\n\n\n check(\n\n \"\\\\\\n \\u{a0} x\",\n\n &[\n\n (0..5, Err(EscapeError::UnskippedWhitespaceWarning)),\n\n (3..5, Ok('\\u{a0}')),\n\n (5..6, Ok(' ')),\n\n (6..7, Ok('x')),\n", "file_path": "wiz/wiz_lexar/src/unescape/tests.rs", "rank": 92, "score": 101153.82423678259 }, { "content": "#[test]\n\nfn test_unescape_str_good() {\n\n fn check(literal_text: &str, expected: &str) {\n\n let mut buf = Ok(String::with_capacity(literal_text.len()));\n\n unescape_literal(literal_text, Mode::Str, &mut |range, c| {\n\n if let Ok(b) = &mut buf {\n\n match c {\n\n Ok(c) => b.push(c),\n\n Err(e) => buf = Err((range, e)),\n\n }\n\n }\n\n });\n\n let buf = buf.as_ref().map(|it| it.as_ref());\n\n assert_eq!(buf, Ok(expected))\n\n }\n\n\n\n check(\"foo\", \"foo\");\n\n check(\"\", \"\");\n\n check(\" \\t\\n\", \" \\t\\n\");\n\n\n\n check(\"hello \\\\\\n world\", \"hello world\");\n\n check(\"thread's\", \"thread's\")\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape/tests.rs", "rank": 93, "score": 101153.82423678259 }, { "content": "#[test]\n\nfn 
test_unescape_raw_str() {\n\n fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)]) {\n\n let mut unescaped = Vec::with_capacity(literal.len());\n\n unescape_literal(literal, Mode::RawStr, &mut |range, res| {\n\n unescaped.push((range, res))\n\n });\n\n assert_eq!(unescaped, expected);\n\n }\n\n\n\n check(\n\n \"\\r\",\n\n &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))],\n\n );\n\n check(\n\n \"\\rx\",\n\n &[\n\n (0..1, Err(EscapeError::BareCarriageReturnInRawString)),\n\n (1..2, Ok('x')),\n\n ],\n\n );\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape/tests.rs", "rank": 94, "score": 101153.82423678259 }, { "content": "#[test]\n\nfn test_return_integer_literal() {\n\n let source = r\"\n\n fun integer(): Int64 {\n\n return 1\n\n }\n\n \";\n\n\n\n check(\n\n source,\n\n MLFile {\n\n name: \"test\".to_string(),\n\n body: vec![\n\n MLDecl::Fun(MLFun {\n\n name: \"test::integer\".to_string(),\n\n arg_defs: vec![],\n\n return_type: MLValueType::Primitive(MLPrimitiveType::Int64),\n\n body: None,\n\n }),\n\n MLDecl::Fun(MLFun {\n\n name: \"test::integer\".to_string(),\n", "file_path": "wiz/wizc/src/middle_level_ir/tests.rs", "rank": 95, "score": 99795.4826166989 }, { "content": "#[test]\n\nfn test_return_floating_point_literal() {\n\n type MainFunc = unsafe extern \"C\" fn() -> f64;\n\n let mlfile = MLFile {\n\n name: \"name\".to_string(),\n\n body: vec![MLDecl::Fun(MLFun {\n\n name: \"test\".to_string(),\n\n arg_defs: vec![],\n\n return_type: MLValueType::Primitive(MLPrimitiveType::Double),\n\n body: Some(MLFunBody {\n\n body: vec![MLStmt::Expr(MLExpr::Return(MLReturn {\n\n value: Some(Box::new(MLExpr::Literal(MLLiteral {\n\n kind: MLLiteralKind::FloatingPoint(\"5.1\".to_string()),\n\n type_: MLValueType::Primitive(MLPrimitiveType::Double),\n\n }))),\n\n }))],\n\n }),\n\n })],\n\n };\n\n let module_name = &mlfile.name;\n\n let context = Context::create();\n", "file_path": "wiz/wizc/src/llvm_ir/tests.rs", "rank": 96, "score": 
99795.4826166989 }, { "content": "#[test]\n\nfn test_unescape_raw_byte_str() {\n\n fn check(literal: &str, expected: &[(Range<usize>, Result<u8, EscapeError>)]) {\n\n let mut unescaped = Vec::with_capacity(literal.len());\n\n unescape_byte_literal(literal, Mode::RawByteStr, &mut |range, res| {\n\n unescaped.push((range, res))\n\n });\n\n assert_eq!(unescaped, expected);\n\n }\n\n\n\n check(\n\n \"\\r\",\n\n &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))],\n\n );\n\n check(\"🔮\", &[(0..4, Err(EscapeError::NonAsciiCharInByteString))]);\n\n check(\n\n \"🔮a\",\n\n &[\n\n (0..4, Err(EscapeError::NonAsciiCharInByteString)),\n\n (4..5, Ok(byte_from_char('a'))),\n\n ],\n\n );\n\n}\n", "file_path": "wiz/wiz_lexar/src/unescape/tests.rs", "rank": 97, "score": 99725.87678086443 }, { "content": "#[test]\n\nfn test_unescape_byte_str_good() {\n\n fn check(literal_text: &str, expected: &[u8]) {\n\n let mut buf = Ok(Vec::with_capacity(literal_text.len()));\n\n unescape_byte_literal(literal_text, Mode::ByteStr, &mut |range, c| {\n\n if let Ok(b) = &mut buf {\n\n match c {\n\n Ok(c) => b.push(c),\n\n Err(e) => buf = Err((range, e)),\n\n }\n\n }\n\n });\n\n let buf = buf.as_ref().map(|it| it.as_ref());\n\n assert_eq!(buf, Ok(expected))\n\n }\n\n\n\n check(\"foo\", b\"foo\");\n\n check(\"\", b\"\");\n\n check(\" \\t\\n\", b\" \\t\\n\");\n\n\n\n check(\"hello \\\\\\n world\", b\"hello world\");\n\n check(\"thread's\", b\"thread's\")\n\n}\n\n\n", "file_path": "wiz/wiz_lexar/src/unescape/tests.rs", "rank": 98, "score": 99725.87678086443 }, { "content": "#[test]\n\nfn test_struct_init() {\n\n let source = r\"\n\n struct A {\n\n val a: Int64\n\n }\n\n fun function(_ a: A) {\n\n val a = A(a:1)\n\n }\n\n \";\n\n\n\n check(\n\n source,\n\n TypedFile {\n\n name: \"test\".to_string(),\n\n uses: vec![],\n\n body: vec![\n\n TypedDecl {\n\n annotations: Default::default(),\n\n package: Package::from(&vec![\"test\"]),\n\n modifiers: vec![],\n", "file_path": 
"wiz/wizc/src/high_level_ir/type_resolver/tests.rs", "rank": 99, "score": 98398.74081209244 } ]
Rust
src/window.rs
TomasKralCZ/Leoric
df1c20319e4f0cc140dfbbce33a76dab0ac28188
use std::time::Instant; use egui::CtxRef; use egui_backend::{painter::Painter, DpiScaling, EguiStateHandler}; use egui_sdl2_gl::ShaderVersion; use eyre::{eyre, Result}; use sdl2::{ event::{Event, WindowEvent}, video::Window, video::{GLContext, GLProfile, SwapInterval}, EventPump, Sdl, VideoSubsystem, }; use egui_sdl2_gl as egui_backend; pub struct MyWindow { _sdl_context: Sdl, _video_subsystem: VideoSubsystem, window: Window, _gl_ctx: GLContext, pub event_pump: EventPump, pub egui_ctx: CtxRef, egui_state: EguiStateHandler, painter: Painter, start_time: Instant, pub width: u32, pub height: u32, } impl MyWindow { pub fn new(title: &str) -> Result<Self> { let sdl_context = sdl2::init().map_err(|e| eyre!("{e}"))?; let video_subsystem = sdl_context.video().map_err(|e| eyre!("{e}"))?; let size = video_subsystem .display_bounds(0) .map_err(|e| eyre!("{e}"))?; let width = (size.width() as f32 * 0.7) as u32; let height = (size.height() as f32 * 0.7) as u32; let window = video_subsystem .window(title, width, height) .opengl() .resizable() .position_centered() .allow_highdpi() .build()?; let gl_ctx = window.gl_create_context().map_err(|e| eyre!("{e}"))?; let gl_attr = video_subsystem.gl_attr(); gl_attr.set_context_major_version(4); gl_attr.set_context_minor_version(2); gl_attr.set_context_profile(GLProfile::Core); gl_attr.set_context_flags().debug().set(); gl_attr.set_double_buffer(true); window .subsystem() .gl_set_swap_interval(SwapInterval::Immediate) .map_err(|e| eyre!("{e}"))?; let shader_ver = ShaderVersion::Default; let custom_dpi = { if width <= 1280 && height <= 720 { 1.0 } else if width <= 1920 && height <= 1080 { 1.5 } else { 2.5 } }; let (painter, egui_state) = egui_backend::with_sdl2(&window, shader_ver, DpiScaling::Custom(custom_dpi)); let egui_ctx = egui::CtxRef::default(); let event_pump = sdl_context.event_pump().map_err(|e| eyre!("{e}"))?; Ok(Self { _sdl_context: sdl_context, _video_subsystem: video_subsystem, window, _gl_ctx: gl_ctx, event_pump, egui_ctx, 
egui_state, painter, start_time: Instant::now(), width, height, }) } pub fn begin_frame(&mut self) { self.egui_state.input.time = Some(self.start_time.elapsed().as_secs_f64()); self.egui_ctx.begin_frame(self.egui_state.input.take()); } pub fn end_frame(&mut self) -> bool { unsafe { gl::Disable(gl::DEPTH_TEST); gl::Disable(gl::CULL_FACE); gl::PolygonMode(gl::FRONT_AND_BACK, gl::FILL); } let (egui_output, paint_cmds) = self.egui_ctx.end_frame(); self.egui_state.process_output(&self.window, &egui_output); let paint_jobs = self.egui_ctx.tessellate(paint_cmds); if !egui_output.needs_repaint { /* if let Some(event) = self.event_pump.wait_event_timeout(5) { match event { Event::Quit { .. } => return true, _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } */ } else { self.painter .paint_jobs(None, paint_jobs, &self.egui_ctx.font_image()); self.window.gl_swap_window(); } for event in self.event_pump.poll_iter() { match event { Event::Quit { .. } => return true, Event::Window { timestamp: _, window_id: _, win_event: WindowEvent::Resized(new_width, new_height), } => { self.width = new_width as u32; self.height = new_height as u32; } _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } false } }
use std::time::Instant; use egui::CtxRef; use egui_backend::{painter::Painter, DpiScaling, EguiStateHandler}; use egui_sdl2_gl::ShaderVersion; use eyre::{eyre, Result}; use sdl2::{ event::{Event, WindowEvent}, video::Window, video::{GLContext, GLProfile, SwapInterval}, EventPump, Sdl, VideoSubsystem, }; use egui_sdl2_gl as egui_backend; pub struct MyWindow { _sdl_context: Sdl, _video_subsystem: VideoSubsystem, window: Window, _gl_ctx: GLContext, pub event_pump: EventPump, pub egui_ctx: CtxRef, egui_state: EguiStateHandler, painter: Painter, start_time: Instant, pub width: u32, pub height: u32, } impl MyWindow {
pub fn begin_frame(&mut self) { self.egui_state.input.time = Some(self.start_time.elapsed().as_secs_f64()); self.egui_ctx.begin_frame(self.egui_state.input.take()); } pub fn end_frame(&mut self) -> bool { unsafe { gl::Disable(gl::DEPTH_TEST); gl::Disable(gl::CULL_FACE); gl::PolygonMode(gl::FRONT_AND_BACK, gl::FILL); } let (egui_output, paint_cmds) = self.egui_ctx.end_frame(); self.egui_state.process_output(&self.window, &egui_output); let paint_jobs = self.egui_ctx.tessellate(paint_cmds); if !egui_output.needs_repaint { /* if let Some(event) = self.event_pump.wait_event_timeout(5) { match event { Event::Quit { .. } => return true, _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } */ } else { self.painter .paint_jobs(None, paint_jobs, &self.egui_ctx.font_image()); self.window.gl_swap_window(); } for event in self.event_pump.poll_iter() { match event { Event::Quit { .. } => return true, Event::Window { timestamp: _, window_id: _, win_event: WindowEvent::Resized(new_width, new_height), } => { self.width = new_width as u32; self.height = new_height as u32; } _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } false } }
pub fn new(title: &str) -> Result<Self> { let sdl_context = sdl2::init().map_err(|e| eyre!("{e}"))?; let video_subsystem = sdl_context.video().map_err(|e| eyre!("{e}"))?; let size = video_subsystem .display_bounds(0) .map_err(|e| eyre!("{e}"))?; let width = (size.width() as f32 * 0.7) as u32; let height = (size.height() as f32 * 0.7) as u32; let window = video_subsystem .window(title, width, height) .opengl() .resizable() .position_centered() .allow_highdpi() .build()?; let gl_ctx = window.gl_create_context().map_err(|e| eyre!("{e}"))?; let gl_attr = video_subsystem.gl_attr(); gl_attr.set_context_major_version(4); gl_attr.set_context_minor_version(2); gl_attr.set_context_profile(GLProfile::Core); gl_attr.set_context_flags().debug().set(); gl_attr.set_double_buffer(true); window .subsystem() .gl_set_swap_interval(SwapInterval::Immediate) .map_err(|e| eyre!("{e}"))?; let shader_ver = ShaderVersion::Default; let custom_dpi = { if width <= 1280 && height <= 720 { 1.0 } else if width <= 1920 && height <= 1080 { 1.5 } else { 2.5 } }; let (painter, egui_state) = egui_backend::with_sdl2(&window, shader_ver, DpiScaling::Custom(custom_dpi)); let egui_ctx = egui::CtxRef::default(); let event_pump = sdl_context.event_pump().map_err(|e| eyre!("{e}"))?; Ok(Self { _sdl_context: sdl_context, _video_subsystem: video_subsystem, window, _gl_ctx: gl_ctx, event_pump, egui_ctx, egui_state, painter, start_time: Instant::now(), width, height, }) }
function_block-full_function
[ { "content": "/// Create an opengl buffer with integer content.\n\n///\n\n/// 'buffer' is a reference to a slice of T.\n\n///\n\n/// 'components', 'attrib index' and 'typ' have the same meaning as the respective\n\n/// arguments in glVertexAttribPointer.\n\npub fn create_int_buf<T: Copy>(buffer: &[T], components: i32, attrib_index: u32, typ: u32) -> u32 {\n\n let mut id: u32 = 0;\n\n\n\n unsafe {\n\n gl::GenBuffers(1, &mut id as *mut _);\n\n gl::BindBuffer(gl::ARRAY_BUFFER, id);\n\n\n\n let buffer_size = buffer.len() * size_of::<T>();\n\n\n\n gl::BufferData(\n\n gl::ARRAY_BUFFER,\n\n buffer_size as isize,\n\n // The layout of Vec3 is #[repr(C)] (struct of 3 floats), so it should be correct\n\n buffer.as_ptr() as _,\n\n gl::STATIC_DRAW,\n\n );\n\n\n\n gl::VertexAttribIPointer(attrib_index, components, typ, 0, 0 as _);\n\n gl::EnableVertexAttribArray(attrib_index);\n\n }\n\n\n\n id\n\n}\n\n\n", "file_path": "src/ogl.rs", "rank": 0, "score": 67287.73125889972 }, { "content": "/// A struct that holds which transforms should be aplied to which nodes for the current frame\n\nstruct NodeAnimationTransform {\n\n /// Index of the node\n\n node: usize,\n\n /// Transform that should overwrite the node's current transform\n\n transform: AnimationTransform,\n\n}\n\n\n\nimpl NodeAnimationTransform {\n\n fn new(node: usize, transform: AnimationTransform) -> Self {\n\n Self { node, transform }\n\n }\n\n}\n", "file_path": "src/renderer.rs", "rank": 1, "score": 42276.816060642 }, { "content": "/// Creates the window, configures OpenGL, sets up the scene and begins the render loop.\n\nfn main() -> Result<()> {\n\n let mut window = MyWindow::new(\"PGRF2 Projekt - Skeletální Animace - Tomáš Král\")?;\n\n\n\n ogl::init_debug();\n\n\n\n let mut scene = setup_scene()?;\n\n let mut gui = Gui::new();\n\n let mut renderer = Renderer::new()?;\n\n let mut camera = Camera::new(\n\n Vec3::new(0.2, 3., 7.5),\n\n 0.05,\n\n 0.05,\n\n window.width,\n\n window.height,\n\n );\n\n\n\n 'render_loop: 
loop {\n\n handle_inputs(&mut window.event_pump, &mut camera);\n\n\n\n window.begin_frame();\n", "file_path": "src/main.rs", "rank": 2, "score": 41510.79533947509 }, { "content": "pub fn init_debug() {\n\n unsafe {\n\n gl::Enable(gl::DEBUG_OUTPUT);\n\n gl::Enable(gl::DEBUG_OUTPUT_SYNCHRONOUS);\n\n gl::DebugMessageCallback(Some(gl_debug_callback), ptr::null());\n\n gl::DebugMessageControl(\n\n gl::DONT_CARE,\n\n gl::DONT_CARE,\n\n gl::DONT_CARE,\n\n 0,\n\n ptr::null(),\n\n gl::TRUE,\n\n );\n\n };\n\n}\n\n\n\n/// The OpenGL debug callback.\n\n///\n\n/// 'extern \"system\"' specifies the correct ABI for all platforms\n\nextern \"system\" fn gl_debug_callback(\n", "file_path": "src/ogl.rs", "rank": 3, "score": 37197.18852330708 }, { "content": "/// Adds models to the scene\n\nfn setup_scene() -> Result<Vec<Model>> {\n\n let mut scene = Vec::new();\n\n\n\n let mut add = |path: &str| -> Result<()> {\n\n let start = std::time::Instant::now();\n\n\n\n let model = Model::from_gltf(path)?;\n\n\n\n let time = std::time::Instant::now().duration_since(start);\n\n println!(\"Loading '{path}' took '{time:?}'\");\n\n\n\n scene.push(model);\n\n Ok(())\n\n };\n\n\n\n add(\"resources/dancing_stormtrooper/Stormtrooper.gltf\")?;\n\n add(\"resources/animated_humanoid_robot/Droid.gltf\")?;\n\n add(\"resources/pakistan_girl_-_animated/Girl.gltf\")?;\n\n add(\"resources/toon_cat_free/Cat.gltf\")?;\n\n\n\n let cat = scene.last_mut().unwrap();\n\n cat.transform = Mat4::from_scale(Vec3::splat(0.015));\n\n\n\n Ok(scene)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 36362.04504523404 }, { "content": "pub trait UniformBufferElement {\n\n /// The binding port\n\n const BINDING: u32;\n\n /// Update buffer data using gl::BufferSubData\n\n fn update(&self);\n\n /// Allocate data for the element with gl::BufferData\n\n fn init_buffer(&self);\n\n}\n", "file_path": "src/ogl/uniform_buffer.rs", "rank": 5, "score": 34237.058429624565 }, { "content": "/// Create an opengl buffer with 
floating-point content.\n\n///\n\n/// 'buffer' is a reference to a slice of T.\n\n///\n\n/// 'components', 'attrib index' and 'typ' have the same meaning as the respective\n\n/// arguments in glVertexAttribPointer.\n\npub fn create_float_buf<T: Copy>(\n\n buffer: &[T],\n\n components: i32,\n\n attrib_index: u32,\n\n typ: u32,\n\n) -> u32 {\n\n let mut id: u32 = 0;\n\n\n\n unsafe {\n\n gl::GenBuffers(1, &mut id as *mut _);\n\n gl::BindBuffer(gl::ARRAY_BUFFER, id);\n\n\n\n let buffer_size = buffer.len() * size_of::<T>();\n\n\n\n gl::BufferData(\n\n gl::ARRAY_BUFFER,\n\n buffer_size as isize,\n\n // The layout of Vec3 is #[repr(C)] (struct of 3 floats), so this should be correct\n\n buffer.as_ptr() as _,\n\n gl::STATIC_DRAW,\n\n );\n\n\n\n gl::VertexAttribPointer(attrib_index, components, typ, gl::FALSE, 0, 0 as _);\n\n gl::EnableVertexAttribArray(attrib_index);\n\n }\n\n\n\n id\n\n}\n\n\n", "file_path": "src/ogl.rs", "rank": 6, "score": 32745.23829853312 }, { "content": "// TODO: do not create a new buffer every frame\n\n/// Draws the joints by their current world transforms\n\npub fn draw_joints(world_transforms: &[Mat4], shader: &Shader) {\n\n let mut positions = Vec::new();\n\n let texcoords = vec![Vec2::ZERO; world_transforms.len()];\n\n let normals = vec![Vec3::ZERO; world_transforms.len()];\n\n\n\n for trans in world_transforms {\n\n let pos = *trans * Vec4::new(0., 0., 0., 1.);\n\n positions.push(pos.xyz());\n\n }\n\n\n\n let mut vao = 0;\n\n\n\n unsafe {\n\n gl::GenVertexArrays(1, &mut vao);\n\n gl::BindVertexArray(vao);\n\n\n\n let _positions = ogl::create_float_buf(&positions, 3, ogl::POS_INDEX, gl::FLOAT);\n\n let _texcoords = ogl::create_float_buf(&texcoords, 2, ogl::TEXCOORDS_INDEX, gl::FLOAT);\n\n let _normals = ogl::create_float_buf(&normals, 3, ogl::NORMALS_INDEX, gl::FLOAT);\n\n\n", "file_path": "src/renderer/skeleton_mesh.rs", "rank": 16, "score": 28567.59947743975 }, { "content": "/// Drwas the bones of the joints specified by the joints array and 
their world_transforms array\n\npub fn draw_bones(world_transforms: &[Mat4], joints: &[Joint], shader: &Shader) {\n\n let mut positions = Vec::new();\n\n\n\n for (i, joint) in joints.iter().enumerate() {\n\n if let Some(parent) = joint.parent {\n\n let pos = world_transforms[i] * Vec4::new(0., 0., 0., 1.);\n\n positions.push(pos.xyz());\n\n\n\n let pos = world_transforms[parent] * Vec4::new(0., 0., 0., 1.);\n\n positions.push(pos.xyz());\n\n }\n\n }\n\n\n\n let texcoords = vec![Vec2::ZERO; positions.len()];\n\n let normals = vec![Vec3::ZERO; positions.len()];\n\n\n\n let mut vao = 0;\n\n\n\n unsafe {\n\n gl::GenVertexArrays(1, &mut vao);\n", "file_path": "src/renderer/skeleton_mesh.rs", "rank": 17, "score": 26446.23264279496 }, { "content": "/// Modifies camera state based on the mouse / keyboard inputs\n\nfn handle_inputs(event_pump: &mut EventPump, camera: &mut Camera) {\n\n let k = event_pump.keyboard_state();\n\n\n\n if k.is_scancode_pressed(Scancode::W) {\n\n camera.move_forward(1.0);\n\n }\n\n\n\n if k.is_scancode_pressed(Scancode::S) {\n\n camera.move_backward(1.0);\n\n }\n\n\n\n if k.is_scancode_pressed(Scancode::A) {\n\n camera.strafe_left(1.0);\n\n }\n\n\n\n if k.is_scancode_pressed(Scancode::D) {\n\n camera.strafe_right(1.0);\n\n }\n\n\n\n let mouse_state = event_pump.mouse_state();\n\n let mouse_x = mouse_state.x() as f32;\n\n let mouse_y = mouse_state.y() as f32;\n\n\n\n if mouse_state.right() {\n\n camera.adjust_look(mouse_x, mouse_y);\n\n } else {\n\n camera.set_x_y(mouse_x, mouse_y)\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 18, "score": 19700.10501723956 }, { "content": " /// Vertical angle from center\n\n zenith: f32,\n\n /// Signals that the view transformation needs to be recomputed\n\n changed: bool,\n\n /// Cache of the view matrix\n\n view_matrix: Mat4,\n\n}\n\n\n\nimpl Camera {\n\n /// Creates the camera\n\n pub fn new(\n\n pos: Vec3,\n\n move_speed: f32,\n\n look_sensitivity: f32,\n\n window_width: u32,\n\n window_height: u32,\n\n ) 
-> Self {\n\n Self {\n\n pos,\n\n dir: Vec3::new(0., 0., -1.),\n", "file_path": "src/camera.rs", "rank": 19, "score": 15.260062988810343 }, { "content": " pub weights: Vec<[f32; 4]>,\n\n}\n\n\n\nimpl PrimSkin {\n\n pub fn new(joints: Vec<[u32; 4]>, weights: Vec<[f32; 4]>) -> Self {\n\n Self { joints, weights }\n\n }\n\n}\n\n\n\n/// Vertex indices for a primitive.\n\n///\n\n/// Better than using generics here.\n\npub enum Indices {\n\n U32(Vec<u32>),\n\n U16(Vec<u16>),\n\n U8(Vec<u8>),\n\n}\n\n\n\nimpl Indices {\n\n /// The size (in bytes) of the buffer\n", "file_path": "src/model/mesh.rs", "rank": 20, "score": 12.276954864609284 }, { "content": "use eyre::{eyre, Context, Result};\n\nuse gl::types::GLenum;\n\nuse glam::{Mat4, Vec3, Vec4};\n\nuse std::{fs, ptr};\n\n\n\n/// Represents an OpenGL shader.\n\n///\n\n/// Allows setting uniforms with set_<> methods.\n\n///\n\n/// Use the `render` method for draw calls.\n\npub struct Shader {\n\n pub id: u32,\n\n}\n\n\n\nimpl Shader {\n\n /// Loads a vertex shader and a fragment shader from specified paths and tries to create a shader program\n\n pub fn from_file(vs_path: &str, fs_path: &str) -> Result<Shader> {\n\n let mut vs_src = fs::read(vs_path).wrap_err(\"Couldn't load the vertex shader file\")?;\n\n let mut fs_src = fs::read(fs_path).wrap_err(\"Couldn't load the fragment shader file\")?;\n\n\n", "file_path": "src/ogl/shader.rs", "rank": 21, "score": 11.97902129119591 }, { "content": "use std::time::Instant;\n\n\n\nuse eyre::{eyre, Result};\n\nuse glam::{Quat, Vec3};\n\nuse gltf::animation::{\n\n util::{ReadOutputs, Rotations},\n\n Interpolation,\n\n};\n\n\n\nuse super::DataBundle;\n\n\n\n/// Contains animation data and also the current state of the animation\n\npub struct Animations {\n\n pub animations: Vec<Animation>,\n\n pub animation_control: AnimationControl,\n\n}\n\n\n\n/// The type of current animation\n\npub enum AnimationControl {\n\n Loop {\n", "file_path": "src/model/animation.rs", "rank": 22, "score": 
11.768158103346874 }, { "content": "//! PGRF2 project - skeletal animation\n\n//!\n\n//! `main` function is the entry-point\n\nuse std::{thread, time::Duration};\n\n\n\nuse camera::Camera;\n\nuse eyre::Result;\n\nuse glam::{Mat4, Vec3};\n\nuse gui::Gui;\n\nuse model::Model;\n\nuse renderer::Renderer;\n\nuse sdl2::{keyboard::Scancode, EventPump};\n\n\n\nuse window::MyWindow;\n\n\n\n/// A module for working with a basic free camera.\n\nmod camera;\n\n\n\n/// All of the code for drawing the GUI using egui.\n\nmod gui;\n", "file_path": "src/main.rs", "rank": 23, "score": 11.434512708298122 }, { "content": "use std::{ptr, time::Instant};\n\n\n\nuse eyre::Result;\n\nuse glam::{Mat4, Vec3, Vec4};\n\n\n\nuse crate::{\n\n camera::Camera,\n\n gui::Gui,\n\n model::{\n\n AnimationControl, AnimationTransform, Joint, Mesh, Model, Node, Primitive, PrimitiveTexture,\n\n },\n\n ogl::{shader::Shader, uniform_buffer::UniformBuffer},\n\n window::MyWindow,\n\n};\n\n\n\nmod joint_transforms;\n\nmod lighting;\n\nmod material;\n\nmod settings;\n\nmod skeleton_mesh;\n", "file_path": "src/renderer.rs", "rank": 24, "score": 10.946228050351152 }, { "content": "use eyre::Result;\n\nuse glam::Mat4;\n\n\n\nuse super::{DataBundle, Transform};\n\n\n\n/// A structure containing the joint data (the skeleton).\n\n/// The skeleton is represented as a tree in the gltf format.\n\n///\n\n/// I decided to store the joint hierarchy in a flat buffer.\n\n/// Every Joint has an Optional index to it's parent (the index of the root is `Option::None`).\n\n/// The parent nodes are always place *before* their child nodes in the buffer.\n\npub struct Joints {\n\n pub joints: Vec<Joint>,\n\n}\n\n\n\nimpl Joints {\n\n /// Creates the joint hierarchy from the gltf::Skin struct, gltf::Scene struct and the DataBundle\n\n pub fn from_gltf(\n\n bundle: &mut DataBundle,\n\n skin: &gltf::Skin,\n", "file_path": "src/model/joints.rs", "rank": 25, "score": 9.852644096929234 }, { "content": " up: Vec3::new(0., 1., 0.),\n\n 
move_speed,\n\n look_sensitivity,\n\n current_x: window_width as f32 / 2.,\n\n current_y: window_height as f32 / 2.,\n\n azimuth: 0.,\n\n zenith: 0.,\n\n changed: true,\n\n view_matrix: Mat4::IDENTITY,\n\n }\n\n }\n\n\n\n /// Returns the view matrix (either cached or recomputed)\n\n pub fn view_mat(&mut self) -> Mat4 {\n\n if self.changed {\n\n self.changed = false;\n\n self.view_matrix = Mat4::look_at_rh(self.pos, self.pos + self.dir, self.up);\n\n }\n\n\n\n self.view_matrix\n", "file_path": "src/camera.rs", "rank": 26, "score": 9.758263046533418 }, { "content": "use std::{mem::size_of, ptr};\n\n\n\nuse glam::Vec4;\n\n\n\nuse crate::ogl::uniform_buffer::UniformBufferElement;\n\n\n\n/// Uniform buffer element that stores the material settings\n\npub struct Material {\n\n pub base_color_factor: Vec4,\n\n}\n\n\n\nimpl Material {\n\n pub fn new() -> Self {\n\n Self {\n\n base_color_factor: Vec4::splat(1.),\n\n }\n\n }\n\n}\n\n\n\nimpl UniformBufferElement for Material {\n", "file_path": "src/renderer/material.rs", "rank": 27, "score": 9.754216586880048 }, { "content": "use std::{mem::size_of, ptr};\n\n\n\nuse glam::Vec3;\n\n\n\nuse crate::ogl::uniform_buffer::UniformBufferElement;\n\n\n\n/// Uniform buffer element that stores the lighing data\n\npub struct Lighting {\n\n pub light_pos: Vec3,\n\n}\n\n\n\nimpl Lighting {\n\n pub fn new(light_pos: Vec3) -> Self {\n\n Self { light_pos }\n\n }\n\n}\n\n\n\nimpl UniformBufferElement for Lighting {\n\n fn update(&self) {\n\n // GLSL vec3 has an alignment of 16 bytes (4 floats)\n", "file_path": "src/renderer/lighting.rs", "rank": 28, "score": 9.528610459524055 }, { "content": " settings: UniformBuffer::new(Settings::new()),\n\n material: UniformBuffer::new(Material::new()),\n\n lighting: UniformBuffer::new(Lighting::new(Vec3::new(400., 1000., 400.))),\n\n node_animation_transforms: Vec::new(),\n\n })\n\n }\n\n\n\n /// Render a new frame\n\n pub fn render(\n\n &mut self,\n\n models: &mut [Model],\n\n camera: &mut Camera,\n\n 
window: &MyWindow,\n\n gui_state: &Gui,\n\n ) {\n\n unsafe {\n\n gl::Viewport(0, 0, window.width as i32, window.height as i32);\n\n gl::Enable(gl::DEPTH_TEST);\n\n\n\n gl::Enable(gl::CULL_FACE);\n", "file_path": "src/renderer.rs", "rank": 29, "score": 9.497133844290271 }, { "content": "use std::{mem::size_of, ptr};\n\n\n\nuse crate::ogl::uniform_buffer::UniformBufferElement;\n\n\n\n/// Uniform buffer element that stores the rendering 'settings' (controls)\n\npub struct Settings {\n\n pub do_skinning: bool,\n\n}\n\n\n\nimpl Settings {\n\n pub fn new() -> Self {\n\n Self { do_skinning: false }\n\n }\n\n}\n\n\n\nimpl UniformBufferElement for Settings {\n\n fn update(&self) {\n\n let size = size_of::<i32>();\n\n let num = if self.do_skinning { 1 } else { 0 };\n\n\n", "file_path": "src/renderer/settings.rs", "rank": 30, "score": 9.395943238453711 }, { "content": " pub name: String,\n\n /// Children nodes\n\n pub children: Vec<Node>,\n\n /// Optional mesh data of the node (can contain multiple primitives)\n\n pub mesh: Option<Mesh>,\n\n /// Transform of the node on the hierarchy\n\n pub transform: Mat4,\n\n /// Optional skeleton data this node is root of\n\n pub joints: Option<Joints>,\n\n}\n\n\n\nimpl Node {\n\n /// Crate a node from a gltf::Node structure\n\n fn from_gltf(\n\n node: &gltf::Node,\n\n bundle: &mut DataBundle,\n\n id: &mut u32,\n\n scene: &gltf::Scene,\n\n ) -> Result<Self> {\n\n let mut children = Vec::new();\n", "file_path": "src/model.rs", "rank": 31, "score": 9.35145375354497 }, { "content": "use std::{mem::size_of, ptr};\n\n\n\nuse glam::Mat4;\n\n\n\nuse crate::ogl::uniform_buffer::UniformBufferElement;\n\n\n\n/// Uniform buffer element that stores the transformation matrices\n\npub struct Transforms {\n\n pub projection: Mat4,\n\n pub view: Mat4,\n\n pub model: Mat4,\n\n}\n\n\n\nimpl Transforms {\n\n pub fn new_indentity() -> Self {\n\n Self {\n\n projection: Mat4::IDENTITY,\n\n view: Mat4::IDENTITY,\n\n model: Mat4::IDENTITY,\n\n }\n", "file_path": 
"src/renderer/transforms.rs", "rank": 32, "score": 9.167172542145813 }, { "content": "use std::{\n\n ffi::{c_void, CStr},\n\n mem::size_of,\n\n ptr,\n\n};\n\n\n\n/// Abstraction for working with OpenGL Shaders.\n\npub mod shader;\n\n\n\n/// Abstraction for working with OpenGL Uniform Buffers.\n\npub mod uniform_buffer;\n\n\n\n// Indices of the vertex attributes\n\npub const POS_INDEX: u32 = 0;\n\npub const TEXCOORDS_INDEX: u32 = 1;\n\npub const NORMALS_INDEX: u32 = 2;\n\npub const JOINTS_INDEX: u32 = 3;\n\npub const WEIGHTS_INDEX: u32 = 4;\n\n\n\n/// Create an opengl buffer with floating-point content.\n\n///\n\n/// 'buffer' is a reference to a slice of T.\n\n///\n\n/// 'components', 'attrib index' and 'typ' have the same meaning as the respective\n\n/// arguments in glVertexAttribPointer.\n", "file_path": "src/ogl.rs", "rank": 33, "score": 9.061448336073708 }, { "content": "use std::{mem::size_of, ptr};\n\n\n\nuse glam::Mat4;\n\n\n\nuse crate::ogl::uniform_buffer::UniformBufferElement;\n\n\n\nconst MAX_JOINT_TRANSFORMS: usize = 256;\n\n\n\n/// Uniform buffer element that stores the vertex joint transforms\n\npub struct JointTransforms {\n\n pub matrices: Vec<Mat4>,\n\n}\n\n\n\nimpl JointTransforms {\n\n pub fn new() -> Self {\n\n Self {\n\n matrices: Vec::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/renderer/joint_transforms.rs", "rank": 34, "score": 9.002743831632706 }, { "content": "use std::time::Instant;\n\n\n\nuse egui::{CollapsingHeader, CtxRef, RichText, Slider, Ui};\n\nuse glam::{Quat, Vec3};\n\n\n\nuse crate::{\n\n camera::Camera,\n\n model::{AnimationControl, Animations, Joint, Model, Node},\n\n};\n\n\n\n/// Contains the current state of the GUI.\n\n/// Implements methods for displaying the widgets.\n\npub struct Gui {\n\n /// Default 0 (assuming that there is at least 1 model in the scene)\n\n pub selected_model: usize,\n\n /// If joints should be visible inside of the mesh\n\n pub draw_skeleton: bool,\n\n /// If the mesh should be visible\n\n pub 
mesh_visible: bool,\n\n}\n", "file_path": "src/gui.rs", "rank": 35, "score": 8.806810825563408 }, { "content": "use std::path::Path;\n\n\n\nuse eyre::{eyre, Result};\n\nuse glam::{Mat4, Quat, Vec3};\n\nuse gltf::scene::Transform as GTransform;\n\n\n\nmod animation;\n\nmod joints;\n\nmod mesh;\n\nmod transform;\n\n\n\npub use self::{\n\n animation::{Animation, AnimationControl, AnimationTransform, AnimationTransforms, Animations},\n\n joints::{Joint, Joints},\n\n mesh::{Mesh, Primitive, PrimitiveTexture},\n\n transform::Transform,\n\n};\n\n\n\n/// Image and vertex data of the asset.\n\npub struct DataBundle {\n", "file_path": "src/model.rs", "rank": 36, "score": 8.572407079756925 }, { "content": "### Trait\n\n\n\nTrait je obdoba interfacu. </br>\n\nNapř. ve standardní knihovně existuje trait `Debug`, který umožňuje vypsat hodnotu na standardní výstup pomocí standardních `print` funkcí.\n\n\n\nV tomto projektu využívám trait např. pro podporu OpenGL UniformBufferů.\n\n\n\n\n\n```\n\npub struct UniformBuffer<T: UniformBufferElement> {\n\n pub id: u32,\n\n pub inner: T,\n\n}\n\n\n\npub trait UniformBufferElement {\n\n /// The binding port\n\n const BINDING: u32;\n\n /// Update buffer data using gl::BufferSubData\n\n fn update(&self);\n\n /// Allocate data for the element with gl::BufferData\n\n fn init_buffer(&self);\n\n}\n\n```\n\n\n\nStruktura `UniformBuffer` je generická, přičemž do `T` je možné \"dosadit\" pouze typy, které implementují trait `UniformBufferElement`. 
Trait potom implementuju pro všechny typy, které chci používat jako UniformBuffer v shaderech.\n\n\n\nNěkteré traity umí kompilátor (či makra) vygenerovat automaticky pomocí `Derive` syntaxe:\n\n\n\n```\n\n#[Derive(Debug)] // Kompilátor sám vygeneruje kód pro implementaci Debug traity pro danou strukturu\n\nstruct Point {\n\n x: f32,\n\n y: f32,\n\n z: f32,\n\n}\n\n```\n\n\n", "file_path": "RustSyntax.md", "rank": 37, "score": 7.904137382867829 }, { "content": "/// Abstraction for working with UniformBuffers.\n\n/// UniformBuffer is generic over T, and T must implement the UniformBufferElement trait.\n\npub struct UniformBuffer<T: UniformBufferElement> {\n\n pub id: u32,\n\n pub inner: T,\n\n}\n\n\n\nimpl<T: UniformBufferElement> UniformBuffer<T>\n\nwhere\n\n T: UniformBufferElement,\n\n{\n\n /// Generate a new UniformBuffer and allocate memory for it\n\n pub fn new(inner: T) -> Self {\n\n let mut id: u32 = 0;\n\n\n\n unsafe {\n\n gl::GenBuffers(1, &mut id);\n\n gl::BindBuffer(gl::UNIFORM_BUFFER, id);\n\n\n\n let binding = T::BINDING;\n", "file_path": "src/ogl/uniform_buffer.rs", "rank": 38, "score": 7.7294049767530755 }, { "content": "use std::mem::size_of;\n\n\n\nuse eyre::{eyre, Result};\n\nuse gl::types::GLenum;\n\nuse glam::{Vec2, Vec3, Vec4};\n\nuse gltf::{\n\n image::Format,\n\n mesh::util::ReadIndices,\n\n texture::{MagFilter, MinFilter, WrappingMode},\n\n};\n\n\n\nuse crate::ogl;\n\n\n\nuse super::DataBundle;\n\n\n\n/// Gltf terminology is needlessly confusing.\n\n/// A gltf 'Mesh' contains multiple real sub-meshes (called Primitives in the gltf parlance)\n\npub struct Mesh {\n\n /// 'Primitives' of the 'mesh'\n\n // TODO: could be optimized - most meshes probably only contain a single primitive - avoid allocating a vector\n", "file_path": "src/model/mesh.rs", "rank": 39, "score": 7.570332959085409 }, { "content": "use glam::{Mat4, Quat, Vec3};\n\nuse gltf::scene::Transform as GTransform;\n\n\n\n/// Describes the transformation of a Node or a Joint\n\npub 
struct Transform {\n\n /// Local translation relative to the parent joint\n\n pub translation: Vec3,\n\n /// Local rotation relative to the parent joint\n\n pub rotation: Quat,\n\n /// Local scale relative to the parent joint\n\n pub scale: Vec3,\n\n}\n\n\n\nimpl Transform {\n\n /// Creates the transform from the gltf::Node struct\n\n pub fn from_gltf(node: &gltf::Node) -> Self {\n\n let (translation, rotation, scale) = match node.transform() {\n\n GTransform::Matrix { matrix: mat } => {\n\n // https://www.khronos.org/registry/glTF/specs/2.0/glTF-2.0.html#transformations\n\n // \"When matrix is defined, it MUST be decomposable to TRS properties.\"\n", "file_path": "src/model/transform.rs", "rank": 40, "score": 7.515815790870746 }, { "content": " let persp = Mat4::perspective_rh(\n\n f32::to_radians(60.),\n\n window.width as f32 / window.height as f32,\n\n 0.1,\n\n 3000.,\n\n );\n\n\n\n let model = &mut models[gui_state.selected_model];\n\n\n\n self.transforms.inner.projection = persp;\n\n self.transforms.inner.view = camera.view_mat();\n\n self.transforms.inner.model = model.transform;\n\n self.transforms.update();\n\n\n\n self.recalculate_animation(model);\n\n\n\n let transform = model.transform;\n\n self.render_node(&mut model.root, transform, gui_state);\n\n }\n\n\n", "file_path": "src/renderer.rs", "rank": 41, "score": 7.452709969958802 }, { "content": "impl Animation {\n\n pub fn new(\n\n channels: Vec<Channel>,\n\n current_time: f32,\n\n end_time: f32,\n\n name: Option<String>,\n\n ) -> Self {\n\n Self {\n\n channels,\n\n current_time,\n\n end_time,\n\n name,\n\n }\n\n }\n\n\n\n /// Creates the animation from a gltf::Document struct and the DataBundle\n\n pub fn from_gltf(gltf: &gltf::Document, bundle: &DataBundle) -> Result<Animations> {\n\n let mut animations = Vec::new();\n\n\n\n for animation in gltf.animations() {\n", "file_path": "src/model/animation.rs", "rank": 42, "score": 7.403497523586783 }, { "content": " pub primitives: Vec<Primitive>,\n\n /// 
Name of the 'Mesh'\n\n pub name: Option<String>,\n\n}\n\n\n\nimpl Mesh {\n\n /// Create a mesh from the gltf::Mesh struct and the DataBundle\n\n pub fn from_gltf(mesh: &gltf::Mesh, bundle: &mut DataBundle) -> Result<Self> {\n\n let name = mesh.name().map(|n| n.to_owned());\n\n\n\n let mut primitives = Vec::new();\n\n for primitive in mesh.primitives() {\n\n let primitive = Primitive::from_gltf(&primitive, bundle)?;\n\n primitives.push(primitive);\n\n }\n\n\n\n Ok(Mesh { primitives, name })\n\n }\n\n}\n\n\n", "file_path": "src/model/mesh.rs", "rank": 43, "score": 7.3892844083948175 }, { "content": " /// An artifical root node\n\n pub root: Node,\n\n /// Name of the model\n\n pub name: String,\n\n /// Animation data\n\n pub animations: Animations,\n\n /// Model transforms of the whole object\n\n pub transform: Mat4,\n\n}\n\n\n\nimpl Model {\n\n /// Load the model from a path to a gltf file\n\n pub fn from_gltf(path: &str) -> Result<Model> {\n\n let (gltf, buffers, images) = gltf::import(path)?;\n\n let name = Path::new(path)\n\n .file_name()\n\n .map(|osstr| osstr.to_string_lossy().to_string())\n\n .unwrap_or_else(|| \"N/A\".to_string());\n\n\n\n let mut bundle = DataBundle::new(buffers, images);\n", "file_path": "src/model.rs", "rank": 44, "score": 7.354282542360237 }, { "content": "\n\nimpl Gui {\n\n pub fn new() -> Self {\n\n Self {\n\n selected_model: 0,\n\n draw_skeleton: false,\n\n mesh_visible: true,\n\n }\n\n }\n\n\n\n /// Creates the GUI.\n\n ///\n\n /// Immediate mode GUI - is called every frame.\n\n pub fn create_gui(&mut self, scene: &mut [Model], camera: &mut Camera, egui_ctx: &mut CtxRef) {\n\n self.gui_model_hierarchy_window(scene, egui_ctx);\n\n self.gui_joints_window(&mut scene[self.selected_model], egui_ctx);\n\n self.gui_side_panel(scene, camera, egui_ctx);\n\n }\n\n\n\n /// Create the subwindow containing the model hierarchy\n", "file_path": "src/gui.rs", "rank": 45, "score": 7.2680856478534945 }, { "content": " pub fn size(&self) -> usize {\n\n 
match self {\n\n Indices::U32(buf) => buf.len() * size_of::<u32>(),\n\n Indices::U16(buf) => buf.len() * size_of::<u16>(),\n\n Indices::U8(buf) => buf.len() * size_of::<u8>(),\n\n }\n\n }\n\n\n\n /// The lenght (in elements) of the buffer\n\n pub fn len(&self) -> usize {\n\n match self {\n\n Indices::U32(buf) => buf.len(),\n\n Indices::U16(buf) => buf.len(),\n\n Indices::U8(buf) => buf.len(),\n\n }\n\n }\n\n\n\n /// A pointer to the start of the buffer\n\n pub fn ptr(&self) -> *const std::ffi::c_void {\n\n match self {\n", "file_path": "src/model/mesh.rs", "rank": 46, "score": 6.3882568873552374 }, { "content": " render();\n\n\n\n gl::UseProgram(0);\n\n }\n\n }\n\n\n\n /// Tries to compile a shader and checks for compilation errors.\n\n fn compile_shader(src: &[u8], typ: GLenum) -> Result<u32> {\n\n unsafe {\n\n let shader = gl::CreateShader(typ);\n\n gl::ShaderSource(shader, 1, &(src.as_ptr() as _), ptr::null_mut());\n\n gl::CompileShader(shader);\n\n\n\n let mut res = 0;\n\n let mut info_log = [0u8; 512];\n\n let mut info_len = 0;\n\n\n\n gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut res);\n\n\n\n if res == 0 {\n", "file_path": "src/ogl/shader.rs", "rank": 47, "score": 6.176652196198844 }, { "content": "}\n\n\n\nimpl Primitive {\n\n /// Creates the primitive from the gltf::Primitive struct and the DataBundle\n\n pub fn from_gltf(primitive: &gltf::Primitive, bundle: &mut DataBundle) -> Result<Self> {\n\n let mode = primitive.mode();\n\n\n\n if mode != gltf::mesh::Mode::Triangles {\n\n return Err(eyre!(\"primitive mode: '{mode:?}' is not impelemnted\"));\n\n }\n\n\n\n let reader = primitive.reader(|buffer| Some(&bundle.buffers[buffer.index()]));\n\n\n\n let positions = reader\n\n .read_positions()\n\n .ok_or(eyre!(\"primitive doesn't containt positions\"))?\n\n .map(Vec3::from)\n\n .collect();\n\n\n\n let indices = match reader\n", "file_path": "src/model/mesh.rs", "rank": 48, "score": 6.048939393880213 }, { "content": "\n\n/// Represents a single gltf 2.0 model 
(used models only have 1 scene).\n\nmod model;\n\n\n\n/// Handles rendering the whole scene.\n\nmod renderer;\n\n\n\n/// Abstractions for working with OpenGL.\n\nmod ogl;\n\n\n\n/// Handles window creation and egui boilerplate.\n\nmod window;\n\n\n\n/// Creates the window, configures OpenGL, sets up the scene and begins the render loop.\n", "file_path": "src/main.rs", "rank": 49, "score": 6.003247846484417 }, { "content": " active_animation: usize,\n\n start_time: Instant,\n\n },\n\n Controllable {\n\n active_animation: usize,\n\n },\n\n Static,\n\n}\n\n\n\n/// Contains all animation data\n\npub struct Animation {\n\n pub channels: Vec<Channel>,\n\n /// Current time of the animation\n\n pub current_time: f32,\n\n /// The time in seconds of the last keyframe, start time is implicitly 0\n\n pub end_time: f32,\n\n /// Optional name of the animation\n\n pub name: Option<String>,\n\n}\n\n\n", "file_path": "src/model/animation.rs", "rank": 50, "score": 5.974272451277874 }, { "content": " /// Index of the node this channel is applied to\n\n pub node: usize,\n\n /// Times of the keyframes\n\n pub keyframe_times: Vec<f32>,\n\n /// Transforms that should be applied to the respective node\n\n pub transforms: AnimationTransforms,\n\n /// The type of the interpolation that should be applied between the keyframes\n\n pub interpolation_type: Interpolation,\n\n}\n\n\n\nimpl Channel {\n\n pub fn new(\n\n node_index: usize,\n\n keyframe_times: Vec<f32>,\n\n transforms: AnimationTransforms,\n\n interpolation_type: Interpolation,\n\n ) -> Self {\n\n Self {\n\n node: node_index,\n\n keyframe_times,\n", "file_path": "src/model/animation.rs", "rank": 51, "score": 5.6566955138802175 }, { "content": " gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, wrap_s as i32);\n\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, wrap_t as i32);\n\n }\n\n }\n\n}\n\n\n\n/// Texture info for a primitive.\n\n///\n\n/// If the primitive has a texture, copy the texture information from the Model's 
gl_textures.\n\n///\n\n/// If not, the base_color_factor serves as the object color.\n\n#[derive(Clone)]\n\npub enum PrimitiveTexture {\n\n None { base_color_factor: Vec4 },\n\n Some { gl_id: u32, base_color_factor: Vec4 },\n\n}\n\n\n\n/// Optional skin data for a primitive.\n\npub struct PrimSkin {\n\n pub joints: Vec<[u32; 4]>,\n", "file_path": "src/model/mesh.rs", "rank": 52, "score": 5.639105213945496 }, { "content": "\n\n# Rust syntax\n\n\n\nZde jsem se snažil alespoň základně popsat syntax Rustu. </br>\n\nSyntax Rustu je trochu zvláštní, protože kombinuje prvky C++, OCaml, Haskell a trochu i Ruby.\n\n\n\n## Definice proměnných\n\n\n\n```\n\nlet jmeno: datovy typ (není nutné uvádět) = hodnota;\n\n\n\nlet cislo = 42;\n\nlet jmeno: String = \"Praha\";\n\n```\n\n\n\n## Základní datové typy\n\n\n\nZákladní typy mají explicitní velikost.\n\n\n\n```\n\nUnsigned integers - u<počet bitů> - např. u32, u64 ... </br>\n\nSigned integers - i<počet bitů> - např. i32, i64 ... </br>\n\nFloating-point - f<32 / 64> </br>\n\n```\n\n\n\nKonverze mezi datovými typy musí být vždy explicitní:\n\n\n\n```\n\nlet integer: u32 = 1;\n\nlet float: f64 = 5.0;\n\n\n\nlet vysledek = (integer as f64) * float;\n\n// nebo\n\nlet vysledek = integer * (float as u32);\n\n```\n\n\n\nRust podobně jako C++ používá název \"vector\" pro dynamické pole (typ `Vec`).\n\nPro vektory v matematickém smyslu používám typy `Vec2`, `Vec3`, `Vec4` z knihovny `glam`.\n\n\n\n## Mutabilita\n\n\n\nV Rustu se mutabilita rozlišuje 2 způsoby:\n\n\n\n### Proměnné\n\n\n\n```\n\nlet mut cislo = 1; // mohu měnit hodnotu proměnné\n\nlet cislo = 1; // konstanta\n\n```\n\n\n\n### Reference\n\n\n\n```\n\nlet mut cislo = 1;\n\n\n\nlet mut_ref = &mut cislo; // reference, pomocí které mohu hodnotu měnit\n\nlet ref = &cislo; // reference, pomocí které mohu hodnotu pouze číst\n\n```\n\n\n\n## Funkce\n\n\n\n```\n\nfn jmeno_funkce(jmeno_parametru: typ_parametru, ...) 
-> návratový_typ {\n\n ...\n\n}\n\n```\n\n\n\n## Typový systém\n\n\n\n### Struktury\n\n\n\n```\n\nstruct Vertex {\n\n position: Vec3,\n\n color: Vec4,\n\n ...\n\n}\n\n```\n\n\n\nMetody (či \"statické\" funkce) které patří ke struktuře jsou definovány v `impl` bloku.\n\nMetody mají narozdíl od \"statických\" funkcí jako první argument `self` referenci (`&mut self` nebo `&self`)\n\n\n\n```\n\nimpl Vertex {\n\n pub fn dehomog(&mut self) {\n\n ...\n\n }\n\n}\n\n```\n\n\n", "file_path": "RustSyntax.md", "rank": 53, "score": 5.437050487742599 }, { "content": "/// A Primitive represents a single 'mesh' in the normal meaning of that word\n\n/// (a collection of vertices with a specific topology like Triangles or Lines).\n\n///\n\n// TODO: It's not needed to store all this data in RAM.\n\n// TODO: load vertex data without allocation and copying\n\npub struct Primitive {\n\n /// A texture (if any) of this mesh\n\n pub texture_info: PrimitiveTexture,\n\n /// OpenGL VAO identifier\n\n pub vao: u32,\n\n /// Vertex indices\n\n pub indices: Indices,\n\n /// Vertex positions\n\n pub positions: Vec<Vec3>,\n\n /// Vertex texture coordinates\n\n pub texcoords: Vec<Vec2>,\n\n /// Vertex normals\n\n pub normals: Vec<Vec3>,\n\n /// Vertex skin data (joints indices, weights)\n\n pub skin: Option<PrimSkin>,\n", "file_path": "src/model/mesh.rs", "rank": 54, "score": 5.428736448423722 }, { "content": "use glam::{Mat4, Vec3};\n\n\n\n/// A component encapsulating the camera transformations\n\npub struct Camera {\n\n /// Position of the camera\n\n pos: Vec3,\n\n /// The direction vector\n\n dir: Vec3,\n\n /// The 'up' vector\n\n up: Vec3,\n\n /// Move speed\n\n pub move_speed: f32,\n\n /// Look sensitivity\n\n pub look_sensitivity: f32,\n\n /// Last x position of the mouse\n\n current_x: f32,\n\n /// Last y position of the mouse\n\n current_y: f32,\n\n /// Horizontal angle from center\n\n azimuth: f32,\n", "file_path": "src/camera.rs", "rank": 55, "score": 5.421757298441602 }, { "content": " 
Indices::U32(buf) => buf.as_ptr() as _,\n\n Indices::U16(buf) => buf.as_ptr() as _,\n\n Indices::U8(buf) => buf.as_ptr() as _,\n\n }\n\n }\n\n\n\n /// A GL_TYPE corresponding to the variant of the buffer\n\n pub fn gl_type(&self) -> GLenum {\n\n match self {\n\n Indices::U32(_) => gl::UNSIGNED_INT,\n\n Indices::U16(_) => gl::UNSIGNED_SHORT,\n\n Indices::U8(_) => gl::UNSIGNED_BYTE,\n\n }\n\n }\n\n}\n", "file_path": "src/model/mesh.rs", "rank": 56, "score": 5.303411707927665 }, { "content": " material: UniformBuffer<Material>,\n\n #[allow(unused)]\n\n /// Current lighting settings\n\n lighting: UniformBuffer<Lighting>,\n\n /// Current joint / node transforms\n\n node_animation_transforms: Vec<NodeAnimationTransform>,\n\n}\n\n\n\nimpl Renderer {\n\n /// Create a new renderer\n\n pub fn new() -> Result<Self> {\n\n let texture_shader =\n\n Shader::from_file(\"shaders/vs_combined.vert\", \"shaders/fs_texture.frag\")?;\n\n let color_shader = Shader::from_file(\"shaders/vs_combined.vert\", \"shaders/fs_color.frag\")?;\n\n\n\n Ok(Self {\n\n texture_shader,\n\n color_shader,\n\n transforms: UniformBuffer::new(Transforms::new_indentity()),\n\n joint_transforms: UniformBuffer::new(JointTransforms::new()),\n", "file_path": "src/renderer.rs", "rank": 57, "score": 5.2125483782878685 }, { "content": "}\n\n\n\nimpl Joint {\n\n pub fn new(\n\n node_index: usize,\n\n parent: Option<usize>,\n\n inverse_bind_matrix: Mat4,\n\n transform: Transform,\n\n name: String,\n\n ) -> Self {\n\n Self {\n\n node_index,\n\n parent,\n\n inverse_bind_matrix,\n\n transform,\n\n name,\n\n }\n\n }\n\n}\n", "file_path": "src/model/joints.rs", "rank": 58, "score": 5.100187732595529 }, { "content": " // Add null-terminators\n\n vs_src.push(b'\\0');\n\n fs_src.push(b'\\0');\n\n\n\n let vs = Self::compile_shader(&vs_src, gl::VERTEX_SHADER)?;\n\n let fs = Self::compile_shader(&fs_src, gl::FRAGMENT_SHADER)?;\n\n let shader_program = Self::link_shaders(vs, fs)?;\n\n Ok(Shader { id: shader_program })\n\n 
}\n\n\n\n /// Use this shader to render.\n\n ///\n\n /// Draw calls should be passed using the `render` function parameter.\n\n pub fn render<F>(&self, render: F)\n\n where\n\n F: FnOnce(),\n\n {\n\n unsafe {\n\n gl::UseProgram(self.id);\n\n\n", "file_path": "src/ogl/shader.rs", "rank": 59, "score": 5.09356259768713 }, { "content": " gl::Uniform4f(loc, vec.x, vec.y, vec.z, vec.w);\n\n }\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn set_f32(&self, v: f32, name: &str) {\n\n Self::check_inform_name(name);\n\n unsafe {\n\n let loc = gl::GetUniformLocation(self.id, name.as_ptr() as _);\n\n gl::Uniform1f(loc, v);\n\n }\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn set_u32(&self, v: u32, name: &str) {\n\n Self::check_inform_name(name);\n\n unsafe {\n\n let loc = gl::GetUniformLocation(self.id, name.as_ptr() as _);\n\n gl::Uniform1ui(loc, v);\n\n }\n\n }\n\n\n\n /// Uniform names have to be null-terminated and have to be ASCII (I think...)\n\n fn check_inform_name(name: &str) {\n\n assert!(name.is_ascii());\n\n assert!(name.ends_with('\\0'));\n\n }\n\n}\n", "file_path": "src/ogl/shader.rs", "rank": 60, "score": 5.0915164893037925 }, { "content": " gl::GetShaderInfoLog(shader, 512, &mut info_len as _, info_log.as_mut_ptr() as _);\n\n let info_msg = String::from_utf8_lossy(&info_log);\n\n return Err(eyre!(\"Failed to compile a shader: '{}'\", info_msg));\n\n }\n\n\n\n Ok(shader)\n\n }\n\n }\n\n\n\n /// Tries to link the vertex and fragment shaders (passed by their ids) and checks for linking errors.\n\n fn link_shaders(vs: u32, fs: u32) -> Result<u32> {\n\n unsafe {\n\n let shader_program = gl::CreateProgram();\n\n gl::AttachShader(shader_program, vs);\n\n gl::AttachShader(shader_program, fs);\n\n gl::LinkProgram(shader_program);\n\n\n\n let mut res = 0;\n\n let mut info_log = [0u8; 512];\n\n let mut info_len = 0;\n", "file_path": "src/ogl/shader.rs", "rank": 61, "score": 4.797931317245904 }, { "content": " image.width as i32,\n\n image.height as i32,\n\n 0,\n\n format,\n\n 
gl::UNSIGNED_BYTE,\n\n image.pixels.as_ptr() as _,\n\n );\n\n gl::GenerateMipmap(gl::TEXTURE_2D);\n\n\n\n texture\n\n };\n\n\n\n let texture = PrimitiveTexture::Some {\n\n gl_id: gl_tex_id,\n\n base_color_factor: Vec4::from(base_color_factor),\n\n };\n\n bundle.gl_textures[tex_index] = Some(texture.clone());\n\n texture\n\n }\n\n\n", "file_path": "src/model/mesh.rs", "rank": 62, "score": 4.718658943806123 }, { "content": " .read_indices()\n\n .ok_or(eyre!(\"primitive doesn't containt indices\"))?\n\n {\n\n ReadIndices::U32(b) => Indices::U32(b.collect()),\n\n ReadIndices::U16(b) => Indices::U16(b.collect()),\n\n ReadIndices::U8(b) => Indices::U8(b.collect()),\n\n };\n\n\n\n let mut texcoords = Vec::new();\n\n let mut texture_set = 0;\n\n while let Some(texcoords_reader) = reader.read_tex_coords(texture_set) {\n\n if texture_set >= 1 {\n\n // Used for loading textures other than the diffuse map\n\n //eprintln!(\"WARN: primitive has more than 1 texture coordinate set\");\n\n break;\n\n }\n\n\n\n texcoords = texcoords_reader.into_f32().map(Vec2::from).collect();\n\n\n\n texture_set += 1;\n", "file_path": "src/model/mesh.rs", "rank": 63, "score": 4.699316496220457 }, { "content": " /// Vertex data\n\n buffers: Vec<gltf::buffer::Data>,\n\n /// Texture data\n\n images: Vec<gltf::image::Data>,\n\n /// To keep track if which textures were already sent to the GPU\n\n pub gl_textures: Vec<Option<PrimitiveTexture>>,\n\n}\n\n\n\nimpl DataBundle {\n\n fn new(buffers: Vec<gltf::buffer::Data>, images: Vec<gltf::image::Data>) -> Self {\n\n Self {\n\n buffers,\n\n gl_textures: vec![Option::None; images.len()],\n\n images,\n\n }\n\n }\n\n}\n\n\n\n/// This represents a gltf model and contains necessary data for rendering.\n\npub struct Model {\n", "file_path": "src/model.rs", "rank": 64, "score": 4.608263591485254 }, { "content": "mod transforms;\n\n\n\nuse self::{\n\n joint_transforms::JointTransforms, lighting::Lighting, material::Material, settings::Settings,\n\n 
transforms::Transforms,\n\n};\n\n\n\n/// A component responsible for rendering the scene.\n\npub struct Renderer {\n\n /// Shader for meshes containing texture data\n\n texture_shader: Shader,\n\n /// Shader for meshes without textures\n\n color_shader: Shader,\n\n /// Current MVP transformation matrices\n\n transforms: UniformBuffer<Transforms>,\n\n /// Joint transformation matrices\n\n joint_transforms: UniformBuffer<JointTransforms>,\n\n /// Rendering settings\n\n settings: UniformBuffer<Settings>,\n\n /// Current mesh material\n", "file_path": "src/renderer.rs", "rank": 65, "score": 3.8165706864085243 }, { "content": " gl::BindTexture(gl::TEXTURE_2D, texture);\n\n\n\n self.set_texture_sampler(&tex.sampler());\n\n\n\n let image = &bundle.images[tex_index];\n\n\n\n assert!(image.width.is_power_of_two());\n\n assert!(image.height.is_power_of_two());\n\n\n\n let (internal_format, format) = match image.format {\n\n Format::R8G8 => (gl::RG8, gl::RG),\n\n Format::R8G8B8 => (gl::RGB8, gl::RGB),\n\n Format::R8G8B8A8 => (gl::RGBA8, gl::RGBA),\n\n f => unimplemented!(\"Unimplemented image format: '{f:?}'\"),\n\n };\n\n\n\n gl::TexImage2D(\n\n gl::TEXTURE_2D,\n\n 0,\n\n internal_format as i32,\n", "file_path": "src/model/mesh.rs", "rank": 66, "score": 3.8066897801601502 }, { "content": "\n\n renderer.render(&mut scene, &mut camera, &window, &gui);\n\n gui.create_gui(&mut scene, &mut camera, &mut window.egui_ctx);\n\n\n\n let should_quit = window.end_frame();\n\n if should_quit {\n\n break 'render_loop;\n\n }\n\n\n\n thread::sleep(Duration::from_millis(3));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 67, "score": 3.7219120783702726 }, { "content": " joints,\n\n inverse_bind_matrices,\n\n );\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A structure representing a single Joint in the skeleton\n\npub struct Joint {\n\n /// The same node index as in the gltf file\n\n pub node_index: usize,\n\n /// An index to the parent node (None if this joint is the root)\n\n 
pub parent: Option<usize>,\n\n /// The matrix that transforms this node to the origin\n\n pub inverse_bind_matrix: Mat4,\n\n /// The transform of the joint relative to it's parent\n\n pub transform: Transform,\n\n /// Name for debug purposes\n\n pub name: String,\n", "file_path": "src/model/joints.rs", "rank": 68, "score": 3.673655643335332 }, { "content": "\n\n fn gui_joints_window(&mut self, model: &mut Model, egui_ctx: &mut CtxRef) {\n\n self.gui_joints_window_helper(&mut model.root, &mut model.animations, egui_ctx);\n\n }\n\n\n\n /// Recursive - creates the joints window and it's nodes\n\n fn gui_joints_window_helper(\n\n &mut self,\n\n node: &mut Node,\n\n animations: &mut Animations,\n\n egui_ctx: &mut CtxRef,\n\n ) {\n\n if let Some(joints) = &mut node.joints {\n\n egui::Window::new(\"Joints\").show(egui_ctx, |ui| {\n\n egui::ScrollArea::vertical().show(ui, |ui| {\n\n for joint in joints.joints.iter_mut() {\n\n let joint_name = &joint.name;\n\n\n\n // FIXME: for some reason there is an ID collision when rendering the droid model... 
maybe a bug in egui ?\n\n CollapsingHeader::new(joint_name).show(ui, |ui| {\n", "file_path": "src/gui.rs", "rank": 69, "score": 3.456493479255786 }, { "content": "use glam::{Mat4, Vec2, Vec3, Vec4, Vec4Swizzles};\n\n\n\nuse crate::{\n\n model::Joint,\n\n ogl::{self, shader::Shader},\n\n};\n\n\n\n// TODO: do not create a new buffer every frame\n\n/// Draws the joints by their current world transforms\n", "file_path": "src/renderer/skeleton_mesh.rs", "rank": 70, "score": 3.4223907103706663 }, { "content": " pub fn strafe_right(&mut self, d: f32) {\n\n let dir = self.dir.cross(self.up);\n\n self.pos += dir * d * self.move_speed;\n\n self.changed = true;\n\n }\n\n\n\n /// Moves the camera sideways to the left\n\n pub fn strafe_left(&mut self, d: f32) {\n\n self.strafe_right(-d);\n\n }\n\n\n\n /// Updates the latest (x,y) mouse position\n\n pub fn set_x_y(&mut self, new_x: f32, new_y: f32) {\n\n self.current_x = new_x;\n\n self.current_y = new_y;\n\n }\n\n\n\n /// Update the (x, y) mouse position and update the azimuth and zenith\n\n pub fn adjust_look(&mut self, new_x: f32, new_y: f32) {\n\n let dx = new_x - self.current_x;\n", "file_path": "src/camera.rs", "rank": 71, "score": 3.324004282249275 }, { "content": " }\n\n\n\n /// Sets the position of the camera\n\n pub fn set_pos(&mut self, pos: Vec3) {\n\n self.pos = pos;\n\n self.changed = true;\n\n }\n\n\n\n /// Moves the camera forward\n\n pub fn move_forward(&mut self, d: f32) {\n\n self.pos += self.dir * d * self.move_speed;\n\n self.changed = true;\n\n }\n\n\n\n /// Moves the camera backward\n\n pub fn move_backward(&mut self, d: f32) {\n\n self.move_forward(-d);\n\n }\n\n\n\n /// Moves the camera sideways to the right\n", "file_path": "src/camera.rs", "rank": 72, "score": 3.28746595250739 }, { "content": " _src: u32,\n\n _typ: u32,\n\n id: u32,\n\n severity: u32,\n\n _len: i32,\n\n msg: *const i8,\n\n _user_param: *mut c_void,\n\n) {\n\n // Buffer creation on NVidia cards\n\n if id == 131185 {\n\n return;\n\n 
}\n\n\n\n match severity {\n\n gl::DEBUG_SEVERITY_NOTIFICATION => print!(\"OpenGL - notification: \"),\n\n gl::DEBUG_SEVERITY_LOW => print!(\"OpenGL - low: \"),\n\n gl::DEBUG_SEVERITY_MEDIUM => print!(\"OpenGL - medium: \"),\n\n gl::DEBUG_SEVERITY_HIGH => print!(\"OpenGL - high: \"),\n\n _ => unreachable!(\"Unknown severity in glDebugCallback: '{}'\", severity),\n\n }\n\n\n\n // TODO: check if the message is guaranteed to be ASCII\n\n let msg = unsafe { CStr::from_ptr(msg) };\n\n println!(\"OpenGL debug message: '{}'\", msg.to_string_lossy())\n\n}\n", "file_path": "src/ogl.rs", "rank": 73, "score": 3.2865717716898057 }, { "content": " fn gui_model_hierarchy_window(&mut self, scene: &[Model], egui_ctx: &mut CtxRef) {\n\n let model = &scene[self.selected_model];\n\n\n\n egui::Window::new(\"Model Hierarchy\")\n\n .scroll2([false, true])\n\n .resizable(true)\n\n .show(egui_ctx, |ui| {\n\n self.gui_node(&model.root, ui);\n\n });\n\n }\n\n\n\n /// Recusrive - creates the node hierarchy inside the model hierarchy window\n\n fn gui_node(&mut self, node: &Node, ui: &mut Ui) {\n\n let default_open = node.children.len() == 1;\n\n\n\n ui.horizontal(|ui| {\n\n if !&node.children.is_empty() {\n\n CollapsingHeader::new(&node.name)\n\n .id_source(node.index)\n\n .default_open(default_open)\n", "file_path": "src/gui.rs", "rank": 74, "score": 3.0981233397828216 }, { "content": " mesh: None,\n\n transform: Mat4::IDENTITY,\n\n joints: None,\n\n };\n\n\n\n Ok(Model {\n\n root,\n\n name,\n\n animations,\n\n transform: Mat4::IDENTITY,\n\n })\n\n }\n\n}\n\n\n\n/// A Node represents a subset of a gltf scene\n\n/// Nodes form a tree hierarchy\n\npub struct Node {\n\n /// The same index as in the gltf file\n\n pub index: usize,\n\n /// Name of the node\n", "file_path": "src/model.rs", "rank": 75, "score": 3.0595487772833874 }, { "content": " start_time: Instant::now(),\n\n }\n\n }\n\n AnimationControl::Loop {\n\n active_animation: _,\n\n start_time: _,\n\n } => {\n\n 
animations.animation_control = AnimationControl::Controllable {\n\n active_animation: i,\n\n }\n\n }\n\n };\n\n }\n\n });\n\n }\n\n }\n\n}\n", "file_path": "src/gui.rs", "rank": 76, "score": 2.9785279026727647 }, { "content": " let end = scales[start_index + 1];\n\n\n\n let interpolated = start.lerp(end, coeff);\n\n return AnimationTransform::Scale(interpolated);\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// The type of all animation transforms in the channel\n\npub enum AnimationTransforms {\n\n Translations(Vec<Vec3>),\n\n Rotations(Vec<Quat>),\n\n Scales(Vec<Vec3>),\n\n}\n\n\n\n/// The type of a single animation transform\n\npub enum AnimationTransform {\n\n Translation(Vec3),\n\n Rotation(Quat),\n\n Scale(Vec3),\n\n}\n", "file_path": "src/model/animation.rs", "rank": 77, "score": 2.9688706027645604 }, { "content": "### Result\n\n\n\nResult je také definován jako enum:\n\n\n\n```\n\nenum Result<T, E> {\n\n Ok(T),\n\n Err(E).\n\n}\n\n```\n\n\n\nPoužívá se v metodách, které buďto vrátí variantu `Ok` s ořekávaným výsledkem, nebo hodnotu `Err` s chybou. 
</br>\n\nS Resulty se často používá `?` syntax, který v případě chyby vrací hodnotu chybu z momentální funkce volajícímu.\n\n\n\n```\n\nfn read_username_from_file() -> Result<String, io::Error> {\n\n let mut f = File::open(\"username.txt\")?; // Pokud nastane chyba při otevření souboru, je funkce přerušena a error je vrácen volající funkci\n\n let mut s = String::new();\n\n f.read_to_string(&mut s)?;\n\n Ok(s)\n\n}\n\n```\n\n\n\n### Panic\n\n\n\nPanic značí chybu, ze které se program nedokáže zotavit a je nutné co nejdříve program ukončit.\n\n\n\n## Unsafe\n\n\n\nRust garantuje bezpečnost práce s pamětí a s více-vláknovými aplikacemi.\n\nPokud je ale potřeba low-level přístup, jako například k OpenGL, tak se musí využívat `unsafe` bloky.\n", "file_path": "RustSyntax.md", "rank": 78, "score": 2.962488465270929 }, { "content": " fn init_buffer(&self) {\n\n unsafe {\n\n gl::BufferData(\n\n gl::UNIFORM_BUFFER,\n\n 3 * size_of::<[f32; 16]>() as isize,\n\n ptr::null() as _,\n\n gl::DYNAMIC_DRAW,\n\n );\n\n }\n\n }\n\n\n\n const BINDING: u32 = 1;\n\n}\n", "file_path": "src/renderer/transforms.rs", "rank": 79, "score": 2.8871897080094024 }, { "content": " }\n\n }\n\n\n\n fn init_buffer(&self) {\n\n let size = MAX_JOINT_TRANSFORMS * size_of::<[f32; 16]>();\n\n\n\n unsafe {\n\n gl::BufferData(\n\n gl::UNIFORM_BUFFER,\n\n size as isize,\n\n ptr::null() as _,\n\n gl::DYNAMIC_DRAW,\n\n );\n\n }\n\n }\n\n\n\n const BINDING: u32 = 2;\n\n}\n", "file_path": "src/renderer/joint_transforms.rs", "rank": 80, "score": 2.739697497559275 }, { "content": "\n\n //\n\n // Uniform setters...\n\n //\n\n\n\n #[allow(unused)]\n\n pub fn set_mat4(&self, mat: Mat4, name: &str) {\n\n Self::check_inform_name(name);\n\n unsafe {\n\n let loc = gl::GetUniformLocation(self.id, name.as_ptr() as _);\n\n gl::UniformMatrix4fv(loc, 1, gl::FALSE, mat.to_cols_array().as_ptr() as _);\n\n }\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn set_mat4_arr(&self, mats: &[Mat4], name: &str) {\n\n 
Self::check_inform_name(name);\n\n\n\n let mats_flat: Vec<f32> = mats.iter().flat_map(|m| m.to_cols_array()).collect();\n\n\n", "file_path": "src/ogl/shader.rs", "rank": 81, "score": 2.6126749605086115 }, { "content": " unsafe {\n\n gl::BufferSubData(\n\n gl::UNIFORM_BUFFER,\n\n 0,\n\n size as isize,\n\n &num as *const i32 as _,\n\n );\n\n }\n\n }\n\n\n\n fn init_buffer(&self) {\n\n let size = size_of::<i32>();\n\n\n\n unsafe {\n\n gl::BufferData(\n\n gl::UNIFORM_BUFFER,\n\n size as isize,\n\n ptr::null() as _,\n\n gl::DYNAMIC_DRAW,\n\n );\n\n }\n\n }\n\n\n\n const BINDING: u32 = 3;\n\n}\n", "file_path": "src/renderer/settings.rs", "rank": 82, "score": 2.508988301930847 }, { "content": " }\n\n}\n\n\n\nimpl UniformBufferElement for Transforms {\n\n fn update(&self) {\n\n let buf: Vec<f32> = [self.projection, self.view, self.model]\n\n .iter()\n\n .flat_map(|mat| mat.to_cols_array())\n\n .collect();\n\n\n\n unsafe {\n\n gl::BufferSubData(\n\n gl::UNIFORM_BUFFER,\n\n 0,\n\n (buf.len() * size_of::<f32>()) as isize,\n\n buf.as_ptr() as _,\n\n );\n\n }\n\n }\n\n\n", "file_path": "src/renderer/transforms.rs", "rank": 83, "score": 2.4721427047468145 }, { "content": " unsafe {\n\n let loc = gl::GetUniformLocation(self.id, name.as_ptr() as _);\n\n gl::UniformMatrix4fv(loc, mats.len() as i32, gl::FALSE, mats_flat.as_ptr() as _);\n\n }\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn set_vec3(&self, vec: Vec3, name: &str) {\n\n Self::check_inform_name(name);\n\n unsafe {\n\n let loc = gl::GetUniformLocation(self.id, name.as_ptr() as _);\n\n gl::Uniform3f(loc, vec.x, vec.y, vec.z);\n\n }\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn set_vec4(&self, vec: Vec4, name: &str) {\n\n Self::check_inform_name(name);\n\n unsafe {\n\n let loc = gl::GetUniformLocation(self.id, name.as_ptr() as _);\n", "file_path": "src/ogl/shader.rs", "rank": 84, "score": 2.4587646537441037 }, { "content": " // skin are scaled to zero simultaneously.\n\n // ..... 
why is this a thing...\n\n // FIXME: scale(0, 0, 0)...\n\n\n\n Self {\n\n translation,\n\n rotation,\n\n scale,\n\n }\n\n }\n\n\n\n /// Constructs the transform combined from translation, rotation and scale\n\n pub fn matrix(&self) -> Mat4 {\n\n Mat4::from_translation(self.translation)\n\n * Mat4::from_quat(self.rotation)\n\n * Mat4::from_scale(self.scale)\n\n }\n\n}\n", "file_path": "src/model/transform.rs", "rank": 85, "score": 2.434287079318076 }, { "content": " let size = 4 * size_of::<f32>();\n\n let buf = self.light_pos.extend(0.).to_array();\n\n\n\n unsafe {\n\n gl::BufferSubData(gl::UNIFORM_BUFFER, 0, size as isize, buf.as_ptr() as _);\n\n }\n\n }\n\n\n\n fn init_buffer(&self) {\n\n let size = 4 * size_of::<f32>();\n\n\n\n unsafe {\n\n gl::BufferData(\n\n gl::UNIFORM_BUFFER,\n\n size as isize,\n\n ptr::null() as _,\n\n gl::DYNAMIC_DRAW,\n\n );\n\n }\n\n }\n\n\n\n const BINDING: u32 = 5;\n\n}\n", "file_path": "src/renderer/lighting.rs", "rank": 86, "score": 2.33426159408941 }, { "content": " fn update(&self) {\n\n let size = 4 * size_of::<f32>();\n\n let buf = self.base_color_factor.to_array();\n\n\n\n unsafe {\n\n gl::BufferSubData(gl::UNIFORM_BUFFER, 0, size as isize, buf.as_ptr() as _);\n\n }\n\n }\n\n\n\n fn init_buffer(&self) {\n\n let size = 4 * size_of::<f32>();\n\n\n\n unsafe {\n\n gl::BufferData(\n\n gl::UNIFORM_BUFFER,\n\n size as isize,\n\n ptr::null() as _,\n\n gl::DYNAMIC_DRAW,\n\n );\n\n }\n\n }\n\n\n\n const BINDING: u32 = 4;\n\n}\n", "file_path": "src/renderer/material.rs", "rank": 87, "score": 2.3141171166796415 }, { "content": "\n\nimpl UniformBufferElement for JointTransforms {\n\n fn update(&self) {\n\n if self.matrices.len() > MAX_JOINT_TRANSFORMS {\n\n todo!(\"Support models with more than 256 joints\");\n\n }\n\n\n\n let buf: Vec<f32> = self\n\n .matrices\n\n .iter()\n\n .flat_map(|mat| mat.to_cols_array())\n\n .collect();\n\n\n\n unsafe {\n\n gl::BufferSubData(\n\n gl::UNIFORM_BUFFER,\n\n 0,\n\n (buf.len() * size_of::<f32>()) as 
isize,\n\n buf.as_ptr() as _,\n\n );\n", "file_path": "src/renderer/joint_transforms.rs", "rank": 88, "score": 2.3029279767613886 }, { "content": " }\n\n\n\n self.texture_shader.render(|| {\n\n draw_mesh(prim.vao, prim);\n\n });\n\n }\n\n };\n\n }\n\n }\n\n\n\n /// Recalculates the skin matrices for each joint\n\n pub fn recalc_skin_matrices(\n\n &mut self,\n\n joints: &mut [Joint],\n\n outer_transform: Mat4,\n\n gui_state: &Gui,\n\n ) {\n\n self.apply_joint_transforms(joints);\n\n\n\n // world transforms of each joint after applying the animation for the current frame\n", "file_path": "src/renderer.rs", "rank": 89, "score": 2.258646764883879 }, { "content": " }\n\n }\n\n\n\n /// Renders the mesh of a node\n\n fn render_mesh(&mut self, mesh: &Mesh, node_transform: Mat4) {\n\n self.transforms.inner.model = node_transform;\n\n self.transforms.update();\n\n\n\n let draw_mesh = |vao: u32, prim: &Primitive| unsafe {\n\n gl::BindVertexArray(vao);\n\n\n\n gl::DrawElements(\n\n gl::TRIANGLES,\n\n prim.indices.len() as i32,\n\n prim.indices.gl_type(),\n\n ptr::null(),\n\n );\n\n\n\n gl::BindVertexArray(0);\n\n };\n", "file_path": "src/renderer.rs", "rank": 90, "score": 2.1822864990846105 }, { "content": " gl::BindBufferBase(gl::UNIFORM_BUFFER, binding, id);\n\n\n\n inner.init_buffer();\n\n gl::BindBuffer(gl::UNIFORM_BUFFER, 0);\n\n }\n\n\n\n let s = Self { id, inner };\n\n s.update();\n\n s\n\n }\n\n\n\n /// Update the UniformBuffer with the current data\n\n pub fn update(&self) {\n\n unsafe {\n\n gl::BindBuffer(gl::UNIFORM_BUFFER, self.id);\n\n\n\n self.inner.update();\n\n\n\n gl::BindBuffer(gl::UNIFORM_BUFFER, 0);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ogl/uniform_buffer.rs", "rank": 91, "score": 2.106646587051483 }, { "content": " .fold(0f32, |a, b| a.max(b));\n\n let animation = Animation::new(channels, 0.1, end_time, name);\n\n\n\n animations.push(animation);\n\n }\n\n\n\n Ok(Animations {\n\n animations,\n\n animation_control: AnimationControl::Static,\n\n })\n\n 
}\n\n\n\n /// <https://www.khronos.org/registry/glTF/specs/2.0/glTF-2.0.html#animations>\n\n /// Implementations MUST use following equations to decode real floating-point\n\n /// value f from a normalized integer c and vise-versa:\n\n ///\n\n /// accessor.componentType int-to-float float-to-int\n\n /// ------------------------------------------------------------------------------\n\n /// signed byte f = max(c / 127.0, -1.0) c = round(f * 127.0)\n\n /// unsigned byte f = c / 255.0 c = round(f * 255.0)\n", "file_path": "src/model/animation.rs", "rank": 92, "score": 2.002977658553355 }, { "content": " Self::show_joint_transforms(joint, animations, ui);\n\n });\n\n }\n\n });\n\n });\n\n } else {\n\n // I assume there is only 1 skeleton in the models we are going to work with\n\n for child_node in &mut node.children {\n\n self.gui_joints_window_helper(child_node, animations, egui_ctx);\n\n }\n\n }\n\n }\n\n\n\n /// Creates the gui for transforms of a specific joint\n\n fn show_joint_transforms(joint: &mut Joint, animations: &mut Animations, ui: &mut Ui) {\n\n let trans = &mut joint.transform.translation;\n\n let (axis, angle) = joint.transform.rotation.to_axis_angle();\n\n let mut angle = angle.to_degrees();\n\n\n\n let response = ui.group(|ui| {\n", "file_path": "src/gui.rs", "rank": 93, "score": 1.9807578209077374 }, { "content": " // Calculate current time inside the animation\n\n let mut since_start = Instant::now().duration_since(start_time).as_secs_f32();\n\n if since_start > anim.end_time {\n\n since_start %= anim.end_time;\n\n }\n\n\n\n anim.current_time = since_start;\n\n active_animation\n\n }\n\n AnimationControl::Controllable { active_animation } => active_animation,\n\n AnimationControl::Static => return,\n\n };\n\n\n\n self.node_animation_transforms.clear();\n\n let anim = &model.animations.animations[active_animation];\n\n let current_time = anim.current_time;\n\n\n\n // Interpolate the animation transforms\n\n for channel in &anim.channels {\n\n let 
keyframe_times = &channel.keyframe_times;\n", "file_path": "src/renderer.rs", "rank": 94, "score": 1.963668502978309 }, { "content": " transforms,\n\n interpolation_type,\n\n }\n\n }\n\n\n\n /// Get a transform at a specific index of (keyframe_times - transforms)\n\n pub fn get_fixed_transform(&self, index: usize) -> AnimationTransform {\n\n match self.interpolation_type {\n\n Interpolation::Linear => {}\n\n Interpolation::Step => todo!(\"Step interpolation\"),\n\n Interpolation::CubicSpline => todo!(\"Cubic spline interpolation\"),\n\n }\n\n\n\n match &self.transforms {\n\n AnimationTransforms::Translations(trans) => {\n\n AnimationTransform::Translation(trans[index])\n\n }\n\n AnimationTransforms::Rotations(rotations) => {\n\n AnimationTransform::Rotation(rotations[index])\n\n }\n", "file_path": "src/model/animation.rs", "rank": 95, "score": 1.9431835309540508 }, { "content": " scene: &gltf::Scene,\n\n ) -> Result<Self> {\n\n let joint_indices: Vec<usize> = skin.joints().map(|j| j.index()).collect();\n\n\n\n let mut joints = Vec::new();\n\n\n\n let reader = skin.reader(|buf| Some(&bundle.buffers[buf.index()]));\n\n let inverse_bind_matrices = match reader.read_inverse_bind_matrices() {\n\n Some(matrices) => matrices.map(|m| Mat4::from_cols_array_2d(&m)).collect(),\n\n None => vec![Mat4::IDENTITY; joints.len()],\n\n };\n\n\n\n // TODO: not great performance-wise\n\n let children: Vec<gltf::Node> = scene.nodes().collect();\n\n\n\n Self::build_hierarchy(\n\n &children,\n\n &joint_indices,\n\n None,\n\n &mut joints,\n", "file_path": "src/model/joints.rs", "rank": 96, "score": 1.9176905214864566 }, { "content": " Mat4::from_cols_array_2d(&mat).to_scale_rotation_translation()\n\n }\n\n GTransform::Decomposed {\n\n translation,\n\n rotation,\n\n scale,\n\n } => {\n\n let translation = Vec3::from(translation);\n\n let scale = Vec3::from(scale);\n\n let rotation = Quat::from_array(rotation);\n\n\n\n (translation, rotation, scale)\n\n }\n\n };\n\n\n\n // 
https://www.khronos.org/registry/glTF/specs/2.0/glTF-2.0.html#transformations\n\n // When the scale is zero on all three axes (by node transform or by animated scale),\n\n // implementations are free to optimize away rendering of the node’s mesh, and all of\n\n // the node’s children’s meshes. This provides a mechanism to animate visibility.\n\n // Skinned meshes must not use this optimization unless all of the joints in the\n", "file_path": "src/model/transform.rs", "rank": 97, "score": 1.9034515294128105 }, { "content": " }\n\n\n\n let normals = reader\n\n .read_normals()\n\n .ok_or(eyre!(\"primitive doesn't containt normals\"))?\n\n .map(Vec3::from)\n\n .collect();\n\n\n\n let skin = match (reader.read_joints(0), reader.read_weights(0)) {\n\n (Some(joints), Some(weights)) => {\n\n let joints = joints.into_u16().map(|j| j.map(|ji| ji as u32)).collect();\n\n // TODO: u8 / u16 joint weights normalization\n\n match weights {\n\n gltf::mesh::util::ReadWeights::U8(_) => todo!(\"U8 weights\"),\n\n gltf::mesh::util::ReadWeights::U16(_) => todo!(\"U16 weights\"),\n\n _ => {}\n\n }\n\n let weights = weights.into_f32().collect();\n\n\n\n Some(PrimSkin::new(joints, weights))\n", "file_path": "src/model/mesh.rs", "rank": 98, "score": 1.9034515294128105 }, { "content": " AnimationTransforms::Scales(scales) => AnimationTransform::Scale(scales[index]),\n\n }\n\n }\n\n\n\n /// Get an interpolated transform between (keyframe_times - transforms)[start_index..start_index + 1]\n\n /// interpolated by the 'coeff' coefficient\n\n /// <https://www.khronos.org/registry/glTF/specs/2.0/glTF-2.0.html#appendix-c-interpolation>\n\n pub fn interpolate_transforms(\n\n &self,\n\n start_index: usize, // end index is always start_index + 1\n\n coeff: f32,\n\n ) -> AnimationTransform {\n\n match self.interpolation_type {\n\n Interpolation::Linear => {}\n\n Interpolation::Step => todo!(\"Step interpolation\"),\n\n Interpolation::CubicSpline => todo!(\"Cubic spline interpolation\"),\n\n }\n\n\n\n 
match &self.transforms {\n\n AnimationTransforms::Translations(trans) => {\n", "file_path": "src/model/animation.rs", "rank": 99, "score": 1.7167301145758818 } ]
Rust
rmqtt-plugins/rmqtt-plugin-template/src/lib.rs
phial3/rmqtt
8c29529e273007178fd0af73dccb6b0bf6729339
use async_trait::async_trait; use rmqtt::{ broker::hook::{Handler, HookResult, Parameter, Register, ReturnType, Type}, plugin::{DynPlugin, DynPluginResult, Plugin}, Result, Runtime, }; #[inline] pub async fn register( runtime: &'static Runtime, name: &'static str, descr: &'static str, default_startup: bool, immutable: bool, ) -> Result<()> { runtime .plugins .register(name, default_startup, immutable, move || -> DynPluginResult { Box::pin(async move { Template::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) }) }) }) .await?; Ok(()) } struct Template { _runtime: &'static Runtime, name: String, descr: String, register: Box<dyn Register>, } impl Template { #[inline] async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> { let register = runtime.extends.hook_mgr().await.register(); Ok(Self { _runtime: runtime, name: name.into(), descr: descr.into(), register }) } } #[async_trait] impl Plugin for Template { #[inline] async fn init(&mut self) -> Result<()> { log::debug!("{} init", self.name); self.register.add(Type::ClientConnack, Box::new(HookHandler::new())).await; self.register.add(Type::ClientSubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::ClientUnsubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::MessageDelivered, Box::new(HookHandler::new())).await; self.register.add(Type::MessagePublish, Box::new(HookHandler::new())).await; self.register.add_priority(Type::ClientSubscribeCheckAcl, 10, Box::new(HookHandler::new())).await; self.register.add_priority(Type::GrpcMessageReceived, 10, Box::new(HookHandler::new())).await; Ok(()) } #[inline] fn name(&self) -> &str { &self.name } #[inline] async fn start(&mut self) -> Result<()> { log::info!("{} start", self.name); self.register.start().await; Ok(()) } #[inline] async fn stop(&mut self) -> Result<bool> { log::info!("{} stop", self.name); self.register.stop().await; Ok(true) } #[inline] fn version(&self) -> &str { "0.1.1" } 
#[inline] fn descr(&self) -> &str { &self.descr } } struct HookHandler {} impl HookHandler { fn new() -> Self { Self {} } } #[async_trait] impl Handler for HookHandler { async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType { match param { Parameter::ClientConnack(connect_info, r) => { log::debug!("client connack, {:?}, {:?}", connect_info, r); } Parameter::ClientSubscribe(_session, c, subscribe) => { log::debug!("{:?} client subscribe, {:?}", c.id, subscribe); } Parameter::ClientUnsubscribe(_session, c, unsubscribe) => { log::debug!("{:?} client unsubscribe, {:?}", c.id, unsubscribe); } Parameter::MessagePublish(_session, c, publish) => { log::debug!("{:?} message publish, {:?}", c.id, publish); } Parameter::MessageDelivered(_session, c, from, _publish) => { log::debug!("{:?} MessageDelivered, {:?}", c.id, from); } Parameter::ClientSubscribeCheckAcl(_s, _c, subscribe) => { log::debug!("{:?} ClientSubscribeCheckAcl, {:?}", _c.id, subscribe); } _ => { log::error!("unimplemented, {:?}", param) } } (true, acc) } }
use async_trait::async_trait; use rmqtt::{ broker::hook::{Handler, HookResult, Parameter, Register, ReturnType, Type}, plugin::{DynPlugin, DynPluginResult, Plugin}, Result, Runtime, }; #[inline] pub async fn register( runtime: &'static Runtime, name: &'static str, descr: &'static str, default_startup: bool, immutable: bool, ) -> Result<()> { runtime .plugins .register(name, default_startup, immutable, move || -> DynPluginResult { Box::pin(async move { Template::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) }) }) }) .await?; Ok(()) } struct Template { _runtime: &'static Runtime, name: String, descr: String, register: Box<dyn Register>, } impl Template { #[inline] async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> { let register = runtime.extends.hook_mgr().await.register(); Ok(Self { _runtime: runtime, name: name.into(), descr: descr.into(), register }) } } #[async_trait] impl Plugin for Template { #[inline] async fn init(&mut self) -> Result<()> { log::debug!("{} init", self.name); self.register.add(Type::ClientConnack, Box::new(HookHandler::new())).await; self.register.add(Type::ClientSubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::ClientUnsubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::MessageDelivered, Box::new(HookHandler::new())).await; self.register.add(Type::MessagePublish, Box::new(HookHandler::new())).await; self.register.add_priority(Type::ClientSubscribeCheckAcl, 10, Box::new(HookHandler::new())).await; self.register.add_priority(Type::GrpcMessageReceived, 10, Box::new(HookHandler::new())).await; Ok(()) } #[inline] fn name(&self) -> &str { &self.name } #[inline] async fn start(&mut self) -> Result<()> { log::info!("{} start", self.name); self.register.start().await; Ok(()) } #[inline] async fn stop(&mut self) -> Result<bool> { log::info!("{} stop", self.name); self.register.stop().await; Ok(true) } #[inline] fn version(&self) -> &str { "0.1.1" } 
#[inline] fn descr(&self) -> &str { &self.descr } } struct HookHandler {} impl HookHandler { fn new() -> Self { Self {} } } #[async_trait] impl Handler for HookHandler { async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType { match param { Parameter::ClientC
} Parameter::MessagePublish(_session, c, publish) => { log::debug!("{:?} message publish, {:?}", c.id, publish); } Parameter::MessageDelivered(_session, c, from, _publish) => { log::debug!("{:?} MessageDelivered, {:?}", c.id, from); } Parameter::ClientSubscribeCheckAcl(_s, _c, subscribe) => { log::debug!("{:?} ClientSubscribeCheckAcl, {:?}", _c.id, subscribe); } _ => { log::error!("unimplemented, {:?}", param) } } (true, acc) } }
onnack(connect_info, r) => { log::debug!("client connack, {:?}, {:?}", connect_info, r); } Parameter::ClientSubscribe(_session, c, subscribe) => { log::debug!("{:?} client subscribe, {:?}", c.id, subscribe); } Parameter::ClientUnsubscribe(_session, c, unsubscribe) => { log::debug!("{:?} client unsubscribe, {:?}", c.id, unsubscribe);
function_block-random_span
[ { "content": "pub trait PluginFn: 'static + Sync + Send + Fn() -> BoxFuture<Result<DynPlugin>> {}\n\n\n\nimpl<T> PluginFn for T where T: 'static + Sync + Send + ?Sized + Fn() -> BoxFuture<Result<DynPlugin>> {}\n\n\n\npub type DynPluginResult = BoxFuture<Result<DynPlugin>>;\n\npub type DynPlugin = Box<dyn Plugin>;\n\npub type DynPluginFn = Box<dyn PluginFn>;\n\n\n\npub struct Entry {\n\n inited: bool,\n\n active: bool,\n\n //will reject start, stop, and load config operations\n\n immutable: bool,\n\n plugin: Option<DynPlugin>,\n\n plugin_f: Option<DynPluginFn>,\n\n}\n\n\n\nimpl Entry {\n\n #[inline]\n\n pub fn inited(&self) -> bool {\n", "file_path": "rmqtt/src/plugin.rs", "rank": 0, "score": 272924.6579060672 }, { "content": "#[inline]\n\npub fn is_busy() -> bool {\n\n #[inline]\n\n fn _is_busy() -> bool {\n\n let busies = ACTIVE_COUNTS\n\n .get()\n\n .map(|m| {\n\n m.iter()\n\n .group_by(|item| (item.key().0, item.value().1))\n\n .into_iter()\n\n .map(|(k, g)| {\n\n (\n\n k,\n\n g.map(|item| {\n\n let (c, _) = item.value();\n\n *c\n\n })\n\n .sum::<isize>(),\n\n )\n\n })\n\n .filter_map(|((_, busy_limit), c)| if c > busy_limit { Some(1) } else { None })\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 2, "score": 202462.10699895275 }, { "content": "#[inline]\n\nfn format_timestamp(t: i64) -> String {\n\n if t <= 0 {\n\n \"\".into()\n\n } else {\n\n use chrono::TimeZone;\n\n if let LocalResult::Single(t) = chrono::Local.timestamp_opt(t, 0) {\n\n t.format(\"%Y-%m-%d %H:%M:%S\").to_string()\n\n } else {\n\n \"\".into()\n\n }\n\n }\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/types.rs", "rank": 3, "score": 196306.07417785923 }, { "content": "#[inline]\n\npub fn serialize_datetime_option<S>(t: &Option<Duration>, s: S) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if let Some(t) = t {\n\n t.as_secs().to_string().serialize(s)\n\n } else {\n\n \"\".serialize(s)\n\n }\n\n}\n\n\n\n#[derive(Clone, Serialize)]\n\npub struct 
NodeAddr {\n\n pub id: NodeId,\n\n pub addr: Addr,\n\n}\n\n\n\nimpl std::fmt::Debug for NodeAddr {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}@{:?}\", self.id, self.addr)\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 4, "score": 188860.5981579766 }, { "content": "#[inline]\n\npub fn to_uptime(uptime: i64) -> String {\n\n let uptime_secs = uptime % 60;\n\n let uptime = uptime / 60;\n\n let uptime_minus = uptime % 60;\n\n let uptime = uptime / 60;\n\n let uptime_hours = uptime % 24;\n\n let uptime_days = uptime / 24;\n\n format!(\"{} days {} hours, {} minutes, {} seconds\", uptime_days, uptime_hours, uptime_minus, uptime_secs)\n\n}\n", "file_path": "rmqtt/src/node.rs", "rank": 5, "score": 186903.92463507495 }, { "content": "pub trait OnEventFn: 'static + Sync + Send + Fn() {}\n\nimpl<T> OnEventFn for T where T: 'static + Sync + Send + Clone + ?Sized + Fn() {}\n\n\n\n#[derive(Clone)]\n\npub struct Sender<T> {\n\n tx: mpsc::Sender<()>,\n\n queue: Arc<Queue<T>>,\n\n policy_fn: Rc<dyn PolicyFn<T>>,\n\n}\n\n\n\nimpl<T> Sender<T> {\n\n #[inline]\n\n pub async fn close(&mut self) -> Result<()> {\n\n self.tx.close().await?;\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n pub fn len(&self) -> usize {\n\n self.queue.len()\n", "file_path": "rmqtt/src/broker/queue.rs", "rank": 6, "score": 184021.84772363934 }, { "content": "fn open_file(filename: &str) -> Result<File> {\n\n OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .append(true)\n\n .open(filename)\n\n .map_err(|e| MqttError::from(format!(\"logger file config error, filename: {}, {:?}\", filename, e)))\n\n}\n", "file_path": "rmqtt/src/logger.rs", "rank": 7, "score": 184017.5477397069 }, { "content": "#[inline]\n\nfn timestamp_parse_from_str(ts: &str, fmt: &str) -> anyhow::Result<i64> {\n\n let ndt = chrono::NaiveDateTime::parse_from_str(ts, fmt)?;\n\n let ndt = ndt.and_local_timezone(*chrono::Local::now().offset());\n\n match ndt {\n\n LocalResult::None => 
Err(anyhow::Error::msg(\"Impossible\")),\n\n LocalResult::Single(d) => Ok(d.timestamp()),\n\n LocalResult::Ambiguous(d, _tz) => Ok(d.timestamp()),\n\n }\n\n}\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 8, "score": 183716.01689933013 }, { "content": "type HandlerId = String;\n\n\n\n//#[derive(Clone)]\n\npub struct DefaultHookManager {\n\n #[allow(clippy::type_complexity)]\n\n handlers: Arc<DashMap<Type, Arc<sync::RwLock<BTreeMap<(Priority, HandlerId), HookEntry>>>>>,\n\n}\n\n\n\nimpl DefaultHookManager {\n\n #[inline]\n\n pub fn instance() -> &'static DefaultHookManager {\n\n static INSTANCE: OnceCell<DefaultHookManager> = OnceCell::new();\n\n INSTANCE.get_or_init(|| Self { handlers: Arc::new(DashMap::default()) })\n\n }\n\n\n\n #[inline]\n\n async fn add(&self, typ: Type, priority: Priority, handler: Box<dyn Handler>) -> Result<HandlerId> {\n\n let id = Uuid::new_v4().as_simple().encode_lower(&mut Uuid::encode_buffer()).to_string();\n\n let type_handlers =\n\n self.handlers.entry(typ).or_insert(Arc::new(sync::RwLock::new(BTreeMap::default())));\n", "file_path": "rmqtt/src/broker/default.rs", "rank": 9, "score": 183318.5854244445 }, { "content": "#[inline]\n\npub fn to_duration(text: &str) -> Duration {\n\n let text = text.to_lowercase().replace(\"ms\", \"Y\");\n\n let ms: u64 = text\n\n .split_inclusive(|x| x == 's' || x == 'm' || x == 'h' || x == 'd' || x == 'w' || x == 'f' || x == 'Y')\n\n .map(|x| {\n\n let mut chars = x.chars();\n\n let u = match chars.nth_back(0) {\n\n None => return 0,\n\n Some(u) => u,\n\n };\n\n let v = match chars.as_str().parse::<u64>() {\n\n Err(_e) => return 0,\n\n Ok(v) => v,\n\n };\n\n match u {\n\n 'Y' => v,\n\n 's' => v * 1000,\n\n 'm' => v * 60000,\n\n 'h' => v * 3600000,\n\n 'd' => v * 86400000,\n\n 'w' => v * 604800000,\n\n 'f' => v * 1209600000,\n\n _ => 0,\n\n }\n\n })\n\n .sum();\n\n Duration::from_millis(ms)\n\n}\n\n\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 10, "score": 183296.16811620517 }, { 
"content": "#[inline]\n\npub fn to_bytesize(text: &str) -> usize {\n\n let text = text.to_uppercase().replace(\"GB\", \"G\").replace(\"MB\", \"M\").replace(\"KB\", \"K\");\n\n text.split_inclusive(|x| x == 'G' || x == 'M' || x == 'K' || x == 'B')\n\n .map(|x| {\n\n let mut chars = x.chars();\n\n let u = match chars.nth_back(0) {\n\n None => return 0,\n\n Some(u) => u,\n\n };\n\n let v = match chars.as_str().parse::<usize>() {\n\n Err(_e) => return 0,\n\n Ok(v) => v,\n\n };\n\n match u {\n\n 'B' => v,\n\n 'K' => v * BYTESIZE_K,\n\n 'M' => v * BYTESIZE_M,\n\n 'G' => v * BYTESIZE_G,\n\n _ => 0,\n\n }\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 11, "score": 183296.16811620517 }, { "content": "#[inline]\n\npub fn parse_topic_filter(\n\n topic_filter: &ByteString,\n\n shared_subscription_supported: bool,\n\n) -> Result<(TopicFilter, Option<SharedGroup>)> {\n\n let mut shared_group = None;\n\n let err = MqttError::TopicError(\"Illegal topic filter\".into());\n\n //$share/abc/\n\n let topic = if shared_subscription_supported {\n\n let mut levels = topic_filter.splitn(3, '/').collect::<Vec<_>>();\n\n let is_share = levels.first().map(|f| *f == \"$share\").unwrap_or(false);\n\n if is_share {\n\n if levels.len() < 3 {\n\n return Err(err);\n\n }\n\n levels.remove(0);\n\n shared_group = Some(SharedGroup::from(levels.remove(0)));\n\n ByteString::from(levels.remove(0))\n\n } else {\n\n topic_filter.clone()\n\n }\n", "file_path": "rmqtt/src/broker/types.rs", "rank": 13, "score": 178295.0420507327 }, { "content": "pub trait PolicyFn<P>: 'static + Fn(&P) -> Policy {}\n\n\n\nimpl<T, P> PolicyFn<P> for T where T: 'static + Clone + ?Sized + Fn(&P) -> Policy {}\n\n\n", "file_path": "rmqtt/src/broker/queue.rs", "rank": 14, "score": 177159.74269483463 }, { "content": "fn filtering(q: &SearchParams, entry: &dyn Entry) -> bool {\n\n let s = if let Some(s) = entry.session() {\n\n s\n\n } else {\n\n return false;\n\n };\n\n\n\n let c = if let Some(c) = 
entry.client() {\n\n c\n\n } else {\n\n return false;\n\n };\n\n\n\n if let Some(clientid) = &q.clientid {\n\n if clientid.as_bytes() != s.id.client_id.as_bytes() {\n\n return false;\n\n }\n\n }\n\n\n\n if let Some(username) = &q.username {\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/clients.rs", "rank": 15, "score": 176710.47597630715 }, { "content": "/// Initializes a logger using `slog` and `slog_scope`.\n\n///\n\n/// This function creates a `GlobalLoggerGuard` and sets the global logger to the `logger` passed\n\n/// in the `Runtime` instance. It also initializes `slog_stdlog` with the log level specified in\n\n/// the `Runtime` settings.\n\npub fn logger_init() -> GlobalLoggerGuard {\n\n let level = slog_log_to_level(Runtime::instance().settings.log.level.inner());\n\n let logger = Runtime::instance().logger.clone();\n\n // Make sure to save the guard, see documentation for more information\n\n let guard = slog_scope::set_global_logger(logger.clone());\n\n // register slog_stdlog as the log handler with the log crate\n\n slog_stdlog::init_with_level(level).unwrap();\n\n guard\n\n}\n\n\n", "file_path": "rmqtt/src/logger.rs", "rank": 16, "score": 170364.62221008242 }, { "content": "type Headers = (Option<ContentType>, HeaderMap, HashMap<String, String>);\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct Req {\n\n #[serde(serialize_with = \"Req::serialize_url\", deserialize_with = \"Req::deserialize_url\")]\n\n pub url: Url,\n\n #[serde(serialize_with = \"Req::serialize_method\", deserialize_with = \"Req::deserialize_method\")]\n\n pub method: Method,\n\n #[serde(\n\n default,\n\n serialize_with = \"Req::serialize_headers\",\n\n deserialize_with = \"Req::deserialize_headers\"\n\n )]\n\n pub headers: Headers,\n\n pub params: HashMap<String, String>,\n\n}\n\n\n\nimpl Req {\n\n pub fn is_get(&self) -> bool {\n\n self.method == Method::GET\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/config.rs", "rank": 17, "score": 168908.20464222773 
}, { "content": "struct RetainHandler {\n\n retainer: &'static Retainer,\n\n _cfg: Arc<RwLock<PluginConfig>>,\n\n message_type: MessageType,\n\n}\n\n\n\nimpl RetainHandler {\n\n fn new(retainer: &'static Retainer, cfg: &Arc<RwLock<PluginConfig>>, message_type: MessageType) -> Self {\n\n Self { retainer, _cfg: cfg.clone(), message_type }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for RetainHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::GrpcMessageReceived(typ, msg) => {\n\n log::debug!(\"GrpcMessageReceived, type: {}, msg: {:?}\", typ, msg);\n\n if self.message_type != *typ {\n\n return (true, acc);\n", "file_path": "rmqtt-plugins/rmqtt-retainer/src/lib.rs", "rank": 18, "score": 157649.56163672946 }, { "content": "struct AclHandler {\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AclHandler {\n\n fn new(cfg: &Arc<RwLock<PluginConfig>>) -> Self {\n\n Self { cfg: cfg.clone() }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for AclHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::ClientConnected(_session, client) => {\n\n let cfg = self.cfg.clone();\n\n let client_id = client.id.client_id.clone();\n\n let username = client.connect_info.username().cloned();\n\n let build_placeholders = async move {\n\n for rule in cfg.read().await.rules() {\n", "file_path": "rmqtt-plugins/rmqtt-acl/src/lib.rs", "rank": 19, "score": 157649.56163672946 }, { "content": "struct CounterHandler {\n\n metrics: &'static Metrics,\n\n}\n\n\n\nimpl CounterHandler {\n\n fn new() -> Self {\n\n Self { metrics: Metrics::instance() }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for CounterHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::ClientConnect(connect_info) => {\n\n self.metrics.client_connect_inc();\n\n if connect_info.username().is_none() 
{\n\n self.metrics.client_auth_anonymous_inc();\n\n }\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-counter/src/lib.rs", "rank": 20, "score": 157649.56163672946 }, { "content": "/// Creates a new `slog::Logger` with two `Drain`s: one for printing to the console and another for\n\n/// printing to a file.\n\n///\n\n/// This function takes three arguments: `filename`, which specifies the name of the file to print\n\n/// to; `to`, which specifies where to print the logs (either the console or a file); and `level`,\n\n/// which specifies the minimum log level to print. The function sets the format for the logs and\n\n/// creates the two `Drain`s using the provided parameters. It then combines the two `Drain`s using a\n\n/// `Tee` and returns the resulting `Logger`.\n\npub fn config_logger(filename: String, to: To, level: Level) -> slog::Logger {\n\n let custom_timestamp =\n\n |io: &mut dyn io::Write| write!(io, \"{}\", chrono::Local::now().format(\"%Y-%m-%d %H:%M:%S%.3f\"));\n\n\n\n let print_msg_header = |fn_timestamp: &dyn ThreadSafeTimestampFn<Output = io::Result<()>>,\n\n mut rd: &mut dyn RecordDecorator,\n\n record: &Record,\n\n _use_file_location: bool|\n\n -> io::Result<bool> {\n\n rd.start_timestamp()?;\n\n fn_timestamp(&mut rd)?;\n\n\n\n rd.start_whitespace()?;\n\n write!(rd, \" \")?;\n\n\n\n rd.start_level()?;\n\n write!(rd, \"{}\", record.level().as_short_str())?;\n\n\n\n rd.start_location()?;\n\n if record.function().is_empty() {\n", "file_path": "rmqtt/src/logger.rs", "rank": 21, "score": 156197.23277060426 }, { "content": "type TopicsType = Option<(Arc<TopicTree<()>>, Vec<String>)>;\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct Rule {\n\n pub action: String,\n\n #[serde(default)]\n\n pub urls: Vec<Url>,\n\n #[serde(\n\n default,\n\n deserialize_with = \"Rule::deserialize_topics\",\n\n serialize_with = \"Rule::serialize_topics\"\n\n )]\n\n pub topics: TopicsType,\n\n}\n\n\n\nimpl Rule {\n\n fn serialize_topics<S>(topics: &TopicsType, s: 
S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: ser::Serializer,\n\n {\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/config.rs", "rank": 22, "score": 155311.02447641248 }, { "content": "struct AuthHandler {\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AuthHandler {\n\n fn new(cfg: &Arc<RwLock<PluginConfig>>) -> Self {\n\n Self { cfg: cfg.clone() }\n\n }\n\n\n\n async fn response_result(resp: Response) -> Result<(ResponseResult, Superuser, Cacheable)> {\n\n if resp.status().is_success() {\n\n let superuser = resp.headers().contains_key(SUPERUSER);\n\n let cache_timeout = if let Some(tm) = resp.headers().get(CACHEABLE).and_then(|v| v.to_str().ok())\n\n {\n\n match tm.parse::<i64>() {\n\n Ok(tm) => Some(tm),\n\n Err(e) => {\n\n log::warn!(\"Parse X-Cache error, {:?}\", e);\n\n None\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/lib.rs", "rank": 23, "score": 154101.1084645624 }, { "content": "#[inline]\n\npub fn deserialize_datetime_option<'de, D>(deserializer: D) -> std::result::Result<Option<Duration>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let t_str = String::deserialize(deserializer)?;\n\n if t_str.is_empty() {\n\n Ok(None)\n\n } else {\n\n let t = if let Ok(d) = timestamp_parse_from_str(&t_str, \"%Y-%m-%d %H:%M:%S\") {\n\n Duration::from_secs(d as u64)\n\n } else {\n\n let d = t_str.parse::<u64>().map_err(serde::de::Error::custom)?;\n\n Duration::from_secs(d)\n\n };\n\n Ok(Some(t))\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 24, "score": 151885.92384111808 }, { "content": "struct SystemTopicHandler {\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n // message_type: MessageType,\n\n nodeid: NodeId,\n\n}\n\n\n\nimpl SystemTopicHandler {\n\n fn new(cfg: &Arc<RwLock<PluginConfig>>) -> Self {\n\n let nodeid = Runtime::instance().node.id();\n\n Self { cfg: cfg.clone(), nodeid }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for SystemTopicHandler {\n\n async fn hook(&self, param: &Parameter, acc: 
Option<HookResult>) -> ReturnType {\n\n log::debug!(\"param: {:?}, acc: {:?}\", param, acc);\n\n let now = chrono::Local::now();\n\n let now_time = now.format(\"%Y-%m-%d %H:%M:%S%.3f\").to_string();\n\n if let Some((topic, payload)) = match param {\n", "file_path": "rmqtt-plugins/rmqtt-sys-topic/src/lib.rs", "rank": 25, "score": 150746.62182515132 }, { "content": "struct WebHookHandler {\n\n tx: Arc<RwLock<Sender<Message>>>,\n\n chan_queue_count: Arc<AtomicIsize>,\n\n}\n\n\n\nimpl WebHookHandler {\n\n async fn handle(\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n writers: HookWriters,\n\n backoff_strategy: Arc<ExponentialBackoff>,\n\n typ: hook::Type,\n\n topic: Option<TopicFilter>,\n\n body: serde_json::Value,\n\n ) -> Result<()> {\n\n let topic = if let Some(topic) = topic { Some(Topic::from_str(&topic)?) } else { None };\n\n let hook_writes = {\n\n let cfg = cfg.read().await;\n\n if let Some(rules) = cfg.rules.get(&typ) {\n\n //get action and urls\n\n let action_urls = rules.iter().filter_map(|r| {\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 26, "score": 150746.62182515132 }, { "content": "#[inline]\n\npub fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let v = String::deserialize(deserializer)?;\n\n Ok(to_duration(&v))\n\n}\n\n\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 27, "score": 147721.4997263193 }, { "content": "#[inline]\n\npub fn deserialize_addr<'de, D>(deserializer: D) -> Result<SocketAddr, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let addr = String::deserialize(deserializer)?\n\n .parse::<std::net::SocketAddr>()\n\n .map_err(serde::de::Error::custom)?;\n\n Ok(addr)\n\n}\n\n\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 28, "score": 145194.9026895504 }, { "content": "fn route(cfg: PluginConfigType) -> Router {\n\n Router::with_path(\"api/v1\")\n\n .hoop(affix::inject(cfg))\n\n .hoop(api_logger)\n\n .get(list_apis)\n\n 
.push(Router::with_path(\"brokers\").get(get_brokers).push(Router::with_path(\"<id>\").get(get_brokers)))\n\n .push(Router::with_path(\"nodes\").get(get_nodes).push(Router::with_path(\"<id>\").get(get_nodes)))\n\n .push(Router::with_path(\"health/check\").get(check_health))\n\n .push(\n\n Router::with_path(\"clients\").get(search_clients).push(\n\n Router::with_path(\"<clientid>\")\n\n .get(get_client)\n\n .delete(kick_client)\n\n .push(Router::with_path(\"online\").get(check_online)),\n\n ),\n\n )\n\n .push(\n\n Router::with_path(\"subscriptions\")\n\n .get(query_subscriptions)\n\n .push(Router::with_path(\"<clientid>\").get(get_client_subscriptions)),\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/api.rs", "rank": 29, "score": 143968.60715750075 }, { "content": "#[async_trait]\n\npub trait Plugin: Send + Sync {\n\n #[inline]\n\n async fn init(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn name(&self) -> &str {\n\n \"\"\n\n }\n\n\n\n #[inline]\n\n async fn get_config(&self) -> Result<serde_json::Value> {\n\n Ok(json!({}))\n\n }\n\n\n\n #[inline]\n\n async fn load_config(&mut self) -> Result<()> {\n\n Err(MqttError::from(\"unimplemented!\"))\n\n }\n", "file_path": "rmqtt/src/plugin.rs", "rank": 30, "score": 143014.74749873282 }, { "content": "#[inline]\n\npub fn deserialize_duration_option<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let v = String::deserialize(deserializer)?;\n\n if v.is_empty() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(to_duration(&v)))\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 31, "score": 140472.05942925622 }, { "content": "#[inline]\n\npub fn get_rate() -> f64 {\n\n RATES.get().map(|m| m.iter().map(|entry| *entry.value()).sum::<f64>()).unwrap_or_default()\n\n}\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 32, "score": 135311.59653488168 }, { "content": "#[inline]\n\npub fn get_active_count() -> isize {\n\n 
ACTIVE_COUNTS\n\n .get()\n\n .map(|m| {\n\n m.iter()\n\n .map(|item| {\n\n let (c, _) = item.value();\n\n *c\n\n })\n\n .sum()\n\n })\n\n .unwrap_or_default()\n\n}\n\n\n\nstatic RATES: OnceCell<DashMap<(Port, ThreadId), f64>> = OnceCell::new();\n\n\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 33, "score": 132759.1610093575 }, { "content": "pub fn active_grpc_requests() -> isize {\n\n ACTIVE_REQUEST_COUNT.load(Ordering::SeqCst)\n\n}\n", "file_path": "rmqtt/src/grpc/server.rs", "rank": 34, "score": 132754.11395884375 }, { "content": "#[async_trait]\n\npub trait Handler: Sync + Send {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType;\n\n}\n\n\n", "file_path": "rmqtt/src/broker/hook.rs", "rank": 35, "score": 132292.17990123978 }, { "content": "struct RetainerPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n retainer: &'static Retainer,\n\n}\n\n\n\nimpl RetainerPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = runtime.settings.plugins.load_config::<PluginConfig>(&name)?;\n\n log::info!(\"{} RetainerPlugin cfg: {:?}\", name, cfg);\n\n let register = runtime.extends.hook_mgr().await.register();\n", "file_path": "rmqtt-plugins/rmqtt-retainer/src/lib.rs", "rank": 36, "score": 129842.20650916093 }, { "content": "struct CounterPlugin {\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n}\n\n\n\nimpl CounterPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let register = runtime.extends.hook_mgr().await.register();\n\n Ok(Self { name, descr: descr.into(), register })\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": 
"rmqtt-plugins/rmqtt-counter/src/lib.rs", "rank": 37, "score": 129842.20650916093 }, { "content": "struct AclPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AclPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(runtime.settings.plugins.load_config::<PluginConfig>(&name)?));\n\n log::debug!(\"{} AclPlugin cfg: {:?}\", name, cfg.read().await);\n\n let register = runtime.extends.hook_mgr().await.register();\n\n Ok(Self { runtime, name, descr: descr.into(), register, cfg })\n", "file_path": "rmqtt-plugins/rmqtt-acl/src/lib.rs", "rank": 38, "score": 129842.20650916093 }, { "content": "struct ClusterPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n grpc_clients: GrpcClients,\n\n shared: &'static ClusterShared,\n\n retainer: &'static ClusterRetainer,\n\n router: &'static ClusterRouter,\n\n}\n\n\n\nimpl ClusterPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(\n\n runtime.settings.plugins.load_config_with::<PluginConfig>(&name, &[\"node_grpc_addrs\"])?,\n\n ));\n\n log::debug!(\"{} ClusterPlugin cfg: {:?}\", name, cfg.read().await);\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/lib.rs", "rank": 39, "score": 127562.49497121313 }, { "content": "struct ClusterPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<PluginConfig>,\n\n grpc_clients: GrpcClients,\n\n shared: &'static ClusterShared,\n\n retainer: &'static ClusterRetainer,\n\n\n\n router: &'static ClusterRouter,\n\n 
raft_mailbox: Option<Mailbox>,\n\n}\n\n\n\nimpl ClusterPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let env_list_keys = [\"node_grpc_addrs\", \"raft_peer_addrs\"];\n\n let mut cfg = runtime.settings.plugins.load_config_with::<PluginConfig>(&name, &env_list_keys)?;\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/lib.rs", "rank": 40, "score": 127562.49497121313 }, { "content": "#[inline]\n\npub fn deserialize_addr_option<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<std::net::SocketAddr>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let addr = String::deserialize(deserializer).map(|mut addr| {\n\n if !addr.contains(':') {\n\n addr += \":0\";\n\n }\n\n addr\n\n })?;\n\n let addr = addr.parse::<std::net::SocketAddr>().map_err(serde::de::Error::custom)?;\n\n Ok(Some(addr))\n\n}\n\n\n", "file_path": "rmqtt/src/settings/mod.rs", "rank": 41, "score": 126226.2748540288 }, { "content": "struct HttpApiPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: PluginConfigType,\n\n shutdown_tx: Option<ShutdownTX>,\n\n}\n\n\n\nimpl HttpApiPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(runtime.settings.plugins.load_config::<PluginConfig>(&name)?));\n\n log::debug!(\"{} HttpApiPlugin cfg: {:?}\", name, cfg.read().await);\n\n let register = runtime.extends.hook_mgr().await.register();\n\n let shutdown_tx = Some(Self::start(runtime, cfg.clone()).await);\n\n Ok(Self { runtime, name, descr: descr.into(), register, cfg, shutdown_tx })\n\n }\n\n\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/lib.rs", "rank": 42, "score": 125388.69878701157 }, { "content": "struct SystemTopicPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: 
String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n running: Arc<AtomicBool>,\n\n}\n\n\n\nimpl SystemTopicPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = runtime.settings.plugins.load_config_default::<PluginConfig>(&name)?;\n\n log::debug!(\"{} SystemTopicPlugin cfg: {:?}\", name, cfg);\n\n let register = runtime.extends.hook_mgr().await.register();\n", "file_path": "rmqtt-plugins/rmqtt-sys-topic/src/lib.rs", "rank": 43, "score": 125388.69878701157 }, { "content": "struct WebHookPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n chan_queue_count: Arc<AtomicIsize>,\n\n tx: Arc<RwLock<Sender<Message>>>,\n\n writers: HookWriters,\n\n exec: TaskExecQueue,\n\n}\n\n\n\nimpl WebHookPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(Self::load_config(runtime, &name)?));\n\n log::debug!(\"{} WebHookPlugin cfg: {:?}\", name, cfg.read().await);\n\n let writers = Arc::new(DashMap::default());\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 44, "score": 125388.69878701157 }, { "content": "struct AuthHttpPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AuthHttpPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(runtime.settings.plugins.load_config::<PluginConfig>(&name)?));\n\n log::debug!(\"{} AuthHttpPlugin cfg: {:?}\", name, cfg.read().await);\n\n let register = 
runtime.extends.hook_mgr().await.register();\n\n Ok(Self { runtime, name, descr: descr.into(), register, cfg })\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/lib.rs", "rank": 45, "score": 125388.69878701157 }, { "content": "type PluginConfigType = Arc<RwLock<PluginConfig>>;\n\n\n\n#[inline]\n\npub async fn register(\n\n runtime: &'static Runtime,\n\n name: &'static str,\n\n descr: &'static str,\n\n default_startup: bool,\n\n immutable: bool,\n\n) -> Result<()> {\n\n runtime\n\n .plugins\n\n .register(name, default_startup, immutable, move || -> DynPluginResult {\n\n Box::pin(async move {\n\n HttpApiPlugin::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) })\n\n })\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/lib.rs", "rank": 46, "score": 124952.20914615702 }, { "content": "#[inline]\n\nfn init_task_exec_queue(workers: usize, queue_max: usize) {\n\n let (exec, task_runner) = Builder::default().workers(workers).queue_max(queue_max).build();\n\n\n\n tokio::spawn(async move {\n\n task_runner.await;\n\n });\n\n\n\n TASK_EXEC_QUEUE.set(exec).ok().expect(\"Failed to initialize task execution queue\")\n\n}\n\n\n\n#[inline]\n\npub(crate) fn task_exec_queue() -> &'static TaskExecQueue {\n\n TASK_EXEC_QUEUE.get().expect(\"TaskExecQueue not initialized\")\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/lib.rs", "rank": 47, "score": 123805.70864227576 }, { "content": "#[inline]\n\nfn set_rate(name: Port, rate: f64) {\n\n let rates = RATES.get_or_init(DashMap::default);\n\n let mut entry = rates.entry((name, std::thread::current().id())).or_default();\n\n *entry.value_mut() = rate;\n\n}\n\n\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 48, "score": 123709.03686585656 }, { "content": "#[test]\n\nfn test_reason() {\n\n assert_eq!(Reason::ConnectKicked(false).is_kicked(false), true);\n\n assert_eq!(Reason::ConnectKicked(false).is_kicked(true), false);\n\n 
assert_eq!(Reason::ConnectKicked(true).is_kicked(true), true);\n\n assert_eq!(Reason::ConnectKicked(true).is_kicked(false), false);\n\n assert_eq!(Reason::ConnectKicked(true).is_kicked_by_admin(), true);\n\n assert_eq!(Reason::ConnectKicked(false).is_kicked_by_admin(), false);\n\n assert_eq!(Reason::ConnectDisconnect(None).is_kicked(false), false);\n\n assert_eq!(Reason::ConnectDisconnect(None).is_kicked_by_admin(), false);\n\n\n\n let reasons = Reason::Reasons(vec![\n\n Reason::PublishRefused,\n\n Reason::ConnectKicked(false),\n\n Reason::MessageExpiration,\n\n ]);\n\n assert_eq!(reasons.to_string(), \"PublishRefused,Kicked,MessageExpiration\");\n\n}\n", "file_path": "rmqtt/src/broker/types.rs", "rank": 49, "score": 122268.43203476127 }, { "content": "fn on_handshake(req: &Request, mut response: Response) -> std::result::Result<Response, ErrorResponse> {\n\n const PROTOCOL_ERROR: &str = \"No \\\"Sec-WebSocket-Protocol: mqtt\\\" in client request\";\n\n let mqtt_protocol = req\n\n .headers()\n\n .get(\"Sec-WebSocket-Protocol\")\n\n .ok_or_else(|| ErrorResponse::new(Some(PROTOCOL_ERROR.into())))?;\n\n if mqtt_protocol != \"mqtt\" {\n\n return Err(ErrorResponse::new(Some(PROTOCOL_ERROR.into())));\n\n }\n\n response.headers_mut().append(\"Sec-WebSocket-Protocol\", HeaderValue::from_static(\"mqtt\"));\n\n Ok(response)\n\n}\n", "file_path": "rmqtt-bin/src/ws.rs", "rank": 50, "score": 118598.90335003083 }, { "content": "pub trait VecToString {\n\n fn to_string(&self) -> String;\n\n}\n\n\n\nimpl<'a> VecToString for Vec<&'a Level> {\n\n #[inline]\n\n fn to_string(&self) -> String {\n\n self.iter().map(|l| l.to_string()).collect::<Vec<String>>().join(\"/\")\n\n }\n\n}\n\n\n\nimpl<'a> VecToString for &'a [Level] {\n\n #[inline]\n\n fn to_string(&self) -> String {\n\n self.iter().map(|l| l.to_string()).collect::<Vec<String>>().join(\"/\")\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/broker/topic.rs", "rank": 51, "score": 117308.34062201236 }, { "content": "struct HookWriter 
{\n\n file_name: String,\n\n file: Option<File>,\n\n}\n\n\n\nimpl HookWriter {\n\n fn new(file: ByteString) -> Self {\n\n Self { file_name: file.to_string(), file: None }\n\n }\n\n\n\n #[inline]\n\n pub async fn log(&mut self, msg: &[u8]) -> Result<(), Box<dyn std::error::Error>> {\n\n if let Some(file) = self.file.as_mut() {\n\n file.write_all(msg).await?;\n\n file.write_all(b\"\\n\").await?;\n\n } else {\n\n Self::create_dirs(Path::new(&self.file_name)).await?;\n\n let mut file = OpenOptions::new().create(true).append(true).open(&self.file_name).await?;\n\n file.write_all(msg).await?;\n\n file.write_all(b\"\\n\").await?;\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 52, "score": 116644.56507589722 }, { "content": "#[inline]\n\nfn init_task_exec_queue(workers: usize, queue_max: usize) -> TaskExecQueue {\n\n let (exec, task_runner) = Builder::default().workers(workers).queue_max(queue_max).build();\n\n\n\n tokio::spawn(async move {\n\n task_runner.await;\n\n });\n\n\n\n exec\n\n}\n\n\n\n//Failure count\n\n#[inline]\n\npub(crate) fn fails() -> &'static Counter {\n\n static INSTANCE: OnceCell<Counter> = OnceCell::new();\n\n INSTANCE.get_or_init(Counter::new)\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 53, "score": 115581.79654988385 }, { "content": "fn get_fields_named(data: &Data) -> &FieldsNamed {\n\n match *data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => fields,\n\n Fields::Unnamed(ref _fields) => {\n\n unreachable!()\n\n }\n\n Fields::Unit => {\n\n unreachable!()\n\n }\n\n },\n\n Data::Enum(_) | Data::Union(_) => unreachable!(),\n\n }\n\n}\n", "file_path": "rmqtt-macros/src/metrics.rs", "rank": 54, "score": 114546.35136942254 }, { "content": "type HookWriters = Arc<DashMap<ByteString, Arc<RwLock<HookWriter>>>>;\n\n\n\n#[inline]\n\npub async fn register(\n\n runtime: &'static Runtime,\n\n name: &'static str,\n\n descr: &'static str,\n\n default_startup: bool,\n\n 
immutable: bool,\n\n) -> Result<()> {\n\n runtime\n\n .plugins\n\n .register(name, default_startup, immutable, move || -> DynPluginResult {\n\n Box::pin(async move {\n\n WebHookPlugin::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) })\n\n })\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 55, "score": 113913.742237991 }, { "content": "pub trait QoSEx {\n\n fn value(&self) -> u8;\n\n fn less_value(&self, qos: QoS) -> QoS;\n\n}\n\n\n\nimpl QoSEx for QoS {\n\n #[inline]\n\n fn value(&self) -> u8 {\n\n match self {\n\n QoS::AtMostOnce => 0,\n\n QoS::AtLeastOnce => 1,\n\n QoS::ExactlyOnce => 2,\n\n }\n\n }\n\n\n\n #[inline]\n\n fn less_value(&self, qos: QoS) -> QoS {\n\n if self.value() < qos.value() {\n\n *self\n\n } else {\n", "file_path": "rmqtt/src/broker/types.rs", "rank": 56, "score": 113877.3988217508 }, { "content": "#[inline]\n\nfn get_local_stats() -> TaskExecStats {\n\n LOCAL_ACTIVE_COUNTS.get().map(|m| m.iter().map(|item| item.value().clone()).sum()).unwrap_or_default()\n\n}\n", "file_path": "rmqtt/src/runtime.rs", "rank": 57, "score": 111778.48926348596 }, { "content": "#[async_trait]\n\npub trait Register: Sync + Send {\n\n async fn add(&self, typ: Type, handler: Box<dyn Handler>) {\n\n self.add_priority(typ, 0, handler).await;\n\n }\n\n\n\n async fn add_priority(&self, typ: Type, priority: Priority, handler: Box<dyn Handler>);\n\n\n\n async fn start(&self) {}\n\n\n\n async fn stop(&self) {}\n\n}\n\n\n", "file_path": "rmqtt/src/broker/hook.rs", "rank": 58, "score": 110047.96638549029 }, { "content": "// This function extracts data from the decoded Cargo.toml file and uses it to generate Rust code\n\nfn plugins(decoded: &toml::Value) {\n\n let mut inits = Vec::new();\n\n // Extract the data from the \"package.metadata.plugins\" field of the Cargo.toml file\n\n if let Some(plugins) = decoded\n\n .get(\"package\")\n\n .and_then(|package| package.get(\"metadata\"))\n\n 
.and_then(|metadata| metadata.get(\"plugins\"))\n\n .and_then(|plugins| plugins.as_table())\n\n {\n\n // Iterate over the plugins and extract the relevant data\n\n for (id, cfg) in plugins {\n\n let plugin_id = id.replace('-', \"_\");\n\n let name = cfg.get(\"name\").and_then(|v| v.as_str()).unwrap_or(id);\n\n let descr = cfg.get(\"description\").and_then(|v| v.as_str()).unwrap_or_default();\n\n let default_startup = cfg.get(\"default_startup\").and_then(|v| v.as_bool()).unwrap_or(false);\n\n let immutable = cfg.get(\"immutable\").and_then(|v| v.as_bool()).unwrap_or(false);\n\n println!(\n\n \"plugin_id: {}, default_startup: {}, immutable: {}, name: {}, descr: {}\",\n\n plugin_id, default_startup, immutable, name, descr\n\n );\n", "file_path": "rmqtt-bin/build.rs", "rank": 59, "score": 109825.1843142462 }, { "content": "type Message = (hook::Type, Option<TopicFilter>, serde_json::Value);\n\n\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 60, "score": 109719.14412972075 }, { "content": "type Cacheable = Option<i64>;\n\n\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/lib.rs", "rank": 61, "score": 107383.7503924928 }, { "content": "#[inline]\n\nfn set_active_count(name: Port, c: isize, handshaking_busy_limit: Option<usize>) {\n\n let active_counts = ACTIVE_COUNTS.get_or_init(DashMap::default);\n\n let mut entry = active_counts.entry((name, std::thread::current().id())).or_default();\n\n let (count, busy_limit) = entry.value_mut();\n\n *count = c;\n\n if let Some(handshaking_busy_limit) = handshaking_busy_limit {\n\n *busy_limit = handshaking_busy_limit as isize;\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 62, "score": 105593.33923596687 }, { "content": "#[proc_macro_derive(Metrics)]\n\npub fn derive_metrics(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n metrics::build(input)\n\n}\n", "file_path": "rmqtt-macros/src/lib.rs", "rank": 63, "score": 105267.2645966973 }, { "content": "type ShutdownTX = 
oneshot::Sender<()>;\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/lib.rs", "rank": 64, "score": 105218.787501639 }, { "content": "#[inline]\n\nfn get_local_exec() -> Rc<LocalTaskExecQueue> {\n\n std::thread_local! {\n\n pub static LOCAL_EXECUTORS: Rc<LocalTaskExecQueue> = {\n\n let exec_workers = Runtime::instance().settings.task.local_exec_workers;\n\n let exec_queue_max = Runtime::instance().settings.task.local_exec_queue_max;\n\n let (tokens, period) = Runtime::instance().settings.task.local_exec_rate_limit;\n\n let tokens = tokens.get() as usize;\n\n\n\n let rate_limiter = leaky_bucket::RateLimiter::builder()\n\n .initial(tokens)\n\n .refill(tokens / 10)\n\n .interval(period / 10)\n\n .max(tokens)\n\n .fair(true)\n\n .build();\n\n\n\n let (tx, rx) = futures::channel::mpsc::channel(exec_queue_max);\n\n\n\n let (exec, task_runner) = LocalBuilder::default()\n\n .workers(exec_workers)\n", "file_path": "rmqtt/src/runtime.rs", "rank": 65, "score": 104595.84938770249 }, { "content": "type WebSocketStreamType<T> = Pin<Box<dyn Future<Output = Result<WebSocketStream<T>, WSError>>>>;\n\n\n\npin_project_lite::pin_project! 
{\n\n pub struct WSServiceFut<T>\n\n where\n\n T: AsyncRead,\n\n T: AsyncWrite,\n\n T: Unpin,\n\n {\n\n fut: WebSocketStreamType<T>,\n\n #[pin]\n\n delay: Option<Sleep>,\n\n }\n\n}\n\n\n\nimpl<T: AsyncRead + AsyncWrite + Unpin> Future for WSServiceFut<T> {\n\n type Output = Result<WsStream<T>, ntex_mqtt::MqttError<MqttError>>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let mut this = self.project();\n", "file_path": "rmqtt-bin/src/ws.rs", "rank": 70, "score": 102383.86573133324 }, { "content": "use rmqtt::{async_trait::async_trait, log};\n\nuse rmqtt::{\n\n broker::hook::{Handler, HookResult, Parameter, ReturnType},\n\n grpc::{Message as GrpcMessage, MessageReply as GrpcMessageReply, MessageType},\n\n Runtime,\n\n};\n\n\n\nuse super::clients;\n\nuse super::plugin;\n\nuse super::subs;\n\nuse super::types::{Message, MessageReply};\n\n\n\npub(crate) struct HookHandler {\n\n pub message_type: MessageType,\n\n}\n\n\n\nimpl HookHandler {\n\n pub(crate) fn new(message_type: MessageType) -> Self {\n\n Self { message_type }\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 71, "score": 91870.67032621337 }, { "content": " pub(crate) fn new(\n\n shared: &'static ClusterShared,\n\n router: &'static ClusterRouter,\n\n retainer: &'static ClusterRetainer,\n\n ) -> Self {\n\n Self { shared, router, retainer }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for HookHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::GrpcMessageReceived(typ, msg) => {\n\n log::debug!(\"GrpcMessageReceived, type: {}, msg: {:?}\", typ, msg);\n\n if self.shared.message_type != *typ {\n\n return (true, acc);\n\n }\n\n match msg {\n\n Message::Forwards(from, publish) => {\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 72, "score": 91870.25150176919 }, { "content": "}\n\n\n\n#[async_trait]\n\nimpl Handler for HookHandler 
{\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::GrpcMessageReceived(typ, msg) => {\n\n log::debug!(\"GrpcMessageReceived, type: {}, msg: {:?}\", typ, msg);\n\n if self.message_type != *typ {\n\n return (true, acc);\n\n }\n\n match msg {\n\n GrpcMessage::Data(data) => {\n\n let new_acc = match Message::decode(data) {\n\n Err(e) => {\n\n log::error!(\"Message::decode, error: {:?}\", e);\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(e.to_string())))\n\n }\n\n Ok(Message::BrokerInfo) => {\n\n let broker_info = Runtime::instance().node.broker_info().await;\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 73, "score": 91869.91075604579 }, { "content": "\n\nimpl HookHandler {\n\n pub(crate) fn new(\n\n shared: &'static ClusterShared,\n\n retainer: &'static ClusterRetainer,\n\n raft_mailbox: Mailbox,\n\n ) -> Self {\n\n Self { shared, retainer, raft_mailbox }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for HookHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n log::debug!(\"hook, Parameter type: {:?}\", param.get_type());\n\n match param {\n\n Parameter::ClientDisconnected(_s, c, r) => {\n\n log::debug!(\"{:?} hook::ClientDisconnected reason: {:?}\", c.id, r);\n\n if !r.is_kicked(false) {\n\n let msg = Message::Disconnected { id: c.id.clone() }.encode().unwrap();\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 74, "score": 91869.10262308104 }, { "content": "use rmqtt::broker::{Router, Shared};\n\nuse rmqtt::{async_trait::async_trait, log, SubscriptionSize};\n\nuse rmqtt::{\n\n broker::{\n\n hook::{Handler, HookResult, Parameter, ReturnType},\n\n types::{From, Publish, SubRelationsMap},\n\n },\n\n grpc::{Message, MessageReply},\n\n Id, Runtime,\n\n};\n\n\n\nuse super::{hook_message_dropped, retainer::ClusterRetainer, router::ClusterRouter, shared::ClusterShared};\n\n\n\npub(crate) struct 
HookHandler {\n\n shared: &'static ClusterShared,\n\n router: &'static ClusterRouter,\n\n retainer: &'static ClusterRetainer,\n\n}\n\n\n\nimpl HookHandler {\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 75, "score": 91860.67122242543 }, { "content": " ))),\n\n }\n\n }\n\n Ok(Message::ReloadPluginConfig { name }) => {\n\n match Runtime::instance().plugins.load_config(name).await {\n\n Ok(()) => match MessageReply::ReloadPluginConfig.encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n },\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n\n Ok(Message::LoadPlugin { name }) => {\n\n match Runtime::instance().plugins.start(name).await {\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 76, "score": 91859.20428331537 }, { "content": " Ok(()) => match MessageReply::LoadPlugin.encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n },\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n\n Ok(Message::UnloadPlugin { name }) => {\n\n match Runtime::instance().plugins.stop(name).await {\n\n Ok(ok) => match MessageReply::UnloadPlugin(ok).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 77, "score": 91854.34517230972 }, { "content": "use rmqtt_raft::Mailbox;\n\n\n\nuse rmqtt::broker::Shared;\n\nuse rmqtt::rust_box::task_exec_queue::SpawnExt;\n\nuse rmqtt::{async_trait::async_trait, log, tokio, MqttError};\n\nuse 
rmqtt::{\n\n broker::hook::{Handler, HookResult, Parameter, ReturnType},\n\n grpc::{Message as GrpcMessage, MessageReply},\n\n Id, Runtime,\n\n};\n\n\n\nuse super::config::{retry, BACKOFF_STRATEGY};\n\nuse super::message::{Message, RaftGrpcMessage, RaftGrpcMessageReply};\n\nuse super::{hook_message_dropped, retainer::ClusterRetainer, shared::ClusterShared, task_exec_queue};\n\n\n\npub(crate) struct HookHandler {\n\n shared: &'static ClusterShared,\n\n retainer: &'static ClusterRetainer,\n\n raft_mailbox: Mailbox,\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 78, "score": 91853.98554167028 }, { "content": " }\n\n },\n\n Ok(Message::GetPlugins) => match plugin::get_plugins().await {\n\n Ok(plugins) => match MessageReply::GetPlugins(plugins).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n },\n\n Err(e) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(e.to_string())))\n\n }\n\n },\n\n Ok(Message::GetPlugin { name }) => match plugin::get_plugin(name).await {\n\n Ok(plugin) => match MessageReply::GetPlugin(plugin).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 79, "score": 91851.28735918239 }, { "content": " Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n },\n\n Err(e) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(e.to_string())))\n\n }\n\n },\n\n Ok(Message::GetPluginConfig { name }) => {\n\n match plugin::get_plugin_config(name).await {\n\n Ok(cfg) => match MessageReply::GetPluginConfig(cfg).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n 
e.to_string(),\n\n ))),\n\n },\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 80, "score": 91849.45695996526 }, { "content": " Message::NumberOfSessions => {\n\n let new_acc = HookResult::GrpcMessageReply(Ok(MessageReply::NumberOfSessions(\n\n //self.shared.inner().sessions().await,\n\n Runtime::instance().stats.sessions.count() as usize,\n\n )));\n\n return (false, Some(new_acc));\n\n }\n\n Message::GetRetains(topic_filter) => {\n\n let new_acc = match self.retainer.inner().get(topic_filter).await {\n\n Ok(retains) => {\n\n HookResult::GrpcMessageReply(Ok(MessageReply::GetRetains(retains)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Err(e)),\n\n };\n\n return (false, Some(new_acc));\n\n }\n\n Message::Online(clientid) => {\n\n let new_acc = HookResult::GrpcMessageReply(Ok(MessageReply::Online(\n\n Runtime::instance()\n\n .extends\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 81, "score": 91849.24306538745 }, { "content": " return (true, acc);\n\n }\n\n match msg {\n\n GrpcMessage::ForwardsTo(from, publish, sub_rels) => {\n\n if let Err(droppeds) =\n\n self.shared.forwards_to(from.clone(), publish, sub_rels.clone()).await\n\n {\n\n hook_message_dropped(droppeds).await;\n\n }\n\n return (false, acc);\n\n }\n\n GrpcMessage::Kick(id, clean_start, clear_subscriptions, is_admin) => {\n\n let mut entry = self.shared.inner().entry(id.clone());\n\n let new_acc = match entry.kick(*clean_start, *clear_subscriptions, *is_admin).await {\n\n Ok(Some(o)) => HookResult::GrpcMessageReply(Ok(MessageReply::Kick(Some(o)))),\n\n Ok(None) => HookResult::GrpcMessageReply(Ok(MessageReply::Kick(None))),\n\n Err(e) => HookResult::GrpcMessageReply(Err(e)),\n\n };\n\n return (false, Some(new_acc));\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 82, "score": 91848.6653210682 }, { "content": " let 
(shared_subs, subs_size) = forwards(from.clone(), publish.clone()).await;\n\n let new_acc =\n\n HookResult::GrpcMessageReply(Ok(MessageReply::Forwards(shared_subs, subs_size)));\n\n return (false, Some(new_acc));\n\n }\n\n Message::ForwardsTo(from, publish, sub_rels) => {\n\n if let Err(droppeds) =\n\n self.shared.inner().forwards_to(from.clone(), publish, sub_rels.clone()).await\n\n {\n\n hook_message_dropped(droppeds).await;\n\n }\n\n return (false, acc);\n\n }\n\n Message::Kick(id, clean_start, clear_subscriptions, is_admin) => {\n\n let entry = self.shared.inner().entry(id.clone());\n\n log::debug!(\"{:?}\", id);\n\n let new_acc = match entry.try_lock().await {\n\n Ok(mut entry) => {\n\n match entry.kick(*clean_start, *clear_subscriptions, *is_admin).await {\n\n Ok(o) => {\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 83, "score": 91847.81279612012 }, { "content": " ))),\n\n }\n\n }\n\n Ok(Message::Subscribe(params)) =>\n\n {\n\n #[allow(clippy::mutable_key_type)]\n\n match subs::subscribe(params).await {\n\n Ok(replys) => {\n\n let ress = replys\n\n .into_iter()\n\n .map(|(t, res)| match res {\n\n Ok(b) => (t, (b, None)),\n\n Err(e) => (t, (false, Some(e.to_string()))),\n\n })\n\n .collect();\n\n match MessageReply::Subscribe(ress).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 84, "score": 91847.50182378075 }, { "content": " GrpcMessage::GetRetains(topic_filter) => {\n\n log::debug!(\"[GrpcMessage::GetRetains] topic_filter: {:?}\", topic_filter);\n\n let new_acc = match self.retainer.inner().get(topic_filter).await {\n\n Ok(retains) => {\n\n HookResult::GrpcMessageReply(Ok(MessageReply::GetRetains(retains)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Err(e)),\n\n };\n\n return (false, Some(new_acc));\n\n }\n\n 
GrpcMessage::SubscriptionsGet(clientid) => {\n\n let id = Id::from(Runtime::instance().node.id(), clientid.clone());\n\n let entry = self.shared.inner().entry(id);\n\n let new_acc = HookResult::GrpcMessageReply(Ok(MessageReply::SubscriptionsGet(\n\n entry.subscriptions().await,\n\n )));\n\n return (false, Some(new_acc));\n\n }\n\n GrpcMessage::Data(data) => {\n\n let new_acc = match RaftGrpcMessage::decode(data) {\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 85, "score": 91847.09176301057 }, { "content": " Ok(Message::StatsInfo) => {\n\n let node_status = Runtime::instance().node.status().await;\n\n let stats = Runtime::instance().stats.clone().await;\n\n match MessageReply::StatsInfo(node_status, Box::new(stats)).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n\n Ok(Message::MetricsInfo) => {\n\n let metrics = Runtime::instance().metrics.clone();\n\n match MessageReply::MetricsInfo(metrics).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 86, "score": 91846.64470327237 }, { "content": " .router()\n\n .await\n\n .is_online(Runtime::instance().node.id(), clientid)\n\n .await,\n\n )));\n\n return (false, Some(new_acc));\n\n }\n\n Message::SubscriptionsSearch(q) => {\n\n let new_acc = HookResult::GrpcMessageReply(Ok(MessageReply::SubscriptionsSearch(\n\n self.shared.inner()._query_subscriptions(q).await,\n\n )));\n\n return (false, Some(new_acc));\n\n }\n\n Message::SubscriptionsGet(clientid) => {\n\n let id = Id::from(Runtime::instance().node.id(), clientid.clone());\n\n let entry = self.shared.inner().entry(id);\n\n let new_acc = 
HookResult::GrpcMessageReply(Ok(MessageReply::SubscriptionsGet(\n\n entry.subscriptions().await,\n\n )));\n\n return (false, Some(new_acc));\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 87, "score": 91846.4844800525 }, { "content": " e.to_string(),\n\n ))),\n\n },\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n\n };\n\n return (false, Some(new_acc));\n\n }\n\n _ => {\n\n log::error!(\"unimplemented, {:?}\", param)\n\n }\n\n }\n\n }\n\n _ => {\n\n log::error!(\"unimplemented, {:?}\", param)\n\n }\n\n }\n\n (true, acc)\n\n }\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 88, "score": 91845.91075378732 }, { "content": " }\n\n Message::RoutesGet(limit) => {\n\n let new_acc = HookResult::GrpcMessageReply(Ok(MessageReply::RoutesGet(\n\n self.router._inner().gets(*limit).await,\n\n )));\n\n return (false, Some(new_acc));\n\n }\n\n Message::RoutesGetBy(topic) => {\n\n let routes = match self.router._inner()._get_routes(topic).await {\n\n Ok(routes) => Ok(MessageReply::RoutesGetBy(routes)),\n\n Err(e) => Err(e),\n\n };\n\n let new_acc = HookResult::GrpcMessageReply(routes);\n\n return (false, Some(new_acc));\n\n }\n\n Message::SessionStatus(clientid) => {\n\n let status = self.shared.inner().session_status(clientid).await;\n\n let new_acc = HookResult::GrpcMessageReply(Ok(MessageReply::SessionStatus(status)));\n\n return (false, Some(new_acc));\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 89, "score": 91845.76196741368 }, { "content": " GrpcMessageReply::Error(e.to_string()),\n\n )),\n\n }\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n\n Ok(Message::Unsubscribe(params)) => match subs::unsubscribe(params).await {\n\n Ok(()) => match MessageReply::Unsubscribe.encode() {\n\n Ok(ress) => {\n\n 
HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n },\n\n Err(e) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(e.to_string())))\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 90, "score": 91845.40321437703 }, { "content": " let raft_mailbox = self.raft_mailbox.clone();\n\n tokio::spawn(async move {\n\n if let Err(e) = retry(BACKOFF_STRATEGY.clone(), || async {\n\n let msg = msg.clone();\n\n let mailbox = raft_mailbox.clone();\n\n let res = async move { mailbox.send_proposal(msg).await }\n\n .spawn(task_exec_queue())\n\n .result()\n\n .await\n\n .map_err(|_| {\n\n MqttError::from(\n\n \"Handler::hook(Message::Disconnected), task execution failure\",\n\n )\n\n })?\n\n .map_err(|e| MqttError::from(e.to_string()))?;\n\n Ok(res)\n\n })\n\n .await\n\n {\n\n log::warn!(\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 91, "score": 91845.34086887937 }, { "content": "\n\n _ => {\n\n log::error!(\"unimplemented, {:?}\", param)\n\n }\n\n }\n\n }\n\n _ => {\n\n log::error!(\"unimplemented, {:?}\", param)\n\n }\n\n }\n\n (true, acc)\n\n }\n\n}\n\n\n\nasync fn forwards(from: From, publish: Publish) -> (SubRelationsMap, SubscriptionSize) {\n\n log::debug!(\"forwards, From: {:?}, publish: {:?}\", from, publish);\n\n match Runtime::instance().extends.shared().await.forwards_and_get_shareds(from, publish).await {\n\n Err(droppeds) => {\n\n let subs_size = droppeds.len();\n\n hook_message_dropped(droppeds).await;\n\n (SubRelationsMap::default(), subs_size)\n\n }\n\n Ok(relations_map) => relations_map,\n\n }\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 92, "score": 91843.73459433779 }, { "content": " log::debug!(\"{:?} offline info: {:?}\", id, o);\n\n HookResult::GrpcMessageReply(Ok(MessageReply::Kick(o)))\n\n }\n\n Err(e) => 
HookResult::GrpcMessageReply(Err(e)),\n\n }\n\n }\n\n Err(e) => {\n\n log::warn!(\"{:?}, try_lock error, {:?}\", id, e);\n\n HookResult::GrpcMessageReply(Err(e))\n\n }\n\n };\n\n return (false, Some(new_acc));\n\n }\n\n Message::NumberOfClients => {\n\n let new_acc = HookResult::GrpcMessageReply(Ok(MessageReply::NumberOfClients(\n\n //self.shared.inner().clients().await,\n\n Runtime::instance().stats.connections.count() as usize,\n\n )));\n\n return (false, Some(new_acc));\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/handler.rs", "rank": 93, "score": 91843.4492255852 }, { "content": " match MessageReply::BrokerInfo(broker_info).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n\n Ok(Message::NodeInfo) => {\n\n let node_info = Runtime::instance().node.node_info().await;\n\n match MessageReply::NodeInfo(node_info).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 94, "score": 91842.29230753217 }, { "content": " \"HookHandler, Message::Disconnected, raft mailbox send error, {:?}\",\n\n e\n\n );\n\n }\n\n });\n\n }\n\n }\n\n\n\n Parameter::SessionTerminated(_s, c, _r) => {\n\n let msg = Message::SessionTerminated { id: c.id.clone() }.encode().unwrap();\n\n let raft_mailbox = self.raft_mailbox.clone();\n\n tokio::spawn(async move {\n\n if let Err(e) = retry(BACKOFF_STRATEGY.clone(), || async {\n\n let msg = msg.clone();\n\n let mailbox = raft_mailbox.clone();\n\n let res = async move { mailbox.send_proposal(msg).await }\n\n .spawn(task_exec_queue())\n\n .result()\n\n .await\n\n .map_err(|_| {\n", "file_path": 
"rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 95, "score": 91841.56195349086 }, { "content": " ))),\n\n }\n\n }\n\n Ok(Message::ClientSearch(q)) => {\n\n match MessageReply::ClientSearch(clients::search(&q).await).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n\n ))),\n\n }\n\n }\n\n Ok(Message::ClientGet { clientid }) => {\n\n match MessageReply::ClientGet(clients::get(clientid).await).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(GrpcMessageReply::Error(\n\n e.to_string(),\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/handler.rs", "rank": 96, "score": 91840.69496628616 }, { "content": " MqttError::from(\n\n \"Handler::hook(Message::SessionTerminated), task execution failure\",\n\n )\n\n })?\n\n .map_err(|e| MqttError::from(e.to_string()))?;\n\n Ok(res)\n\n })\n\n .await\n\n {\n\n log::warn!(\n\n \"HookHandler, Message::SessionTerminated, raft mailbox send error, {:?}\",\n\n e\n\n );\n\n }\n\n });\n\n }\n\n\n\n Parameter::GrpcMessageReceived(typ, msg) => {\n\n log::debug!(\"GrpcMessageReceived, type: {}, msg: {:?}\", typ, msg);\n\n if self.shared.message_type != *typ {\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 97, "score": 91840.17414685497 }, { "content": " Err(e) => {\n\n log::error!(\"Message::decode, error: {:?}\", e);\n\n HookResult::GrpcMessageReply(Ok(MessageReply::Error(e.to_string())))\n\n }\n\n Ok(RaftGrpcMessage::GetRaftStatus) => {\n\n let raft_mailbox = self.raft_mailbox.clone();\n\n match raft_mailbox.status().await {\n\n Ok(status) => {\n\n match RaftGrpcMessageReply::GetRaftStatus(status).encode() {\n\n Ok(ress) => {\n\n HookResult::GrpcMessageReply(Ok(MessageReply::Data(ress)))\n\n }\n\n Err(e) => HookResult::GrpcMessageReply(Ok(MessageReply::Error(\n\n 
e.to_string(),\n\n ))),\n\n }\n\n }\n\n Err(e) => {\n\n HookResult::GrpcMessageReply(Ok(MessageReply::Error(e.to_string())))\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 98, "score": 91840.00446846745 }, { "content": " }\n\n }\n\n };\n\n return (false, Some(new_acc));\n\n }\n\n _ => {\n\n log::error!(\"unimplemented, {:?}\", param)\n\n }\n\n }\n\n }\n\n _ => {\n\n log::error!(\"unimplemented, {:?}\", param)\n\n }\n\n }\n\n (true, acc)\n\n }\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/handler.rs", "rank": 99, "score": 91834.09825996686 } ]
Rust
src/scope/lefthandsideexpr.rs
farodin91/js-parser-rs
8fdfe5b200788ec997d1446e1c44859a126493b7
use error::JsResult; use lexer::enums::{TokenType}; use scope::parser::{Parser, Item}; macro_rules! wait { ($expr:expr) => (match $expr { Item::None => (), Item::Item => return Ok(Item::Item), }) } macro_rules! none { ($expr:expr) => (match $expr { Item::None => return Ok(Item::None), Item::Item => (), }) } impl Parser { pub fn parse_left_hand_side_expr(&mut self) -> JsResult<Item> { println!("parse_left_hand_side_expr {:?}", self.peek()); wait!(try!(self.parse_call_expr())); self.parse_new_expr() } pub fn parse_new_expr(&mut self) -> JsResult<Item> { println!("parse_new_expr {:?}", self.peek()); wait!(try!(self.parse_member_expr())); if try!(self.consume(TokenType::New)) { try!(self.parse_new_expr()); Ok(Item::Item) } else { Ok(Item::None) } } pub fn parse_super_prop(&mut self) -> JsResult<Item> { if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } if try!(self.consume(TokenType::LeftBracket)) { try!(self.expect(TokenType::RightBracket)); } else { try!(self.expect(TokenType::Point)); try!(self.expect_identifier()); } Ok(Item::Item) } pub fn parse_meta_prop(&mut self) -> JsResult<Item> { try!(self.expect(TokenType::New)); try!(self.expect(TokenType::Point)); try!(self.expect(TokenType::Target)); Ok(Item::Item) } pub fn bump_new_and_member_expr_and_arguments(&mut self) -> JsResult<Item> { println!("bump_new_and_member_expr_and_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::New)) { return Ok(Item::None) } try!(self.parse_member_expr()); try!(self.parse_arguments()); Ok(Item::Item) } pub fn parse_arguments(&mut self) -> JsResult<Item> { println!("parse_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::LeftParen)) { return Ok(Item::None) } if !try!(self.consume(TokenType::ThreePoints)) { try!(self.parse_expr()); try!(self.consume(TokenType::ThreePoints)); } try!(self.consume_all_lineterminates()); try!(self.expect(TokenType::RightParen)); Ok(Item::Item) } pub fn parse_member(&mut self) -> JsResult<Item> { 
println!("parse_member {:?}", self.peek()); if try!(self.consume(TokenType::LeftBracket)) { try!(self.parse_expr()); try!(self.expect(TokenType::RightBracket)); return Ok(Item::Item) } if try!(self.consume(TokenType::Point)) { try!(self.expect_identifier_name()); return Ok(Item::Item) } Ok(Item::None) } pub fn member_expr_and_members(&mut self) -> JsResult<Item> { println!("member_expr_and_members {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_member() } pub fn parse_member_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_member_expr {:?} {:?}", self.peek(), first); match try!(self.parse_primary_expr()) { Item::Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_prop()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } pub fn parse_super_call(&mut self) -> JsResult<Item> { println!("parse_super_call {:?}", self.peek()); if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } try!(self.parse_arguments()); Ok(Item::Item) } pub fn member_and_arguments(&mut self) -> JsResult<Item> { println!("member_and_arguments {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_arguments() } pub fn parse_call_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_call_expr {:?} {:?}", self.peek(), first); match try!(self.member_and_arguments()) { Item::Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_call()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } match try!(self.parse_arguments()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } }
use error::JsResult; use lexer::enums::{TokenType}; use scope::parser::{Parser, Item}; macro_rules! wait { ($expr:expr) => (match $expr { Item::None => (), Item::Item => return Ok(Item::Item), }) } macro_rules! none { ($expr:expr) => (match $expr { Item::None => return Ok(Item::None), Item::Item => (), }) } impl Parser { pub fn parse_left_hand_side_expr(&mut self) -> JsResult<Item> { println!("parse_left_hand_side_expr {:?}", self.peek()); wait!(try!(self.parse_call_expr())); self.parse_new_expr() } pub fn parse_new_expr(&mut self) -> JsResult<Item> { println!("parse_new_expr {:?}", self.peek()); wait!(try!(self.parse_member_expr())); if try!(self.consume(TokenType::New)) { try!(self.parse_new_expr()); Ok(Item::Item) } else { Ok(Item::None) } } pub fn parse_super_prop(&mut self) -> JsResult<Item> { if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } if try!(self.consume(TokenType::LeftBracket)) { try!(self.expect(TokenType::RightBracket)); } else { try!(self.expect(TokenType::Point)); try!(self.expect_identifier()); } Ok(Item::Item) } pub fn parse_meta_prop(&mut self) -> JsResult<Item> { try!(self.expect(TokenType::New)); try!(self.expect(TokenType::Point)); try!(self.expect(TokenType::Target)); Ok(Item::Item) } pub fn bump_new_and_member_expr_and_arguments(&mut self) -> JsResult<Item> { println!("bump_new_and_member_expr_and_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::New)) { return Ok(Item::None) } try!(self.parse_member_expr()); try!(self.parse_arguments()); Ok(Item::Item) } pub fn parse_arguments(&mut self) -> JsResult<Item> { println!("parse_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::LeftParen)) { return Ok(Item::None) } if !try!(self.consume(TokenType::ThreePoints)) { try!(self.parse_expr()); try!(self.consume(TokenType::ThreePoints)); } try!(self.consume_all_lineterminates()); try!(self.expect(TokenType::RightParen)); Ok(Item::Item) } pub fn parse_member(&mut self) -> JsResult<Item> { 
println!("parse_member {:?}", self.peek()); if try!(self.consume(TokenType::LeftBracket)) { try!(self.parse_expr()); try!(self.expect(TokenType::RightBracket)); return Ok(Item::Item) } if try!(self.consume(TokenType::Point)) { try!(self.expect_identifier_name()); return Ok(Item::Item
Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_prop()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } pub fn parse_super_call(&mut self) -> JsResult<Item> { println!("parse_super_call {:?}", self.peek()); if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } try!(self.parse_arguments()); Ok(Item::Item) } pub fn member_and_arguments(&mut self) -> JsResult<Item> { println!("member_and_arguments {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_arguments() } pub fn parse_call_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_call_expr {:?} {:?}", self.peek(), first); match try!(self.member_and_arguments()) { Item::Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_call()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } match try!(self.parse_arguments()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } }
) } Ok(Item::None) } pub fn member_expr_and_members(&mut self) -> JsResult<Item> { println!("member_expr_and_members {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_member() } pub fn parse_member_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_member_expr {:?} {:?}", self.peek(), first); match try!(self.parse_primary_expr()) { Item::
random
[ { "content": "pub fn parse<T, I>(iter: T) -> Result<Vec<TokenType>, ErrorType> where\n\n T: IntoIterator<Item = char, IntoIter = I> + Sized,\n\n I: Iterator<Item = char> + 'static {\n\n let state = &mut LexerState::new(Box::new(iter.into_iter()));\n\n match state.parse() {\n\n Ok(_)=> (),\n\n Err(err) => return Err(err.error_type),\n\n }\n\n Ok(state.tokens().iter().map(|token| { token.clone().token }).collect())\n\n}\n", "file_path": "src/lib.rs", "rank": 0, "score": 31436.780620868252 }, { "content": "#[test]\n\nfn test_empty() {\n\n assert_eq!(js_parser_rs::parse(\"\".chars()), Ok(vec![]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 1, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_semicolon() {\n\n assert_eq!(js_parser_rs::parse(\";\".chars()), Ok(vec![TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\";;;\".chars()), Ok(vec![TokenType::Semicolon,TokenType::Semicolon,TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\";;;;\".chars()), Ok(vec![TokenType::Semicolon,TokenType::Semicolon,TokenType::Semicolon,TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\";;\".chars()), Ok(vec![TokenType::Semicolon,TokenType::Semicolon]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 2, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_regex() {\n\n assert_eq!(js_parser_rs::parse(\"= /ab+b/g;\".chars()), Ok(vec![TokenType::Equal, TokenType::Literal(LiteralType::Regex(String::from(\"ab+b\"), RegexIdentifier::Global)),TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\"( /ab+b/g)\".chars()), Ok(vec![TokenType::LeftParen, TokenType::Literal(LiteralType::Regex(String::from(\"ab+b\"), RegexIdentifier::Global)),TokenType::RightParen]));\n\n assert_eq!(js_parser_rs::parse(\"= /ab+b/;\".chars()), Ok(vec![TokenType::Equal, TokenType::Literal(LiteralType::Regex(String::from(\"ab+b\"), RegexIdentifier::None)),TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\": /ab+b/g;\".chars()), 
Ok(vec![TokenType::Colon, TokenType::Literal(LiteralType::Regex(String::from(\"ab+b\"), RegexIdentifier::Global)),TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\", /ab+b/g;\".chars()), Ok(vec![TokenType::Comma, TokenType::Literal(LiteralType::Regex(String::from(\"ab+b\"), RegexIdentifier::Global)),TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\", /*a*/ /ab+b/g;\".chars()), Ok(vec![TokenType::Comma, TokenType::Literal(LiteralType::Regex(String::from(\"ab+b\"), RegexIdentifier::Global)),TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\", /ab\\\\/b/g\".chars()), Ok(vec![TokenType::Comma, TokenType::Literal(LiteralType::Regex(String::from(\"ab/b\"), RegexIdentifier::Global))]));\n\n assert_eq!(js_parser_rs::parse(\", /ab\\\\\\\\b/g\".chars()), Ok(vec![TokenType::Comma, TokenType::Literal(LiteralType::Regex(String::from(\"ab\\\\b\"), RegexIdentifier::Global))]));\n\n assert_eq!(js_parser_rs::parse(\", /^h\\\\d$/i\".chars()), Ok(vec![TokenType::Comma, TokenType::Literal(LiteralType::Regex(String::from(\"^h\\\\d$\"), RegexIdentifier::Ignore))]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 3, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_raw() {\n\n assert_eq!(js_parser_rs::parse(\"Hello\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\"))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello\\n\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\")),TokenType::LineTerminate]));\n\n assert_eq!(js_parser_rs::parse(\"Hello\\r\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\")),TokenType::LineTerminate]));\n\n assert_eq!(js_parser_rs::parse(\"Hello\\u{a0}\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\"))]));\n\n //assert_eq!(js_parser_rs::parse(\"Hello\\u{9}\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\"))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello\\u{b}\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\"))]));\n\n 
assert_eq!(js_parser_rs::parse(\"Hello\\u{c}\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\"))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello\\t\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\"))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello \".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\"))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello=\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\")),TokenType::Equal]));\n\n assert_eq!(js_parser_rs::parse(\"Hello('sd'\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\")),TokenType::LeftParen,TokenType::Literal(LiteralType::String(String::from(\"sd\")))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello|Hello\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\")),TokenType::OrBitwise,TokenType::Identifier(String::from(\"Hello\"))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello.Hello\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\")),TokenType::Point,TokenType::Identifier(String::from(\"Hello\"))]));\n\n assert_eq!(js_parser_rs::parse(\"Hello/Hello\".chars()), Ok(vec![TokenType::Identifier(String::from(\"Hello\")),TokenType::Divide,TokenType::Identifier(String::from(\"Hello\"))]));\n\n\n\n assert_eq!(js_parser_rs::parse(\"\\\\u005f\\\\u005f\\\\u0076\\\\u0061\\\\u0072\".chars()), Ok(vec![TokenType::Identifier(String::from(\"__var\"))]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 4, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_punctuator() {\n\n assert_eq!(js_parser_rs::parse(\"{\".chars()), Ok(vec![TokenType::LeftBrace]));\n\n assert_eq!(js_parser_rs::parse(\"}\".chars()), Ok(vec![TokenType::RightBrace]));\n\n assert_eq!(js_parser_rs::parse(\"[\".chars()), Ok(vec![TokenType::LeftBracket]));\n\n assert_eq!(js_parser_rs::parse(\"]\".chars()), Ok(vec![TokenType::RightBracket]));\n\n assert_eq!(js_parser_rs::parse(\"(\".chars()), Ok(vec![TokenType::LeftParen]));\n\n 
assert_eq!(js_parser_rs::parse(\")\".chars()), Ok(vec![TokenType::RightParen]));\n\n assert_eq!(js_parser_rs::parse(\"+\".chars()), Ok(vec![TokenType::Plus]));\n\n assert_eq!(js_parser_rs::parse(\"+=\".chars()), Ok(vec![TokenType::PlusAssign]));\n\n assert_eq!(js_parser_rs::parse(\"-\".chars()), Ok(vec![TokenType::Minus]));\n\n assert_eq!(js_parser_rs::parse(\"-=\".chars()), Ok(vec![TokenType::MinusAssign]));\n\n assert_eq!(js_parser_rs::parse(\"<\".chars()), Ok(vec![TokenType::SmallThan]));\n\n assert_eq!(js_parser_rs::parse(\">\".chars()), Ok(vec![TokenType::GreaterThan]));\n\n assert_eq!(js_parser_rs::parse(\"!\".chars()), Ok(vec![TokenType::Invert]));\n\n assert_eq!(js_parser_rs::parse(\"=>\".chars()), Ok(vec![TokenType::Lamda]));\n\n assert_eq!(js_parser_rs::parse(\".\".chars()), Ok(vec![TokenType::Point]));\n\n assert_eq!(js_parser_rs::parse(\"...\".chars()), Ok(vec![TokenType::ThreePoints]));\n\n assert_eq!(js_parser_rs::parse(\":\".chars()), Ok(vec![TokenType::Colon]));\n\n assert_eq!(js_parser_rs::parse(\"=\".chars()), Ok(vec![TokenType::Equal]));\n\n assert_eq!(js_parser_rs::parse(\"++\".chars()), Ok(vec![TokenType::Increment]));\n", "file_path": "tests/lexer.rs", "rank": 5, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_terminate() {\n\n assert_eq!(js_parser_rs::parse(\"\\n\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"\\r\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"\\n ;\".chars()), Ok(vec![TokenType::Semicolon]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 6, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_literal() {\n\n assert_eq!(js_parser_rs::parse(\"true\".chars()), Ok(vec![TokenType::Literal(LiteralType::Boolean(true))]));\n\n assert_eq!(js_parser_rs::parse(\"false\".chars()), Ok(vec![TokenType::Literal(LiteralType::Boolean(false))]));\n\n assert_eq!(js_parser_rs::parse(\"null\".chars()), Ok(vec![TokenType::Literal(LiteralType::Null)]));\n\n}\n\n\n", "file_path": 
"tests/lexer.rs", "rank": 7, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_comment() {\n\n assert_eq!(js_parser_rs::parse(\"/*Hello World!\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"// */ sdfsd\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"/*Hello */;\".chars()), Ok(vec![TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\"/*Hello **/;\".chars()), Ok(vec![TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\"/**Hello **/;\".chars()), Ok(vec![TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\"/*Hello * */;\".chars()), Ok(vec![TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\"//Hello \\n;\".chars()), Ok(vec![TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\"/*Hello \\n;*/\".chars()), Ok(vec![]));\n\n //assert_eq!(js_parser_rs::parse(\"// IE \\\\r a\".chars()), Ok(vec![TokenType::CommentLiteral(String::from(\" IE \\r a\"))]));\n\n}\n\n\n\n\n", "file_path": "tests/lexer.rs", "rank": 8, "score": 29317.69890011187 }, { "content": "#[test]\n\nfn test_keyword() {\n\n //assert_eq!(js_parser_rs::parse(\"let\".chars()), Ok(vec![TokenType::Let)]));\n\n\n\n assert_eq!(js_parser_rs::parse(\"var\".chars()), Ok(vec![TokenType::Var]));\n\n assert_eq!(js_parser_rs::parse(\"if\".chars()), Ok(vec![TokenType::If]));\n\n assert_eq!(js_parser_rs::parse(\"else\".chars()), Ok(vec![TokenType::Else]));\n\n assert_eq!(js_parser_rs::parse(\"do\".chars()), Ok(vec![TokenType::Do]));\n\n assert_eq!(js_parser_rs::parse(\"typeof\".chars()), Ok(vec![TokenType::Typeof]));\n\n assert_eq!(js_parser_rs::parse(\"switch\".chars()), Ok(vec![TokenType::Switch]));\n\n assert_eq!(js_parser_rs::parse(\"catch\".chars()), Ok(vec![TokenType::Catch]));\n\n assert_eq!(js_parser_rs::parse(\"try\".chars()), Ok(vec![TokenType::Try]));\n\n assert_eq!(js_parser_rs::parse(\"instanceof\".chars()), Ok(vec![TokenType::Instanceof]));\n\n assert_eq!(js_parser_rs::parse(\"export\".chars()), Ok(vec![TokenType::Export]));\n\n 
assert_eq!(js_parser_rs::parse(\"return\".chars()), Ok(vec![TokenType::Return]));\n\n assert_eq!(js_parser_rs::parse(\"void\".chars()), Ok(vec![TokenType::Void]));\n\n assert_eq!(js_parser_rs::parse(\"extends\".chars()), Ok(vec![TokenType::Extends]));\n\n assert_eq!(js_parser_rs::parse(\"const\".chars()), Ok(vec![TokenType::Const]));\n\n assert_eq!(js_parser_rs::parse(\"finally\".chars()), Ok(vec![TokenType::Finally]));\n\n assert_eq!(js_parser_rs::parse(\"super\".chars()), Ok(vec![TokenType::Super]));\n\n assert_eq!(js_parser_rs::parse(\"with\".chars()), Ok(vec![TokenType::With]));\n", "file_path": "tests/lexer.rs", "rank": 9, "score": 29317.69890011187 }, { "content": "use error::JsResult;\n\nuse error::error::{Error, ErrorType, SyntaxErrorType};\n\nuse lexer::enums::{TokenType};\n\nuse scope::parser::{Parser, Item};\n\n\n\nmacro_rules! wait {\n\n ($expr:expr) => (match $expr {\n\n Item::Item => return Ok(Item::Item),\n\n Item::None => (),\n\n })\n\n}\n\n\n\n\n\nmacro_rules! none {\n\n ($expr:expr) => (match $expr {\n\n Item::None => return Ok(Item::None),\n\n Item::Item => (),\n\n })\n\n}\n\n\n", "file_path": "src/scope/expr.rs", "rank": 10, "score": 28684.970250927112 }, { "content": " }\n\n\n\n pub fn bump_and_return_item(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_primary_expr(&mut self) -> JsResult<Item> {\n\n println!(\"parse_primary_expr {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::This) => self.bump_and_return_item(),\n\n Some(TokenType::Identifier(_)) => self.bump_and_return_item(),\n\n Some(TokenType::Literal(_)) => self.bump_and_return_item(),\n\n Some(TokenType::LeftBracket) => self.parse_array_literal(),\n\n Some(TokenType::LeftBrace) => self.parse_object_literal(),\n\n Some(TokenType::Function) => self.parse_function_expr(),\n\n Some(TokenType::Yield) => self.parse_yield_expr(),\n\n Some(TokenType::LeftParen) => 
self.parse_cover_parenthesized_expression_and_arrow_parameter_list(),\n\n _ => Ok(Item::None)\n\n }\n", "file_path": "src/scope/expr.rs", "rank": 11, "score": 28676.383898709275 }, { "content": " }\n\n\n\n pub fn dump_and_parse_unary_expr(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n self.parse_unary_expr(Item::None)\n\n }\n\n\n\n pub fn parse_unary_expr(&mut self, first: Item) -> JsResult<Item> {\n\n println!(\"parse_unary_expr {:?}\", self.peek());\n\n wait!(try!(self.parse_update_expr(first)));\n\n match self.peek() {\n\n Some(TokenType::Delete) => self.dump_and_parse_unary_expr(),\n\n Some(TokenType::Void) => self.dump_and_parse_unary_expr(),\n\n Some(TokenType::Typeof) => self.dump_and_parse_unary_expr(),\n\n Some(TokenType::Plus) => self.dump_and_parse_unary_expr(),\n\n Some(TokenType::Minus) => self.dump_and_parse_unary_expr(),\n\n Some(TokenType::Tilde) => self.dump_and_parse_unary_expr(),\n\n Some(TokenType::Invert) => self.dump_and_parse_unary_expr(),\n\n _ => Ok(Item::None)\n\n }\n", "file_path": "src/scope/expr.rs", "rank": 12, "score": 28675.76221163443 }, { "content": " }\n\n pub fn parse_update_expr(&mut self, first: Item) -> JsResult<Item> {\n\n println!(\"parse_update_expr {:?}\", self.peek());\n\n let left = match first {\n\n Item::Item => Item::Item,\n\n Item::None => try!(self.parse_left_hand_side_expr())\n\n };\n\n match left {\n\n Item::None => {\n\n if try!(self.consume(TokenType::Increment)) {\n\n return self.parse_unary_expr(Item::None);\n\n }\n\n if try!(self.consume(TokenType::Decrement)) {\n\n return self.parse_unary_expr(Item::None);\n\n }\n\n return Ok(Item::None)\n\n },\n\n Item::Item => {\n\n if try!(self.consume(TokenType::Increment)) {\n\n return Ok(Item::Item)\n", "file_path": "src/scope/expr.rs", "rank": 13, "score": 28673.42130704926 }, { "content": " }\n\n\n\n pub fn parse_expr_stmt(&mut self) -> JsResult<Item> {\n\n println!(\"parse_expr_stmt {:?}\", self.peek());\n\n let result = match self.peek() {\n\n 
Some(TokenType::LeftBrace) => Ok(Item::None),\n\n Some(TokenType::Function) => Ok(Item::None),\n\n Some(TokenType::Class) => Ok(Item::None),\n\n Some(TokenType::Let) => {\n\n if self.peek_at(1) == Some(TokenType::LeftBracket) {\n\n Ok(Item::None)\n\n } else {\n\n Ok(Item::Item)\n\n }\n\n },\n\n Some(_) => Ok(Item::Item),\n\n None => Ok(Item::None),\n\n };\n\n none!(try!(result));\n\n try!(self.parse_expr());\n", "file_path": "src/scope/expr.rs", "rank": 14, "score": 28672.286074877888 }, { "content": "impl Parser {\n\n pub fn expect_identifier(&mut self) -> JsResult<()> {\n\n println!(\"expect_identifier {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::Identifier(_)) => {\n\n try!(self.bump());\n\n Ok(())\n\n }\n\n _ => Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedEOF), 0, 0, None))\n\n }\n\n }\n\n\n\n pub fn expect_identifier_name(&mut self) -> JsResult<()> {\n\n println!(\"expect_identifier {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::Identifier(_)) => {\n\n try!(self.bump());\n\n Ok(())\n\n }\n\n Some(TokenType::Get) => {\n", "file_path": "src/scope/expr.rs", "rank": 15, "score": 28669.346333747424 }, { "content": " pub fn parse_cover_parenthesized_expression_and_arrow_parameter_list(&mut self) -> JsResult<Item> {\n\n println!(\"parse_cover_parenthesized_expression_and_arrow_parameter_list {:?}\", self.peek());\n\n try!(self.expect(TokenType::LeftParen));\n\n if !try!(self.consume(TokenType::ThreePoints)) {\n\n try!(self.parse_expr());\n\n try!(self.consume(TokenType::ThreePoints));\n\n }\n\n try!(self.consume_all_lineterminates());\n\n try!(self.expect(TokenType::RightParen));\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_assign_expr(&mut self) -> JsResult<Item> {\n\n println!(\"parse_assign_expr {:?}\", self.peek());\n\n match try!(self.parse_left_hand_side_expr()) {\n\n Item::Item => {\n\n println!(\" parse_assign_expr {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::DivideAssign) 
|\n\n Some(TokenType::ExpAssign) |\n", "file_path": "src/scope/expr.rs", "rank": 16, "score": 28667.570894219927 }, { "content": " Some(TokenType::LeftShiftAssign) |\n\n Some(TokenType::ModAssign) |\n\n Some(TokenType::PlusAssign) |\n\n Some(TokenType::MinusAssign) |\n\n Some(TokenType::MultipleAssign) |\n\n Some(TokenType::OrBitwiseAssign) |\n\n Some(TokenType::XorAssign) |\n\n Some(TokenType::AndBitwiseAssign) |\n\n Some(TokenType::Equal) => {\n\n try!(self.bump());\n\n self.parse_assign_expr()\n\n }\n\n _ => {\n\n self.parse_conditional_expr(Item::Item)\n\n }\n\n }\n\n }\n\n Item::None => {\n\n wait!(try!(self.parse_conditional_expr(Item::None)));\n\n self.parse_yield_expr()\n", "file_path": "src/scope/expr.rs", "rank": 17, "score": 28667.003074706343 }, { "content": " }\n\n if try!(self.consume(TokenType::Decrement)) {\n\n return Ok(Item::Item)\n\n }\n\n return Ok(Item::Item)\n\n }\n\n }\n\n }\n\n\n\n pub fn parse_conditional_expr(&mut self, first: Item) -> JsResult<Item> {\n\n println!(\"parse_conditional_expr {:?}\", self.peek());\n\n none!(try!(self.parse_logical_expr(first)));\n\n if try!(self.consume(TokenType::QuestionMark)) {\n\n try!(self.parse_assign_expr());\n\n try!(self.expect(TokenType::Colon));\n\n try!(self.parse_assign_expr());\n\n }\n\n Ok(Item::Item)\n\n }\n\n\n", "file_path": "src/scope/expr.rs", "rank": 18, "score": 28666.79854277251 }, { "content": " if try!(self.consume(TokenType::LineTerminate)) {\n\n return Ok(Item::Item)\n\n }\n\n try!(self.expect(TokenType::Semicolon));\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_yield_expr(&mut self) -> JsResult<Item> {\n\n println!(\"parse_yield_expr {:?}\", self.peek());\n\n if !try!(self.consume(TokenType::Yield)) {\n\n return Ok(Item::None)\n\n }\n\n if try!(self.consume(TokenType::Semicolon)) {\n\n Ok(Item::Item)\n\n } else {\n\n try!(self.consume(TokenType::Multiple));\n\n try!(self.parse_assign_expr());\n\n try!(self.expect(TokenType::Semicolon));\n\n Ok(Item::Item)\n\n }\n", "file_path": 
"src/scope/expr.rs", "rank": 19, "score": 28666.2684690326 }, { "content": " }\n\n }\n\n }\n\n\n\n pub fn parse_expr(&mut self) -> JsResult<Item> {\n\n try!(self.consume_all_lineterminates());\n\n println!(\"parse_expr {:?}\", self.peek());\n\n try!(self.parse_assign_expr());\n\n while try!(self.consume(TokenType::Comma)) {\n\n try!(self.consume_all_lineterminates());\n\n try!(self.parse_assign_expr());\n\n }\n\n Ok(Item::Item)\n\n }\n\n}\n", "file_path": "src/scope/expr.rs", "rank": 20, "score": 28662.193931288126 }, { "content": " try!(self.bump());\n\n Ok(())\n\n }\n\n Some(TokenType::Set) => {\n\n try!(self.bump());\n\n Ok(())\n\n }\n\n _ => Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedEOF), 0, 0, None))\n\n }\n\n }\n\n\n\n pub fn consume_identifier(&mut self) -> JsResult<bool> {\n\n println!(\"consume_identifier {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::Identifier(_)) => {\n\n try!(self.bump());\n\n Ok(true)\n\n }\n\n _ => Ok(false)\n\n }\n", "file_path": "src/scope/expr.rs", "rank": 21, "score": 28659.193378977627 }, { "content": " pub fn parse_continue(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n if try!(self.consume(TokenType::Semicolon)) {\n\n Ok(Item::Item)\n\n } else {\n\n match self.peek() {\n\n Some(TokenType::Identifier(_)) => (),\n\n Some(t) => {\n\n return Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::Unexpected(t)), 0, 0, None))\n\n }\n\n None => {\n\n return Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedEOF), 0, 0, None))\n\n }\n\n }\n\n try!(self.expect(TokenType::Semicolon));\n\n Ok(Item::Item)\n\n }\n\n }\n\n\n\n pub fn parse_return(&mut self) -> JsResult<Item> {\n", "file_path": "src/scope/parser.rs", "rank": 22, "score": 28623.051374475453 }, { "content": " let then = try!(self.parse_stmt());\n\n match then {\n\n Item::Item => (),\n\n Item::None => try!(self.fatal(SyntaxErrorType::UnexpectedEOF))\n\n }\n\n\n\n if try!(self.consume(TokenType::Else)) {\n\n 
try!(self.parse_stmt());\n\n }\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_element_list(&mut self) -> JsResult<Item> {\n\n if !try!(self.consume(TokenType::ThreePoints)) {\n\n try!(self.parse_expr());\n\n try!(self.consume(TokenType::ThreePoints));\n\n }\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_array_literal(&mut self) -> JsResult<Item> {\n\n println!(\"parse_array_literal {:?}\", self.peek());\n\n try!(self.expect(TokenType::LeftBracket));\n\n try!(self.parse_element_list());\n\n try!(self.expect(TokenType::RightBracket));\n\n Ok(Item::Item)\n\n }\n\n}", "file_path": "src/scope/parser.rs", "rank": 23, "score": 28621.36850619264 }, { "content": " self.peek_at(0)\n\n }\n\n\n\n pub fn peek_at(&mut self, index: usize) -> Option<TokenType> {\n\n if self.index + index >= self.len {\n\n None\n\n } else {\n\n let token = self.tokens[self.index + index].clone();\n\n Some(token.token)\n\n }\n\n }\n\n\n\n pub fn from_tokens(tokens: Vec<Token>) -> JsResult<()> {\n\n let parser = &mut Parser::new(tokens);\n\n parser.push_scope();\n\n parser.parse_stmt_list()\n\n }\n\n\n\n pub fn parse_variable(&mut self) -> JsResult<Item> {\n\n println!(\"parse_variable {:?}\", self.peek());\n", "file_path": "src/scope/parser.rs", "rank": 24, "score": 28617.962128386167 }, { "content": " try!(self.bump());\n\n if try!(self.consume(TokenType::Semicolon)) {\n\n Ok(Item::Item)\n\n } else {\n\n let result = try!(self.parse_expr());\n\n match self.peek() {\n\n Some(TokenType::Semicolon) => try!(self.bump()),\n\n Some(TokenType::LineTerminate) => try!(self.bump()),\n\n Some(TokenType::RightBrace) => (),\n\n _ => ()\n\n }\n\n Ok(result)\n\n }\n\n }\n\n\n\n pub fn parse_with(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.expect(TokenType::LeftParen));\n\n try!( self.parse_expr());\n\n try!( self.expect(TokenType::RightParen));\n", "file_path": "src/scope/parser.rs", "rank": 25, "score": 28617.494435685432 }, { "content": "\n\n pub fn parse_variable_declaration(&mut self) -> 
JsResult<Item> {\n\n println!(\"parse_variable_declaration {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::Identifier(_)) => {\n\n try!(self.bump());\n\n\n\n return self.parse_initializer()\n\n }\n\n Some(TokenType::LeftBrace) => return Ok(Item::Item),\n\n Some(TokenType::LeftBracket) => return Ok(Item::Item),\n\n Some(t) => {\n\n try!(self.fatal(SyntaxErrorType::Unexpected(t)));\n\n }\n\n None => {\n\n try!(self.fatal(SyntaxErrorType::UnexpectedEOF));\n\n }\n\n }\n\n Ok(Item::Item)\n\n }\n", "file_path": "src/scope/parser.rs", "rank": 26, "score": 28616.96696003459 }, { "content": " try!(self.bump());\n\n\n\n try!(self.parse_variable_declaration());\n\n while try!(self.consume(TokenType::Comma)) {\n\n try!(self.parse_variable_declaration());\n\n }\n\n if !try!(self.consume(TokenType::LineTerminate)) {\n\n try!(self.expect(TokenType::Semicolon));\n\n }\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_initializer(&mut self) -> JsResult<Item> {\n\n println!(\"parse_initializer {:?}\", self.peek());\n\n if !try!(self.consume(TokenType::Equal)) {\n\n Ok(Item::None)\n\n } else {\n\n self.parse_assign_expr()\n\n }\n\n }\n", "file_path": "src/scope/parser.rs", "rank": 27, "score": 28616.93386115919 }, { "content": " self.parse_stmt()\n\n }\n\n\n\n pub fn parse_do(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_while(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.expect(TokenType::LeftParen));\n\n try!(self.parse_expr());\n\n try!(self.expect(TokenType::RightParen));\n\n self.parse_stmt()\n\n }\n\n\n\n pub fn parse_for(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n Ok(Item::Item)\n\n }\n", "file_path": "src/scope/parser.rs", "rank": 28, "score": 28614.005763835354 }, { "content": " Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_let(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n self.parse_binding_list()\n\n }\n\n\n\n pub fn parse_const(&mut self) -> JsResult<Item> {\n\n 
try!(self.bump());\n\n self.parse_binding_list()\n\n }\n\n\n\n pub fn parse_if(&mut self) -> JsResult<Item> {\n\n println!(\"parse_if {:?}\", self.peek());\n\n try!(self.bump());\n\n try!(self.expect(TokenType::LeftParen));\n\n try!(self.parse_expr());\n\n try!(self.expect(TokenType::RightParen));\n\n\n", "file_path": "src/scope/parser.rs", "rank": 29, "score": 28613.757877295237 }, { "content": " }\n\n\n\n self.fatal(SyntaxErrorType::Unexpected(next.token))\n\n }\n\n\n\n pub fn consume(&mut self, token: TokenType) -> JsResult<bool> {\n\n println!(\"consume: {:?} == {:?}\", token.clone(), self.peek());\n\n let matched = match self.peek() {\n\n None => false,\n\n Some(t) => t == token\n\n };\n\n\n\n if matched {\n\n try!(self.bump());\n\n }\n\n\n\n Ok(matched)\n\n }\n\n\n\n pub fn peek(&mut self) -> Option<TokenType> {\n", "file_path": "src/scope/parser.rs", "rank": 30, "score": 28613.487005565068 }, { "content": "\n\n pub fn parse_debugger(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.expect(TokenType::Semicolon));\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_break(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.consume_identifier());\n\n try!(self.expect(TokenType::Semicolon));\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_class(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_binding_list(&mut self) -> JsResult<Item> {\n", "file_path": "src/scope/parser.rs", "rank": 31, "score": 28609.758898835735 }, { "content": "use lexer::token::Token;\n\nuse lexer::enums::{TokenType};\n\nuse error::JsResult;\n\nuse error::error::{Error, ErrorType, SyntaxErrorType};\n\nuse std::iter::Peekable;\n\nuse std::vec::IntoIter;\n\n\n\npub type TokenPeekable = Peekable<Box<IntoIter<Token>>>;\n\n\n", "file_path": "src/scope/parser.rs", "rank": 32, "score": 28608.354993188143 }, { "content": " println!(\"next {:?}\", self.peek());\n\n self.index += 1;\n\n if self.index > self.len {\n\n 
Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedEOF), 0, 0, None))\n\n } else {\n\n Ok(self.tokens[self.index - 1].clone())\n\n }\n\n }\n\n\n\n pub fn fatal(&mut self, error: SyntaxErrorType) -> JsResult<()> {\n\n return Err(Error::new(ErrorType::SyntaxError(error), 0, 0, None))\n\n }\n\n\n\n\n\n pub fn expect(&mut self, token: TokenType) -> JsResult<()> {\n\n let next = try!(self.next());\n\n println!(\"expected: {:?} == {:?}\", token.clone(), next.clone());\n\n\n\n if next.token == token {\n\n return Ok(());\n", "file_path": "src/scope/parser.rs", "rank": 33, "score": 28607.952783989484 }, { "content": " index: 0,\n\n scopes: Vec::new()\n\n }\n\n }\n\n\n\n pub fn push_scope(&mut self) {\n\n self.scopes.push(Scope {})\n\n }\n\n\n\n pub fn bump(&mut self) -> JsResult<()> {\n\n println!(\"bump {:?}\", self.peek());\n\n self.index += 1;\n\n if self.index > self.len {\n\n Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedEOF), 0, 0, None))\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn next(&mut self) -> JsResult<Token> {\n", "file_path": "src/scope/parser.rs", "rank": 34, "score": 28607.784504326082 }, { "content": "#[test]\n\n#[should_panic]\n\nfn sould_panic_number() {\n\n println!(\"{:?}\", js_parser_rs::parse(\"0o394\".chars()));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 35, "score": 27336.856612036914 }, { "content": "#[test]\n\nfn test_useless_string() {\n\n assert_eq!(js_parser_rs::parse(\"+ \\\"-\\\\f]' ms=''>\\\" +\\n\".chars()), Ok(vec![TokenType::Plus,TokenType::Literal(LiteralType::String(String::from(\"-\\\\f]' ms=''>\"))),TokenType::Plus,TokenType::LineTerminate]));\n\n assert_eq!(js_parser_rs::parse(\"\\\"Hello World!\\\"\".chars()), Ok(vec![TokenType::Literal(LiteralType::String(String::from(\"Hello World!\")))]));\n\n assert_eq!(js_parser_rs::parse(\"\\\"Hello\".chars()), Err(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedEOF)));\n\n assert_eq!(js_parser_rs::parse(\"\\\"Hel{}\\\" \\\"Hello 
World!\\\"\".chars()),Ok(vec![TokenType::Literal(LiteralType::String(String::from(\"Hel{}\"))),TokenType::Literal(LiteralType::String(String::from(\"Hello World!\")))]));\n\n assert_eq!(js_parser_rs::parse(\"\\\"Hello\\\\\\\" World!\\\"\".chars()), Ok(vec![TokenType::Literal(LiteralType::String(String::from(\"Hello\\\" World!\")))]));\n\n assert_eq!(js_parser_rs::parse(\"'Hello World!'\".chars()), Ok(vec![TokenType::Literal(LiteralType::String(String::from(\"Hello World!\")))]));\n\n assert_eq!(js_parser_rs::parse(\"'Hello\\\\' World!'\".chars()), Ok(vec![TokenType::Literal(LiteralType::String(String::from(\"Hello' World!\")))]));\n\n assert_eq!(js_parser_rs::parse(\"\\\"Hello\\\\\\\\ World!\\\"\".chars()), Ok(vec![TokenType::Literal(LiteralType::String(String::from(\"Hello\\\\ World!\")))]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 36, "score": 27336.856612036914 }, { "content": "#[test]\n\nfn test_useless_number() {\n\n assert_eq!(js_parser_rs::parse(\"0\".chars()), Ok(vec![TokenType::Literal(LiteralType::Integer(0))]));\n\n assert_eq!(js_parser_rs::parse(\"0xabcdef0\".chars()), Ok(vec![TokenType::Literal(LiteralType::Integer(0xabcdef0))]));\n\n assert_eq!(js_parser_rs::parse(\"0xABCDEF0\".chars()), Ok(vec![TokenType::Literal(LiteralType::Integer(0xABCDEF0))]));\n\n assert_eq!(js_parser_rs::parse(\"0XABCDEF0\".chars()), Ok(vec![TokenType::Literal(LiteralType::Integer(0xABCDEF0))]));\n\n assert_eq!(js_parser_rs::parse(\"0o34\".chars()), Ok(vec![TokenType::Literal(LiteralType::Integer(0o34))]));\n\n assert_eq!(js_parser_rs::parse(\"0O34\".chars()), Ok(vec![TokenType::Literal(LiteralType::Integer(0o34))]));\n\n assert_eq!(js_parser_rs::parse(\"0.0\".chars()), Ok(vec![TokenType::Literal(LiteralType::Float(0.0))]));\n\n assert_eq!(js_parser_rs::parse(\"0.123\".chars()), Ok(vec![TokenType::Literal(LiteralType::Float(0.123))]));\n\n assert_eq!(js_parser_rs::parse(\"123456789.123\".chars()), 
Ok(vec![TokenType::Literal(LiteralType::Float(123456789.123))]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 37, "score": 27336.856612036914 }, { "content": "#[test]\n\nfn test_white_spaces() {\n\n assert_eq!(js_parser_rs::parse(\" \".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"\\u{a0}\".chars()), Ok(vec![]));\n\n //assert_eq!(js_parser_rs::parse(\"\\u{9}\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"\\u{b}\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"\\u{c}\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"\\t\".chars()), Ok(vec![]));\n\n assert_eq!(js_parser_rs::parse(\"; \".chars()), Ok(vec![TokenType::Semicolon]));\n\n assert_eq!(js_parser_rs::parse(\",\".chars()), Ok(vec![TokenType::Comma]));\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 38, "score": 27336.856612036914 }, { "content": "use error::JsResult;\n\nuse error::error::{ErrorType, SyntaxErrorType};\n\nuse lexer::enums::{LexerMode, TokenType, NumberType, StringType};\n\nuse lexer::state::{LexerState};\n\n\n\nimpl LexerState {\n\n fn start_punctuator(&mut self, t: TokenType) {\n\n self.update(LexerMode::Punctuator(t, 0));\n\n }\n\n\n\n pub fn parse_normal(&mut self, c: Option<char>) -> JsResult<bool> {\n\n let mut handled = true;\n\n match c {\n\n Some('a' ... 'z') | Some('A' ... 
'Z') | Some('_') | Some('$') => {\n\n self.update(LexerMode::Raw);\n\n self.reset_tmp();\n\n self.tmp_push(c.unwrap());\n\n }\n\n Some('\"') => {\n\n self.update(LexerMode::String(StringType::DoubleQuote));\n", "file_path": "src/lexer/mode/none.rs", "rank": 39, "score": 26363.71337364428 }, { "content": " Some('.') => self.start_punctuator(TokenType::Point),\n\n Some('|') => self.start_punctuator(TokenType::OrBitwise),\n\n Some('*') => self.start_punctuator(TokenType::Multiple),\n\n Some('&') => self.start_punctuator(TokenType::AndBitwise),\n\n Some('^') => self.start_punctuator(TokenType::Xor),\n\n Some('+') => self.start_punctuator(TokenType::Plus),\n\n Some('-') => self.start_punctuator(TokenType::Minus),\n\n Some('%') => self.start_punctuator(TokenType::Mod),\n\n Some('=') => self.start_punctuator(TokenType::Equal),\n\n Some('<') => self.start_punctuator(TokenType::SmallThan),\n\n Some('/') => self.start_punctuator(TokenType::Divide),\n\n Some('!') => self.start_punctuator(TokenType::Invert),\n\n Some('>') => self.start_punctuator(TokenType::GreaterThan),\n\n None => {\n\n self.update(LexerMode::EOF)\n\n }\n\n Some('\\\\') => {\n\n let unicode = self.read_unicode();\n\n match unicode {\n\n Some(c) => {\n", "file_path": "src/lexer/mode/none.rs", "rank": 40, "score": 26353.578248968905 }, { "content": " println!(\"{:?}\", c);\n\n self.overwrite_current_char_with_unicode(c);\n\n handled = false\n\n }\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line());\n\n }\n\n }\n\n }\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line());\n\n //self.update(LexerMode::EOF);\n\n }\n\n }\n\n Ok(handled)\n\n }\n\n}", "file_path": "src/lexer/mode/none.rs", "rank": 41, "score": 26350.789138734868 }, { "content": " self.reset_tmp();\n\n }\n\n Some('\\'') => {\n\n 
self.update(LexerMode::String(StringType::SingleQuote));\n\n self.reset_tmp();\n\n }\n\n Some('0') => {\n\n self.update(LexerMode::Number(NumberType::None));\n\n self.reset_tmp();\n\n self.tmp_push(c.unwrap());\n\n }\n\n Some('1'...'9') => {\n\n self.update(LexerMode::Number(NumberType::NoneLiteral));\n\n self.reset_tmp();\n\n self.tmp_push(c.unwrap());\n\n }\n\n Some('\\n') |\n\n Some('\\r') => try!(self.push(TokenType::LineTerminate)),\n\n Some(' ') |\n\n Some('\\t') |\n", "file_path": "src/lexer/mode/none.rs", "rank": 42, "score": 26350.68149650724 }, { "content": " Some('\\u{c}') |\n\n Some('\\u{b}') |\n\n Some('\\u{a0}') => {\n\n if self.last_char_is_unicode() {\n\n let c = self.current_char();\n\n let err = self.error(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedChar(c.unwrap())));\n\n return Err(err);\n\n }\n\n },\n\n Some(';') => try!(self.push(TokenType::Semicolon)),\n\n Some(',') => try!(self.push(TokenType::Comma)),\n\n Some('{') => try!(self.push(TokenType::LeftBrace)),\n\n Some('}') => try!(self.push(TokenType::RightBrace)),\n\n Some('[') => try!(self.push(TokenType::LeftBracket)),\n\n Some(']') => try!(self.push(TokenType::RightBracket)),\n\n Some('(') => try!(self.push(TokenType::LeftParen)),\n\n Some(')') => try!(self.push(TokenType::RightParen)),\n\n Some('~') => try!(self.push(TokenType::Tilde)),\n\n Some(':') => try!(self.push(TokenType::Colon)),\n\n Some('?') => try!(self.push(TokenType::QuestionMark)),\n", "file_path": "src/lexer/mode/none.rs", "rank": 43, "score": 26345.334388735337 }, { "content": "struct Scope {}\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub enum Item {\n\n Item,\n\n None\n\n}\n\n\n\npub struct Parser {\n\n tokens: Vec<Token>,\n\n len: usize,\n\n index: usize,\n\n scopes: Vec<Scope>,\n\n}\n\n\n\nimpl Parser {\n\n fn new(tokens: Vec<Token>) -> Parser {\n\n Parser {\n\n tokens: tokens.clone(),\n\n len: tokens.len(),\n", "file_path": "src/scope/parser.rs", "rank": 44, "score": 26266.974166706386 }, { "content": 
"#[allow(dead_code)]\n\nfn parse(file: &str) {\n\n let mut js = String::new();\n\n if !Path::new(file).exists() {\n\n println!(\"{:?} File not Found \", file);\n\n return\n\n }\n\n File::open(file).ok().unwrap().read_to_string(&mut js).ok();\n\n\n\n // || js.contains(\"es6id\") || js.contains(\"arrow-function\")\n\n if js.contains(\"negative: ReferenceError\") {\n\n println!(\"{:?} File excluded\", file);\n\n return\n\n }\n\n let syntax = if js.contains(\"negative: SyntaxError\") && !js.contains(\"eval\") {\n\n false\n\n } else {\n\n true\n\n };\n\n let context = &mut JsContext::new();\n\n let chars = context.parse(js);\n", "file_path": "tests/tc39.rs", "rank": 45, "score": 25868.685058073595 }, { "content": "#[test]\n\nfn test_parse_test_file() {\n\n let mut file = File::open(\"tests/js/test.js\").unwrap();\n\n let mut s = String::new();\n\n file.read_to_string(&mut s).unwrap();\n\n js_parser_rs::parse(OwningChars::new(s)).unwrap();\n\n //let a = js_parser_rs::parse(OwningChars::new(s)).unwrap();\n\n //assert_eq!(a, vec![])\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 46, "score": 25633.629015227016 }, { "content": "#[test]\n\nfn test_parse_typical_file() {\n\n let mut file = File::open(\"tests/js/jquery.js\").unwrap();\n\n let mut s = String::new();\n\n file.read_to_string(&mut s).unwrap();\n\n js_parser_rs::parse(OwningChars::new(s)).unwrap();\n\n //let a = js_parser_rs::parse(OwningChars::new(s)).unwrap();\n\n //assert_eq!(a, vec![])\n\n}\n\n\n\n\n", "file_path": "tests/lexer.rs", "rank": 47, "score": 25633.629015227016 }, { "content": "pub trait CodePos {\n\n fn location(&self) -> (u64, u32);\n\n}\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Clone)]\n\npub struct Error {\n\n pub error_type: ErrorType,\n\n line: u64,\n\n col: u32,\n\n expected: Option<&'static str>\n\n}\n\n\n\nimpl Error {\n\n pub fn new(etype: ErrorType, col: u32, line: u64, expected: Option<&'static str>) -> Error {\n\n Error {\n\n error_type: etype,\n\n line: line,\n\n col: col,\n\n 
expected: expected\n\n }\n\n }\n\n\n\n pub fn from_state<T>(etype: ErrorType, pos: &T, expected: Option<&'static str>) -> Error where T: CodePos {\n\n let p = pos.location();\n\n Error::new(etype, p.1, p.0, expected)\n\n }\n\n}", "file_path": "src/error/error.rs", "rank": 48, "score": 24735.409218894285 }, { "content": "var \\u005f\\u005f\\u0076\\u0061\\u0072 = 1;\n", "file_path": "tests/js/test.js", "rank": 49, "score": 15152.830020176008 }, { "content": "# js-parser-rs\n\nA javascript parser in rust (WIP)\n\n\n\n\n\n# Syntax\n\n\n\nReservedWord ::\n\n Keyword\n\n FutureReservedWord\n\n NullLiteral\n\n BooleanLiteral\n\n\n\nStatementList[Yield, Return] ://done\n\n StatementListItem[?Yield, ?Return]...\n\n\n\nStatementListItem[Yield, Return]://done\n\n Statement[?Yield, ?Return]\n\n Declaration[?Yield]\n\n\n\nDeclaration[Yield]://done\n\n HoistableDeclaration[?Yield, ~Default]\n\n ClassDeclaration[?Yield, ~Default]\n\n LexicalDeclaration[+In, ?Yield]\n\n\n\nLexicalDeclaration[In, Yield]://done\n\n LetOrConst BindingList[?In, ?Yield];\n\n\n\nLetOrConst://done\n\n let\n\n const\n\n\n\nBindingList[In, Yield]:\n\n LexicalBinding[?In, ?Yield]\n\n BindingList[?In, ?Yield] , LexicalBinding[?In, ?Yield]\n\n\n\nLexicalBinding[In, Yield]:\n\n BindingIdentifier[?Yield] Initializer[?In, ?Yield] opt\n\n BindingPattern[?Yield] Initializer[?In, ?Yield]\n\n\n\n\n\nStatement [Yield, Return]:\n\n BlockStatement[?Yield, ?Return]\n\n VariableStatement[?Yield]\n\n EmptyStatement\n\n ExpressionStatement[?Yield]\n\n IfStatement[?Yield, ?Return]\n\n BreakableStatement[?Yield, ?Return]\n\n ContinueStatement[?Yield]\n\n BreakStatement[?Yield]\n\n [+Return]ReturnStatement[?Yield]\n\n WithStatement[?Yield, ?Return]\n\n LabelledStatement[?Yield, ?Return]\n\n ThrowStatement[?Yield]\n\n TryStatement[?Yield, ?Return]\n\n DebuggerStatement\n\n\n\nEmptyStatement: ;//done\n\n\n\n\n\nExpressionStatement[Yield]:\n\n [lookahead ∉ { {, function, class, let [ }] Expression[+In, 
?Yield];\n\n\n\nExpression[In, Yield]:\n\n AssignmentExpression[?In, ?Yield]\n\n Expression[?In, ?Yield] , AssignmentExpression[?In, ?Yield]\n\n\n\nAssignmentExpression[In, Yield]:\n\n ConditionalExpression[?In, ?Yield]\n\n [+Yield]YieldExpression[?In]\n\n ArrowFunction[?In, ?Yield]\n\n LeftHandSideExpression[?Yield] = AssignmentExpression[?In, ?Yield]\n\n LeftHandSideExpression[?Yield] AssignmentOperator AssignmentExpression[?In, ?Yield]\n\n\n\nYieldExpression[In]://done\n\n yield\n\n yield[no LineTerminator here] AssignmentExpression[?In, +Yield]\n\n yield[no LineTerminator here] * AssignmentExpression[?In, +Yield]\n\n\n", "file_path": "README.md", "rank": 50, "score": 14486.879958262498 }, { "content": " with ( Expression[+In, ?Yield] ) Statement[?Yield, ?Return]\n\n\n\nLabelledStatement[Yield, Return]://done\n\n LabelIdentifier[?Yield] : LabelledItem[?Yield, ?Return]\n\n\n\nLabelledItem[Yield, Return]://done\n\n Statement[?Yield, ?Return]\n\n FunctionDeclaration[?Yield, ~Default]\n\n\n\nThrowStatement[Yield]://done\n\n throw [no LineTerminator here] Expression[+In, ?Yield] ;\n\n\n\nTryStatement[Yield, Return]://done\n\n try Block[?Yield, ?Return] Catch[?Yield, ?Return]\n\n try Block[?Yield, ?Return] Finally[?Yield, ?Return]\n\n try Block[?Yield, ?Return] Catch[?Yield, ?Return] Finally[?Yield, ?Return]\n\n\n\nCatch[Yield, Return]://done\n\n catch ( CatchParameter[?Yield] ) Block[?Yield, ?Return]\n\n\n\nFinally[Yield, Return]://done\n\n finally Block[?Yield, ?Return]\n\n\n\nCatchParameter[Yield]:\n\n BindingIdentifier[?Yield]\n\n BindingPattern[?Yield]\n\n\n\nDebuggerStatement: debugger ; //done\n\n\n\nDefaultClause[Yield, Return]:\n\n default : StatementList[?Yield, ?Return] opt\n\n\n\nForDeclaration[Yield]:\n\n LetOrConst ForBinding[?Yield]\n\n\n\nForBinding[Yield]:\n\n BindingIdentifier[?Yield]\n\n BindingPattern[?Yield]\n\n\n\nBindingPattern[Yield]:\n\n ObjectBindingPattern[?Yield]\n\n ArrayBindingPattern[?Yield]\n\n\n\n\n\nVariableStatement[Yield]://done\n\n 
var VariableDeclarationList[+In, ?Yield];\n\n\n\nVariableDeclarationList[In, Yield]:\n\n VariableDeclaration[?In, ?Yield]\n\n VariableDeclarationList[?In, ?Yield],VariableDeclaration[?In, ?Yield]\n\n\n\nVariableDeclaration[In, Yield]:\n\n BindingIdentifier[?Yield] Initializer[?In, ?Yield] opt\n\n BindingPattern[?Yield] Initializer[?In, ?Yield]\n\n\n\nBindingIdentifier[Yield]:\n\n Identifier\n\n [~Yield]yield\n\n\n\nLabelIdentifier[Yield]:\n\n Identifier\n\n [~Yield]yield\n\n\n\nInitializer[In, Yield]://done\n\n = AssignmentExpression[?In, ?Yield]\n\n\n\n\n\nIdentifierReference[Yield]:\n\n Identifier\n\n [~Yield]yield\n\n\n\nIdentifier:\n\n IdentifierName but not ReservedWord\n", "file_path": "README.md", "rank": 51, "score": 14482.990293341041 }, { "content": " SwitchStatement[?Yield, ?Return]\n\n\n\nIterationStatement[Yield, Return]:\n\n do Statement[?Yield, ?Return] while ( Expression[+In, ?Yield] ) ;\n\n while ( Expression[+In, ?Yield] ) Statement[?Yield, ?Return]\n\n for ( [lookahead ∉ { let [ }] Expression[~In, ?Yield] opt ; Expression[+In, ?Yield] opt ; Expression[+In, ?Yield] opt ) Statement[?Yield, ?Return]\n\n for ( var VariableDeclarationList[~In, ?Yield] ; Expression[+In, ?Yield] opt ; Expression[+In, ?Yield] opt ) Statement[?Yield, ?Return]\n\n for ( LexicalDeclaration[~In, ?Yield] Expression[+In, ?Yield] opt ; Expression[+In, ?Yield] opt ) Statement[?Yield, ?Return]\n\n for ( [lookahead ∉ { let [ }]LeftHandSideExpression[?Yield] in Expression[+In, ?Yield] ) Statement[?Yield, ?Return]\n\n for ( var ForBinding[?Yield] in Expression[+In, ?Yield])Statement[?Yield, ?Return]\n\n for ( ForDeclaration[?Yield] in Expression[+In, ?Yield])Statement[?Yield, ?Return]\n\n for ( [lookahead ≠ let]LeftHandSideExpression[?Yield] of AssignmentExpression[+In, ?Yield] ) Statement[?Yield, ?Return]\n\n for ( var ForBinding[?Yield] of AssignmentExpression[+In, ?Yield] ) Statement[?Yield, ?Return]\n\n for ( ForDeclaration[?Yield] of AssignmentExpression[+In, ?Yield] ) 
Statement[?Yield, ?Return]\n\n\n\n\n\nSwitchStatement[Yield, Return]://done\n\n switch ( Expression[+In, ?Yield] ) CaseBlock[?Yield, ?Return]\n\n\n\nCaseBlock[Yield, Return]://done\n\n { CaseClauses[?Yield, ?Return] opt }\n\n { CaseClauses[?Yield, ?Return] opt DefaultClause[?Yield, ?Return] CaseClauses[?Yield, ?Return] opt }\n\n\n\nCaseClauses[Yield, Return]://done\n\n CaseClause[?Yield, ?Return]\n\n CaseClauses[?Yield, ?Return] CaseClause[?Yield, ?Return]\n\n\n\nCaseClause[Yield, Return]://done\n\n case Expression[+In, ?Yield] : StatementList[?Yield, ?Return] opt\n\n\n\nContinueStatement[Yield]://done\n\n continue ;\n\n continue [no LineTerminator here] LabelIdentifier[?Yield] ;\n\n\n\nBreakStatement[Yield]:\n\n break ;\n\n break [no LineTerminator here] LabelIdentifier[?Yield] ;\n\n\n\nWithStatement[Yield, Return]://done\n", "file_path": "README.md", "rank": 52, "score": 14478.814162456665 }, { "content": "ConditionalExpression: LogicalORExpression ? AssignmentExpression : AssignmentExpression\n\n\n\nAssignmentOperator:one of\n\n *= /= %= += -= <<= >>= >>>= &= ^= |= **=\n\n\n\nArrowFunction[In, Yield]:\n\n ArrowParameters[?Yield] [no LineTerminator here] => ConciseBody[?In]\n\n\n\nArrowParameters[Yield]:\n\n BindingIdentifier[?Yield]\n\n CoverParenthesizedExpressionAndArrowParameterList[?Yield]\n\n\n\nConciseBody[In]:\n\n [lookahead ≠ {] AssignmentExpression[?In, ~Yield]\n\n { FunctionBody[~Yield] }\n\n\n\nLeftHandSideExpression[Yield]:\n\n NewExpression[?Yield]\n\n CallExpression[?Yield]\n\n\n\nNewExpression[Yield]:\n\n MemberExpression[?Yield]\n\n new NewExpression[?Yield]\n\n\n\nMemberExpression[Yield]:\n\n PrimaryExpression[?Yield]\n\n MemberExpression[?Yield] [ Expression[+In, ?Yield] ]\n\n MemberExpression[?Yield] . 
IdentifierName\n\n MemberExpression[?Yield] TemplateLiteral[?Yield]\n\n SuperProperty[?Yield]\n\n MetaProperty\n\n new MemberExpression[?Yield] Arguments[?Yield]\n\n\n\nPrimaryExpression[Yield]:\n\n this\n\n IdentifierReference[?Yield]\n\n Literal\n\n ArrayLiteral[?Yield]\n\n ObjectLiteral[?Yield]\n\n FunctionExpression\n\n ClassExpression[?Yield]\n\n GeneratorExpression\n\n RegularExpressionLiteral\n\n TemplateLiteral[?Yield]\n\n CoverParenthesizedExpressionAndArrowParameterList[?Yield]\n\n\n\nCoverParenthesizedExpressionAndArrowParameterList[Yield]:\n\n ( Expression[+In, ?Yield] )\n\n ( )\n\n ( ...BindingIdentifier[?Yield] )\n\n ( ...BindingPattern[?Yield] )\n\n ( Expression[+In, ?Yield] , ... BindingIdentifier[?Yield] )\n\n ( Expression[+In, ?Yield] , ... BindingPattern[?Yield] )\n\n\n\n\n\n\n\nBlockStatement[Yield, Return]://done\n\n Block[?Yield, ?Return]\n\n\n\nBlock[Yield, Return]://done\n\n { StatementList[?Yield, ?Return]opt }\n\n\n\nIfStatement[Yield, Return]://done\n\n if ( Expression[+In, ?Yield] ) Statement[?Yield, ?Return] else Statement[?Yield, ?Return]\n\n if ( Expression[+In, ?Yield] ) Statement[?Yield, ?Return]\n\n\n\nBreakableStatement[Yield, Return]://done\n\n IterationStatement[?Yield, ?Return]\n", "file_path": "README.md", "rank": 53, "score": 14478.482275701796 }, { "content": "// Copyright 2009 the Sputnik authors. 
All rights reserved.\n\n// This code is governed by the BSD license found in the LICENSE file.\n\n\n\n/*---\n\n info: Unicode characters in variable Identifier are allowed\n\n es5id: 12.2_A4\n\n description: Create and use unicode characters in variable Identifier\n\n ---*/\n\n\n\n//////////////////////////////////////////////////////////////////////////////\n\n//CHECK#1\n\ntry {\n\n __var=__var;\n\n} catch (e) {\n\n $ERROR('#1: Unicode characters in variable Identifier allowed');\n\n}\n\n//\n\n//////////////////////////////////////////////////////////////////////////////\n\n\n\nvar \\u005f\\u005f\\u0076\\u0061\\u0072 = 1;\n\n\n\n//////////////////////////////////////////////////////////////////////////////\n\n//CHECK#2\n\nif (__var !== 1) {\n\n $ERROR('#2: __var === 1. Actual: __var ==='+ __var );\n\n}\n\n//\n\n//////////////////////////////////////////////////////////////////////////////\n", "file_path": "tests/js/test.js", "rank": 54, "score": 13284.546775604376 }, { "content": "/*!\n\n * jQuery JavaScript Library v1.11.3\n\n * http://jquery.com/\n\n *\n\n * Includes Sizzle.js\n\n * http://sizzlejs.com/\n\n *\n\n * Copyright 2005, 2014 jQuery Foundation, Inc. and other contributors\n\n * Released under the MIT license\n\n * http://jquery.org/license\n\n *\n\n * Date: 2015-04-28T16:19Z\n\n */\n\n\n\n(function( global, factory ) {\n\n\n\n if ( typeof module === \"object\" && typeof module.exports === \"object\" ) {\n\n // For CommonJS and CommonJS-like environments where a proper window is present,\n\n // execute the factory and get jQuery\n\n // For environments that do not inherently posses a window with a document\n\n // (such as Node.js), expose a jQuery-making factory as module.exports\n\n // This accentuates the need for the creation of a real window\n\n // e.g. 
var jQuery = require(\"jquery\")(window);\n\n // See ticket #14549 for more info\n\n module.exports = global.document ?\n\n factory( global, true ) :\n\n function( w ) {\n\n if ( !w.document ) {\n\n throw new Error( \"jQuery requires a window with a document\" );\n\n }\n\n return factory( w );\n\n };\n\n } else {\n\n factory( global );\n\n }\n\n\n\n// Pass this if window is not defined yet\n\n}(typeof window !== \"undefined\" ? window : this, function( window, noGlobal ) {\n\n\n\n// Can't do this because several apps including ASP.NET trace\n\n// the stack via arguments.caller.callee and Firefox dies if\n\n// you try to trace through \"use strict\" call chains. (#13335)\n\n// Support: Firefox 18+\n\n//\n\n\n\n var deletedIds = [];\n\n\n\n var slice = deletedIds.slice;\n\n\n\n var concat = deletedIds.concat;\n\n\n\n var push = deletedIds.push;\n\n\n\n var indexOf = deletedIds.indexOf;\n\n\n\n var class2type = {};\n\n\n\n var toString = class2type.toString;\n\n\n\n var hasOwn = class2type.hasOwnProperty;\n\n\n\n var support = {};\n\n\n\n\n\n\n\n var\n\n version = \"1.11.3\",\n\n\n\n // Define a local copy of jQuery\n\n jQuery = function( selector, context ) {\n\n // The jQuery object is actually just the init constructor 'enhanced'\n\n // Need init if jQuery is called (just allow error to be thrown if not included)\n\n return new jQuery.fn.init( selector, context );\n\n },\n\n\n\n // Support: Android<4.1, IE<9\n\n // Make sure we trim BOM and NBSP\n\n rtrim = /^[\\s\\uFEFF\\xA0]+|[\\s\\uFEFF\\xA0]+$/g,\n\n\n\n // Matches dashed string for camelizing\n\n rmsPrefix = /^-ms-/,\n\n rdashAlpha = /-([\\da-z])/gi,\n\n\n\n // Used by jQuery.camelCase as callback to replace()\n\n fcamelCase = function( all, letter ) {\n\n return letter.toUpperCase();\n\n };\n\n\n\n jQuery.fn = jQuery.prototype = {\n\n // The current version of jQuery being used\n\n jquery: version,\n\n\n\n constructor: jQuery,\n\n\n\n // Start with an empty selector\n\n selector: \"\",\n\n\n\n // The default 
length of a jQuery object is 0\n\n length: 0,\n\n\n\n toArray: function() {\n\n return slice.call( this );\n\n },\n\n\n\n // Get the Nth element in the matched element set OR\n\n // Get the whole matched element set as a clean array\n\n get: function( num ) {\n\n return num != null ?\n\n\n\n // Return just the one element from the set\n\n ( num < 0 ? this[ num + this.length ] : this[ num ] ) :\n\n\n\n // Return all the elements in a clean array\n\n slice.call( this );\n\n },\n\n\n\n // Take an array of elements and push it onto the stack\n\n // (returning the new matched element set)\n\n pushStack: function( elems ) {\n\n\n\n // Build a new jQuery matched element set\n\n var ret = jQuery.merge( this.constructor(), elems );\n\n\n\n // Add the old object onto the stack (as a reference)\n\n ret.prevObject = this;\n\n ret.context = this.context;\n\n\n\n // Return the newly-formed element set\n\n return ret;\n\n },\n\n\n\n // Execute a callback for every element in the matched set.\n\n // (You can seed the arguments with an array of args, but this is\n\n // only used internally.)\n\n each: function( callback, args ) {\n\n return jQuery.each( this, callback, args );\n\n },\n\n\n\n map: function( callback ) {\n\n return this.pushStack( jQuery.map(this, function( elem, i ) {\n\n return callback.call( elem, i, elem );\n\n }));\n\n },\n\n\n\n slice: function() {\n\n return this.pushStack( slice.apply( this, arguments ) );\n\n },\n\n\n\n first: function() {\n\n return this.eq( 0 );\n\n },\n\n\n\n last: function() {\n\n return this.eq( -1 );\n\n },\n\n\n\n eq: function( i ) {\n\n var len = this.length,\n\n j = +i + ( i < 0 ? len : 0 );\n\n return this.pushStack( j >= 0 && j < len ? 
[ this[j] ] : [] );\n\n },\n\n\n\n end: function() {\n\n return this.prevObject || this.constructor(null);\n\n },\n\n\n\n // For internal use only.\n\n // Behaves like an Array's method, not like a jQuery method.\n\n push: push,\n\n sort: deletedIds.sort,\n\n splice: deletedIds.splice\n\n };\n\n\n\n jQuery.extend = jQuery.fn.extend = function() {\n\n var src, copyIsArray, copy, name, options, clone,\n\n target = arguments[0] || {},\n\n i = 1,\n\n length = arguments.length,\n\n deep = false;\n\n\n\n // Handle a deep copy situation\n\n if ( typeof target === \"boolean\" ) {\n\n deep = target;\n\n\n\n // skip the boolean and the target\n\n target = arguments[ i ] || {};\n\n i++;\n\n }\n\n\n\n // Handle case when target is a string or something (possible in deep copy)\n\n if ( typeof target !== \"object\" && !jQuery.isFunction(target) ) {\n\n target = {};\n\n }\n\n\n\n // extend jQuery itself if only one argument is passed\n\n if ( i === length ) {\n\n target = this;\n\n i--;\n\n }\n\n\n\n for ( ; i < length; i++ ) {\n\n // Only deal with non-null/undefined values\n\n if ( (options = arguments[ i ]) != null ) {\n\n // Extend the base object\n\n for ( name in options ) {\n\n src = target[ name ];\n\n copy = options[ name ];\n\n\n\n // Prevent never-ending loop\n\n if ( target === copy ) {\n\n continue;\n\n }\n\n\n\n // Recurse if we're merging plain objects or arrays\n\n if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) {\n\n if ( copyIsArray ) {\n\n copyIsArray = false;\n\n clone = src && jQuery.isArray(src) ? src : [];\n\n\n\n } else {\n\n clone = src && jQuery.isPlainObject(src) ? 
src : {};\n\n }\n\n\n\n // Never move original objects, clone them\n\n target[ name ] = jQuery.extend( deep, clone, copy );\n\n\n\n // Don't bring in undefined values\n\n } else if ( copy !== undefined ) {\n\n target[ name ] = copy;\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Return the modified object\n\n return target;\n\n };\n\n\n\n jQuery.extend({\n\n // Unique for each copy of jQuery on the page\n\n expando: \"jQuery\" + ( version + Math.random() ).replace( /\\D/g, \"\" ),\n\n\n\n // Assume jQuery is ready without the ready module\n\n isReady: true,\n\n\n\n error: function( msg ) {\n\n throw new Error( msg );\n\n },\n\n\n\n noop: function() {},\n\n\n\n // See test/unit/core.js for details concerning isFunction.\n\n // Since version 1.3, DOM methods and functions like alert\n\n // aren't supported. They return false on IE (#2968).\n\n isFunction: function( obj ) {\n\n return jQuery.type(obj) === \"function\";\n\n },\n\n\n\n isArray: Array.isArray || function( obj ) {\n\n return jQuery.type(obj) === \"array\";\n\n },\n\n\n\n isWindow: function( obj ) {\n\n /* jshint eqeqeq: false */\n\n return obj != null && obj == obj.window;\n\n },\n\n\n\n isNumeric: function( obj ) {\n\n // parseFloat NaNs numeric-cast false positives (null|true|false|\"\")\n\n // ...but misinterprets leading-number strings, particularly hex literals (\"0x...\")\n\n // subtraction forces infinities to NaN\n\n // adding 1 corrects loss of precision from parseFloat (#15100)\n\n return !jQuery.isArray( obj ) && (obj - parseFloat( obj ) + 1) >= 0;\n\n },\n\n\n\n isEmptyObject: function( obj ) {\n\n var name;\n\n for ( name in obj ) {\n\n return false;\n\n }\n\n return true;\n\n },\n\n\n\n isPlainObject: function( obj ) {\n\n var key;\n\n\n\n // Must be an Object.\n\n // Because of IE, we also have to check the presence of the constructor property.\n\n // Make sure that DOM nodes and window objects don't pass through, as well\n\n if ( !obj || jQuery.type(obj) !== \"object\" || obj.nodeType || 
jQuery.isWindow( obj ) ) {\n\n return false;\n\n }\n\n\n\n try {\n\n // Not own constructor property must be Object\n\n if ( obj.constructor &&\n\n !hasOwn.call(obj, \"constructor\") &&\n\n !hasOwn.call(obj.constructor.prototype, \"isPrototypeOf\") ) {\n\n return false;\n\n }\n\n } catch ( e ) {\n\n // IE8,9 Will throw exceptions on certain host objects #9897\n\n return false;\n\n }\n\n\n\n // Support: IE<9\n\n // Handle iteration over inherited properties before own properties.\n\n if ( support.ownLast ) {\n\n for ( key in obj ) {\n\n return hasOwn.call( obj, key );\n\n }\n\n }\n\n\n\n // Own properties are enumerated firstly, so to speed up,\n\n // if last one is own, then all properties are own.\n\n for ( key in obj ) {}\n\n\n\n return key === undefined || hasOwn.call( obj, key );\n\n },\n\n\n\n type: function( obj ) {\n\n if ( obj == null ) {\n\n return obj + \"\";\n\n }\n\n return typeof obj === \"object\" || typeof obj === \"function\" ?\n\n class2type[ toString.call(obj) ] || \"object\" :\n\n typeof obj;\n\n },\n\n\n\n // Evaluates a script in a global context\n\n // Workarounds based on findings by Jim Driscoll\n\n // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context\n\n globalEval: function( data ) {\n\n if ( data && jQuery.trim( data ) ) {\n\n // We use execScript on Internet Explorer\n\n // We use an anonymous function so that context is window\n\n // rather than jQuery in Firefox\n\n ( window.execScript || function( data ) {\n\n window[ \"eval\" ].call( window, data );\n\n } )( data );\n\n }\n\n },\n\n\n\n // Convert dashed to camelCase; used by the css and data modules\n\n // Microsoft forgot to hump their vendor prefix (#9572)\n\n camelCase: function( string ) {\n\n return string.replace( rmsPrefix, \"ms-\" ).replace( rdashAlpha, fcamelCase );\n\n },\n\n\n\n nodeName: function( elem, name ) {\n\n return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase();\n\n },\n\n\n\n // args is for internal 
usage only\n\n each: function( obj, callback, args ) {\n\n var value,\n\n i = 0,\n\n length = obj.length,\n\n isArray = isArraylike( obj );\n\n\n\n if ( args ) {\n\n if ( isArray ) {\n\n for ( ; i < length; i++ ) {\n\n value = callback.apply( obj[ i ], args );\n\n\n\n if ( value === false ) {\n\n break;\n\n }\n\n }\n\n } else {\n\n for ( i in obj ) {\n\n value = callback.apply( obj[ i ], args );\n\n\n\n if ( value === false ) {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n // A special, fast, case for the most common use of each\n\n } else {\n\n if ( isArray ) {\n\n for ( ; i < length; i++ ) {\n\n value = callback.call( obj[ i ], i, obj[ i ] );\n\n\n\n if ( value === false ) {\n\n break;\n\n }\n\n }\n\n } else {\n\n for ( i in obj ) {\n\n value = callback.call( obj[ i ], i, obj[ i ] );\n\n\n\n if ( value === false ) {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n\n\n return obj;\n\n },\n\n\n\n // Support: Android<4.1, IE<9\n\n trim: function( text ) {\n\n return text == null ?\n\n \"\" :\n\n ( text + \"\" ).replace( rtrim, \"\" );\n\n },\n\n\n\n // results is for internal usage only\n\n makeArray: function( arr, results ) {\n\n var ret = results || [];\n\n\n\n if ( arr != null ) {\n\n if ( isArraylike( Object(arr) ) ) {\n\n jQuery.merge( ret,\n\n typeof arr === \"string\" ?\n\n [ arr ] : arr\n\n );\n\n } else {\n\n push.call( ret, arr );\n\n }\n\n }\n\n\n\n return ret;\n\n },\n\n\n\n inArray: function( elem, arr, i ) {\n\n var len;\n\n\n\n if ( arr ) {\n\n if ( indexOf ) {\n\n return indexOf.call( arr, elem, i );\n\n }\n\n\n\n len = arr.length;\n\n i = i ? i < 0 ? 
Math.max( 0, len + i ) : i : 0;\n\n\n\n for ( ; i < len; i++ ) {\n\n // Skip accessing in sparse arrays\n\n if ( i in arr && arr[ i ] === elem ) {\n\n return i;\n\n }\n\n }\n\n }\n\n\n\n return -1;\n\n },\n\n\n\n merge: function( first, second ) {\n\n var len = +second.length,\n\n j = 0,\n\n i = first.length;\n\n\n\n while ( j < len ) {\n\n first[ i++ ] = second[ j++ ];\n\n }\n\n\n\n // Support: IE<9\n\n // Workaround casting of .length to NaN on otherwise arraylike objects (e.g., NodeLists)\n\n if ( len !== len ) {\n\n while ( second[j] !== undefined ) {\n\n first[ i++ ] = second[ j++ ];\n\n }\n\n }\n\n\n\n first.length = i;\n\n\n\n return first;\n\n },\n\n\n\n grep: function( elems, callback, invert ) {\n\n var callbackInverse,\n\n matches = [],\n\n i = 0,\n\n length = elems.length,\n\n callbackExpect = !invert;\n\n\n\n // Go through the array, only saving the items\n\n // that pass the validator function\n\n for ( ; i < length; i++ ) {\n\n callbackInverse = !callback( elems[ i ], i );\n\n if ( callbackInverse !== callbackExpect ) {\n\n matches.push( elems[ i ] );\n\n }\n\n }\n\n\n\n return matches;\n\n },\n\n\n\n // arg is for internal usage only\n\n map: function( elems, callback, arg ) {\n\n var value,\n\n i = 0,\n\n length = elems.length,\n\n isArray = isArraylike( elems ),\n\n ret = [];\n\n\n\n // Go through the array, translating each of the items to their new values\n\n if ( isArray ) {\n\n for ( ; i < length; i++ ) {\n\n value = callback( elems[ i ], i, arg );\n\n\n\n if ( value != null ) {\n\n ret.push( value );\n\n }\n\n }\n\n\n\n // Go through every key on the object,\n\n } else {\n\n for ( i in elems ) {\n\n value = callback( elems[ i ], i, arg );\n\n\n\n if ( value != null ) {\n\n ret.push( value );\n\n }\n\n }\n\n }\n\n\n\n // Flatten any nested arrays\n\n return concat.apply( [], ret );\n\n },\n\n\n\n // A global GUID counter for objects\n\n guid: 1,\n\n\n\n // Bind a function to a context, optionally partially applying any\n\n // arguments.\n\n 
proxy: function( fn, context ) {\n\n var args, proxy, tmp;\n\n\n\n if ( typeof context === \"string\" ) {\n\n tmp = fn[ context ];\n\n context = fn;\n\n fn = tmp;\n\n }\n\n\n\n // Quick check to determine if target is callable, in the spec\n\n // this throws a TypeError, but we will just return undefined.\n\n if ( !jQuery.isFunction( fn ) ) {\n\n return undefined;\n\n }\n\n\n\n // Simulated bind\n\n args = slice.call( arguments, 2 );\n\n proxy = function() {\n\n return fn.apply( context || this, args.concat( slice.call( arguments ) ) );\n\n };\n\n\n\n // Set the guid of unique handler to the same of original handler, so it can be removed\n\n proxy.guid = fn.guid = fn.guid || jQuery.guid++;\n\n\n\n return proxy;\n\n },\n\n\n\n now: function() {\n\n return +( new Date() );\n\n },\n\n\n\n // jQuery.support is not used in Core but other projects attach their\n\n // properties to it so it needs to exist.\n\n support: support\n\n });\n\n\n\n// Populate the class2type map\n\n jQuery.each(\"Boolean Number String Function Array Date RegExp Object Error\".split(\" \"), function(i, name) {\n\n class2type[ \"[object \" + name + \"]\" ] = name.toLowerCase();\n\n });\n\n\n\n function isArraylike( obj ) {\n\n\n\n // Support: iOS 8.2 (not reproducible in simulator)\n\n // `in` check used to prevent JIT error (gh-2145)\n\n // hasOwn isn't used here due to false negatives\n\n // regarding Nodelist length in IE\n\n var length = \"length\" in obj && obj.length,\n\n type = jQuery.type( obj );\n\n\n\n if ( type === \"function\" || jQuery.isWindow( obj ) ) {\n\n return false;\n\n }\n\n\n\n if ( obj.nodeType === 1 && length ) {\n\n return true;\n\n }\n\n\n\n return type === \"array\" || length === 0 ||\n\n typeof length === \"number\" && length > 0 && ( length - 1 ) in obj;\n\n }\n\n var Sizzle =\n\n /*!\n\n * Sizzle CSS Selector Engine v2.2.0-pre\n\n * http://sizzlejs.com/\n\n *\n\n * Copyright 2008, 2014 jQuery Foundation, Inc. 
and other contributors\n\n * Released under the MIT license\n\n * http://jquery.org/license\n\n *\n\n * Date: 2014-12-16\n\n */\n\n (function( window ) {\n\n\n\n var i,\n\n support,\n\n Expr,\n\n getText,\n\n isXML,\n\n tokenize,\n\n compile,\n\n select,\n\n outermostContext,\n\n sortInput,\n\n hasDuplicate,\n\n\n\n // Local document vars\n\n setDocument,\n\n document,\n\n docElem,\n\n documentIsHTML,\n\n rbuggyQSA,\n\n rbuggyMatches,\n\n matches,\n\n contains,\n\n\n\n // Instance-specific data\n\n expando = \"sizzle\" + 1 * new Date(),\n\n preferredDoc = window.document,\n\n dirruns = 0,\n\n done = 0,\n\n classCache = createCache(),\n\n tokenCache = createCache(),\n\n compilerCache = createCache(),\n\n sortOrder = function( a, b ) {\n\n if ( a === b ) {\n\n hasDuplicate = true;\n\n }\n\n return 0;\n\n },\n\n\n\n // General-purpose constants\n\n MAX_NEGATIVE = 1 << 31,\n\n\n\n // Instance methods\n\n hasOwn = ({}).hasOwnProperty,\n\n arr = [],\n\n pop = arr.pop,\n\n push_native = arr.push,\n\n push = arr.push,\n\n slice = arr.slice,\n\n // Use a stripped-down indexOf as it's faster than native\n\n // http://jsperf.com/thor-indexof-vs-for/5\n\n indexOf = function( list, elem ) {\n\n var i = 0,\n\n len = list.length;\n\n for ( ; i < len; i++ ) {\n\n if ( list[i] === elem ) {\n\n return i;\n\n }\n\n }\n\n return -1;\n\n },\n\n\n\n booleans = \"checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped\",\n\n\n\n // Regular expressions\n\n\n\n // Whitespace characters http://www.w3.org/TR/css3-selectors/#whitespace\n\n whitespace = \"[\\\\x20\\\\t\\\\r\\\\n\\\\f]\",\n\n // http://www.w3.org/TR/css3-syntax/#characters\n\n characterEncoding = \"(?:\\\\\\\\.|[\\\\w-]|[^\\\\x00-\\\\xa0])+\",\n\n\n\n // Loosely modeled on CSS identifier characters\n\n // An unquoted value should be a CSS identifier http://www.w3.org/TR/css3-selectors/#attribute-selectors\n\n // Proper syntax: 
http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier\n\n identifier = characterEncoding.replace( \"w\", \"w#\" ),\n\n\n\n // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors\n\n attributes = \"\\\\[\" + whitespace + \"*(\" + characterEncoding + \")(?:\" + whitespace +\n\n // Operator (capture 2)\n\n \"*([*^$|!~]?=)\" + whitespace +\n\n // \"Attribute values must be CSS identifiers [capture 5] or strings [capture 3 or capture 4]\"\n\n \"*(?:'((?:\\\\\\\\.|[^\\\\\\\\'])*)'|\\\"((?:\\\\\\\\.|[^\\\\\\\\\\\"])*)\\\"|(\" + identifier + \"))|)\" + whitespace +\n\n \"*\\\\]\",\n\n\n\n pseudos = \":(\" + characterEncoding + \")(?:\\\\((\" +\n\n // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments:\n\n // 1. quoted (capture 3; capture 4 or capture 5)\n\n \"('((?:\\\\\\\\.|[^\\\\\\\\'])*)'|\\\"((?:\\\\\\\\.|[^\\\\\\\\\\\"])*)\\\")|\" +\n\n // 2. simple (capture 6)\n\n \"((?:\\\\\\\\.|[^\\\\\\\\()[\\\\]]|\" + attributes + \")*)|\" +\n\n // 3. 
anything else (capture 2)\n\n \".*\" +\n\n \")\\\\)|)\",\n\n\n\n // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter\n\n rwhitespace = new RegExp( whitespace + \"+\", \"g\" ),\n\n rtrim = new RegExp( \"^\" + whitespace + \"+|((?:^|[^\\\\\\\\])(?:\\\\\\\\.)*)\" + whitespace + \"+$\", \"g\" ),\n\n\n\n rcomma = new RegExp( \"^\" + whitespace + \"*,\" + whitespace + \"*\" ),\n\n rcombinators = new RegExp( \"^\" + whitespace + \"*([>+~]|\" + whitespace + \")\" + whitespace + \"*\" ),\n\n\n\n rattributeQuotes = new RegExp( \"=\" + whitespace + \"*([^\\\\]'\\\"]*?)\" + whitespace + \"*\\\\]\", \"g\" ),\n\n\n\n rpseudo = new RegExp( pseudos ),\n\n ridentifier = new RegExp( \"^\" + identifier + \"$\" ),\n\n\n\n matchExpr = {\n\n \"ID\": new RegExp( \"^#(\" + characterEncoding + \")\" ),\n\n \"CLASS\": new RegExp( \"^\\\\.(\" + characterEncoding + \")\" ),\n\n \"TAG\": new RegExp( \"^(\" + characterEncoding.replace( \"w\", \"w*\" ) + \")\" ),\n\n \"ATTR\": new RegExp( \"^\" + attributes ),\n\n \"PSEUDO\": new RegExp( \"^\" + pseudos ),\n\n \"CHILD\": new RegExp( \"^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\\\(\" + whitespace +\n\n \"*(even|odd|(([+-]|)(\\\\d*)n|)\" + whitespace + \"*(?:([+-]|)\" + whitespace +\n\n \"*(\\\\d+)|))\" + whitespace + \"*\\\\)|)\", \"i\" ),\n\n \"bool\": new RegExp( \"^(?:\" + booleans + \")$\", \"i\" ),\n\n // For use in libraries implementing .is()\n\n // We use this for POS matching in `select`\n\n \"needsContext\": new RegExp( \"^\" + whitespace + \"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\\\(\" +\n\n whitespace + \"*((?:-\\\\d)?\\\\d*)\" + whitespace + \"*\\\\)|)(?=[^-]|$)\", \"i\" )\n\n },\n\n\n\n rinputs = /^(?:input|select|textarea|button)$/i,\n\n rheader = /^h\\d$/i,\n\n\n\n rnative = /^[^{]+\\{\\s*\\[native \\w/,\n\n\n\n // Easily-parseable/retrievable ID or TAG or CLASS selectors\n\n rquickExpr = /^(?:#([\\w-]+)|(\\w+)|\\.([\\w-]+))$/,\n\n\n\n rsibling = 
/[+~]/,\n\n rescape = /'|\\\\/g,\n\n\n\n // CSS escapes http://www.w3.org/TR/CSS21/syndata.html#escaped-characters\n\n runescape = new RegExp( \"\\\\\\\\([\\\\da-f]{1,6}\" + whitespace + \"?|(\" + whitespace + \")|.)\", \"ig\" ),\n\n funescape = function( _, escaped, escapedWhitespace ) {\n\n var high = \"0x\" + escaped - 0x10000;\n\n // NaN means non-codepoint\n\n // Support: Firefox<24\n\n // Workaround erroneous numeric interpretation of +\"0x\"\n\n return high !== high || escapedWhitespace ?\n\n escaped :\n\n high < 0 ?\n\n // BMP codepoint\n\n String.fromCharCode( high + 0x10000 ) :\n\n // Supplemental Plane codepoint (surrogate pair)\n\n String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 );\n\n },\n\n\n\n // Used for iframes\n\n // See setDocument()\n\n // Removing the function wrapper causes a \"Permission Denied\"\n\n // error in IE\n\n unloadHandler = function() {\n\n setDocument();\n\n };\n\n\n\n// Optimize for push.apply( _, NodeList )\n\n try {\n\n push.apply(\n\n (arr = slice.call( preferredDoc.childNodes )),\n\n preferredDoc.childNodes\n\n );\n\n // Support: Android<4.0\n\n // Detect silently failing push.apply\n\n arr[ preferredDoc.childNodes.length ].nodeType;\n\n } catch ( e ) {\n\n push = { apply: arr.length ?\n\n\n\n // Leverage slice if possible\n\n function( target, els ) {\n\n push_native.apply( target, slice.call(els) );\n\n } :\n\n\n\n // Support: IE<9\n\n // Otherwise append directly\n\n function( target, els ) {\n\n var j = target.length,\n\n i = 0;\n\n // Can't trust NodeList.length\n\n while ( (target[j++] = els[i++]) ) {}\n\n target.length = j - 1;\n\n }\n\n };\n\n }\n\n\n\n function Sizzle( selector, context, results, seed ) {\n\n var match, elem, m, nodeType,\n\n // QSA vars\n\n i, groups, old, nid, newContext, newSelector;\n\n\n\n if ( ( context ? 
context.ownerDocument || context : preferredDoc ) !== document ) {\n\n setDocument( context );\n\n }\n\n\n\n context = context || document;\n\n results = results || [];\n\n nodeType = context.nodeType;\n\n\n\n if ( typeof selector !== \"string\" || !selector ||\n\n nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) {\n\n\n\n return results;\n\n }\n\n\n\n if ( !seed && documentIsHTML ) {\n\n\n\n // Try to shortcut find operations when possible (e.g., not under DocumentFragment)\n\n if ( nodeType !== 11 && (match = rquickExpr.exec( selector )) ) {\n\n // Speed-up: Sizzle(\"#ID\")\n\n if ( (m = match[1]) ) {\n\n if ( nodeType === 9 ) {\n\n elem = context.getElementById( m );\n\n // Check parentNode to catch when Blackberry 4.6 returns\n\n // nodes that are no longer in the document (jQuery #6963)\n\n if ( elem && elem.parentNode ) {\n\n // Handle the case where IE, Opera, and Webkit return items\n\n // by name instead of ID\n\n if ( elem.id === m ) {\n\n results.push( elem );\n\n return results;\n\n }\n\n } else {\n\n return results;\n\n }\n\n } else {\n\n // Context is not a document\n\n if ( context.ownerDocument && (elem = context.ownerDocument.getElementById( m )) &&\n\n contains( context, elem ) && elem.id === m ) {\n\n results.push( elem );\n\n return results;\n\n }\n\n }\n\n\n\n // Speed-up: Sizzle(\"TAG\")\n\n } else if ( match[2] ) {\n\n push.apply( results, context.getElementsByTagName( selector ) );\n\n return results;\n\n\n\n // Speed-up: Sizzle(\".CLASS\")\n\n } else if ( (m = match[3]) && support.getElementsByClassName ) {\n\n push.apply( results, context.getElementsByClassName( m ) );\n\n return results;\n\n }\n\n }\n\n\n\n // QSA path\n\n if ( support.qsa && (!rbuggyQSA || !rbuggyQSA.test( selector )) ) {\n\n nid = old = expando;\n\n newContext = context;\n\n newSelector = nodeType !== 1 && selector;\n\n\n\n // qSA works strangely on Element-rooted queries\n\n // We can work around this by specifying an extra ID on the root\n\n // and working up 
from there (Thanks to Andrew Dupont for the technique)\n\n // IE 8 doesn't work on object elements\n\n if ( nodeType === 1 && context.nodeName.toLowerCase() !== \"object\" ) {\n\n groups = tokenize( selector );\n\n\n\n if ( (old = context.getAttribute(\"id\")) ) {\n\n nid = old.replace( rescape, \"\\\\$&\" );\n\n } else {\n\n context.setAttribute( \"id\", nid );\n\n }\n\n nid = \"[id='\" + nid + \"'] \";\n\n\n\n i = groups.length;\n\n while ( i-- ) {\n\n groups[i] = nid + toSelector( groups[i] );\n\n }\n\n newContext = rsibling.test( selector ) && testContext( context.parentNode ) || context;\n\n newSelector = groups.join(\",\");\n\n }\n\n\n\n if ( newSelector ) {\n\n try {\n\n push.apply( results,\n\n newContext.querySelectorAll( newSelector )\n\n );\n\n return results;\n\n } catch(qsaError) {\n\n } finally {\n\n if ( !old ) {\n\n context.removeAttribute(\"id\");\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n // All others\n\n return select( selector.replace( rtrim, \"$1\" ), context, results, seed );\n\n }\n\n\n\n /**\n\n * Create key-value caches of limited size\n\n * @returns {Function(string, Object)} Returns the Object data after storing it on itself with\n\n *\tproperty name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength)\n\n *\tdeleting the oldest entry\n\n */\n\n function createCache() {\n\n var keys = [];\n\n\n\n function cache( key, value ) {\n\n // Use (key + \" \") to avoid collision with native prototype properties (see Issue #157)\n\n if ( keys.push( key + \" \" ) > Expr.cacheLength ) {\n\n // Only keep the most recent entries\n\n delete cache[ keys.shift() ];\n\n }\n\n return (cache[ key + \" \" ] = value);\n\n }\n\n return cache;\n\n }\n\n\n\n /**\n\n * Mark a function for special use by Sizzle\n\n * @param {Function} fn The function to mark\n\n */\n\n function markFunction( fn ) {\n\n fn[ expando ] = true;\n\n return fn;\n\n }\n\n\n\n /**\n\n * Support testing using an element\n\n * @param {Function} fn Passed the created div 
and expects a boolean result\n\n */\n\n function assert( fn ) {\n\n var div = document.createElement(\"div\");\n\n\n\n try {\n\n return !!fn( div );\n\n } catch (e) {\n\n return false;\n\n } finally {\n\n // Remove from its parent by default\n\n if ( div.parentNode ) {\n\n div.parentNode.removeChild( div );\n\n }\n\n // release memory in IE\n\n div = null;\n\n }\n\n }\n\n\n\n /**\n\n * Adds the same handler for all of the specified attrs\n\n * @param {String} attrs Pipe-separated list of attributes\n\n * @param {Function} handler The method that will be applied\n\n */\n\n function addHandle( attrs, handler ) {\n\n var arr = attrs.split(\"|\"),\n\n i = attrs.length;\n\n\n\n while ( i-- ) {\n\n Expr.attrHandle[ arr[i] ] = handler;\n\n }\n\n }\n\n\n\n /**\n\n * Checks document order of two siblings\n\n * @param {Element} a\n\n * @param {Element} b\n\n * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b\n\n */\n\n function siblingCheck( a, b ) {\n\n var cur = b && a,\n\n diff = cur && a.nodeType === 1 && b.nodeType === 1 &&\n\n ( ~b.sourceIndex || MAX_NEGATIVE ) -\n\n ( ~a.sourceIndex || MAX_NEGATIVE );\n\n\n\n // Use IE sourceIndex if available on both nodes\n\n if ( diff ) {\n\n return diff;\n\n }\n\n\n\n // Check if b follows a\n\n if ( cur ) {\n\n while ( (cur = cur.nextSibling) ) {\n\n if ( cur === b ) {\n\n return -1;\n\n }\n\n }\n\n }\n\n\n\n return a ? 
1 : -1;\n\n }\n\n\n\n /**\n\n * Returns a function to use in pseudos for input types\n\n * @param {String} type\n\n */\n\n function createInputPseudo( type ) {\n\n return function( elem ) {\n\n var name = elem.nodeName.toLowerCase();\n\n return name === \"input\" && elem.type === type;\n\n };\n\n }\n\n\n\n /**\n\n * Returns a function to use in pseudos for buttons\n\n * @param {String} type\n\n */\n\n function createButtonPseudo( type ) {\n\n return function( elem ) {\n\n var name = elem.nodeName.toLowerCase();\n\n return (name === \"input\" || name === \"button\") && elem.type === type;\n\n };\n\n }\n\n\n\n /**\n\n * Returns a function to use in pseudos for positionals\n\n * @param {Function} fn\n\n */\n\n function createPositionalPseudo( fn ) {\n\n return markFunction(function( argument ) {\n\n argument = +argument;\n\n return markFunction(function( seed, matches ) {\n\n var j,\n\n matchIndexes = fn( [], seed.length, argument ),\n\n i = matchIndexes.length;\n\n\n\n // Match elements found at the specified indexes\n\n while ( i-- ) {\n\n if ( seed[ (j = matchIndexes[i]) ] ) {\n\n seed[j] = !(matches[j] = seed[j]);\n\n }\n\n }\n\n });\n\n });\n\n }\n\n\n\n /**\n\n * Checks a node for validity as a Sizzle context\n\n * @param {Element|Object=} context\n\n * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value\n\n */\n\n function testContext( context ) {\n\n return context && typeof context.getElementsByTagName !== \"undefined\" && context;\n\n }\n\n\n\n// Expose support vars for convenience\n\n support = Sizzle.support = {};\n\n\n\n /**\n\n * Detects XML nodes\n\n * @param {Element|Object} elem An element or a document\n\n * @returns {Boolean} True iff elem is a non-HTML XML node\n\n */\n\n isXML = Sizzle.isXML = function( elem ) {\n\n // documentElement is verified for cases where it doesn't yet exist\n\n // (such as loading iframes in IE - #4833)\n\n var documentElement = elem && (elem.ownerDocument || elem).documentElement;\n\n 
return documentElement ? documentElement.nodeName !== \"HTML\" : false;\n\n };\n\n\n\n /**\n\n * Sets document-related variables once based on the current document\n\n * @param {Element|Object} [doc] An element or document object to use to set the document\n\n * @returns {Object} Returns the current document\n\n */\n\n setDocument = Sizzle.setDocument = function( node ) {\n\n var hasCompare, parent,\n\n doc = node ? node.ownerDocument || node : preferredDoc;\n\n\n\n // If no document and documentElement is available, return\n\n if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) {\n\n return document;\n\n }\n\n\n\n // Set our document\n\n document = doc;\n\n docElem = doc.documentElement;\n\n parent = doc.defaultView;\n\n\n\n // Support: IE>8\n\n // If iframe document is assigned to \"document\" variable and if iframe has been reloaded,\n\n // IE will throw \"permission denied\" error when accessing \"document\" variable, see jQuery #13936\n\n // IE6-8 do not support the defaultView property so parent will be undefined\n\n if ( parent && parent !== parent.top ) {\n\n // IE11 does not have attachEvent, so all must suffer\n\n if ( parent.addEventListener ) {\n\n parent.addEventListener( \"unload\", unloadHandler, false );\n\n } else if ( parent.attachEvent ) {\n\n parent.attachEvent( \"onunload\", unloadHandler );\n\n }\n\n }\n\n\n\n /* Support tests\n\n ---------------------------------------------------------------------- */\n\n documentIsHTML = !isXML( doc );\n\n\n\n /* Attributes\n\n ---------------------------------------------------------------------- */\n\n\n\n // Support: IE<8\n\n // Verify that getAttribute really returns attributes and not properties\n\n // (excepting IE8 booleans)\n\n support.attributes = assert(function( div ) {\n\n div.className = \"i\";\n\n return !div.getAttribute(\"className\");\n\n });\n\n\n\n /* getElement(s)By*\n\n ---------------------------------------------------------------------- */\n\n\n\n // Check if 
getElementsByTagName(\"*\") returns only elements\n\n support.getElementsByTagName = assert(function( div ) {\n\n div.appendChild( doc.createComment(\"\") );\n\n return !div.getElementsByTagName(\"*\").length;\n\n });\n\n\n\n // Support: IE<9\n\n support.getElementsByClassName = rnative.test( doc.getElementsByClassName );\n\n\n\n // Support: IE<10\n\n // Check if getElementById returns elements by name\n\n // The broken getElementById methods don't pick up programatically-set names,\n\n // so use a roundabout getElementsByName test\n\n support.getById = assert(function( div ) {\n\n docElem.appendChild( div ).id = expando;\n\n return !doc.getElementsByName || !doc.getElementsByName( expando ).length;\n\n });\n\n\n\n // ID find and filter\n\n if ( support.getById ) {\n\n Expr.find[\"ID\"] = function( id, context ) {\n\n if ( typeof context.getElementById !== \"undefined\" && documentIsHTML ) {\n\n var m = context.getElementById( id );\n\n // Check parentNode to catch when Blackberry 4.6 returns\n\n // nodes that are no longer in the document #6963\n\n return m && m.parentNode ? 
[ m ] : [];\n\n }\n\n };\n\n Expr.filter[\"ID\"] = function( id ) {\n\n var attrId = id.replace( runescape, funescape );\n\n return function( elem ) {\n\n return elem.getAttribute(\"id\") === attrId;\n\n };\n\n };\n\n } else {\n\n // Support: IE6/7\n\n // getElementById is not reliable as a find shortcut\n\n delete Expr.find[\"ID\"];\n\n\n\n Expr.filter[\"ID\"] = function( id ) {\n\n var attrId = id.replace( runescape, funescape );\n\n return function( elem ) {\n\n var node = typeof elem.getAttributeNode !== \"undefined\" && elem.getAttributeNode(\"id\");\n\n return node && node.value === attrId;\n\n };\n\n };\n\n }\n\n\n\n // Tag\n\n Expr.find[\"TAG\"] = support.getElementsByTagName ?\n\n function( tag, context ) {\n\n if ( typeof context.getElementsByTagName !== \"undefined\" ) {\n\n return context.getElementsByTagName( tag );\n\n\n\n // DocumentFragment nodes don't have gEBTN\n\n } else if ( support.qsa ) {\n\n return context.querySelectorAll( tag );\n\n }\n\n } :\n\n\n\n function( tag, context ) {\n\n var elem,\n\n tmp = [],\n\n i = 0,\n\n // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too\n\n results = context.getElementsByTagName( tag );\n\n\n\n // Filter out possible comments\n\n if ( tag === \"*\" ) {\n\n while ( (elem = results[i++]) ) {\n\n if ( elem.nodeType === 1 ) {\n\n tmp.push( elem );\n\n }\n\n }\n\n\n\n return tmp;\n\n }\n\n return results;\n\n };\n\n\n\n // Class\n\n Expr.find[\"CLASS\"] = support.getElementsByClassName && function( className, context ) {\n\n if ( documentIsHTML ) {\n\n return context.getElementsByClassName( className );\n\n }\n\n };\n\n\n\n /* QSA/matchesSelector\n\n ---------------------------------------------------------------------- */\n\n\n\n // QSA and matchesSelector support\n\n\n\n // matchesSelector(:active) reports false when true (IE9/Opera 11.5)\n\n rbuggyMatches = [];\n\n\n\n // qSa(:focus) reports false when true (Chrome 21)\n\n // We allow this because of a bug in IE8/9 that throws an 
error\n\n // whenever `document.activeElement` is accessed on an iframe\n\n // So, we allow :focus to pass through QSA all the time to avoid the IE error\n\n // See http://bugs.jquery.com/ticket/13378\n\n rbuggyQSA = [];\n\n\n\n if ( (support.qsa = rnative.test( doc.querySelectorAll )) ) {\n\n // Build QSA regex\n\n // Regex strategy adopted from Diego Perini\n\n assert(function( div ) {\n\n // Select is set to empty string on purpose\n\n // This is to test IE's treatment of not explicitly\n\n // setting a boolean content attribute,\n\n // since its presence should be enough\n\n // http://bugs.jquery.com/ticket/12359\n\n docElem.appendChild( div ).innerHTML = \"<a id='\" + expando + \"'></a>\" +\n\n \"<select id='\" + expando + \"-\\f]' msallowcapture=''>\" +\n\n \"<option selected=''></option></select>\";\n\n\n\n // Support: IE8, Opera 11-12.16\n\n // Nothing should be selected when empty strings follow ^= or $= or *=\n\n // The test attribute must be unknown in Opera but \"safe\" for WinRT\n\n // http://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section\n\n if ( div.querySelectorAll(\"[msallowcapture^='']\").length ) {\n\n rbuggyQSA.push( \"[*^$]=\" + whitespace + \"*(?:''|\\\"\\\")\" );\n\n }\n\n\n\n // Support: IE8\n\n // Boolean attributes and \"value\" are not treated correctly\n\n if ( !div.querySelectorAll(\"[selected]\").length ) {\n\n rbuggyQSA.push( \"\\\\[\" + whitespace + \"*(?:value|\" + booleans + \")\" );\n\n }\n\n\n\n // Support: Chrome<29, Android<4.2+, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.7+\n\n if ( !div.querySelectorAll( \"[id~=\" + expando + \"-]\" ).length ) {\n\n rbuggyQSA.push(\"~=\");\n\n }\n\n\n\n // Webkit/Opera - :checked should return selected option elements\n\n // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked\n\n // IE8 throws error here and will not see later tests\n\n if ( !div.querySelectorAll(\":checked\").length ) {\n\n rbuggyQSA.push(\":checked\");\n\n }\n\n\n\n // Support: Safari 8+, iOS 
8+\n\n // https://bugs.webkit.org/show_bug.cgi?id=136851\n\n // In-page `selector#id sibing-combinator selector` fails\n\n if ( !div.querySelectorAll( \"a#\" + expando + \"+*\" ).length ) {\n\n rbuggyQSA.push(\".#.+[+~]\");\n\n }\n\n });\n\n\n\n assert(function( div ) {\n\n // Support: Windows 8 Native Apps\n\n // The type and name attributes are restricted during .innerHTML assignment\n\n var input = doc.createElement(\"input\");\n\n input.setAttribute( \"type\", \"hidden\" );\n\n div.appendChild( input ).setAttribute( \"name\", \"D\" );\n\n\n\n // Support: IE8\n\n // Enforce case-sensitivity of name attribute\n\n if ( div.querySelectorAll(\"[name=d]\").length ) {\n\n rbuggyQSA.push( \"name\" + whitespace + \"*[*^$|!~]?=\" );\n\n }\n\n\n\n // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled)\n\n // IE8 throws error here and will not see later tests\n\n if ( !div.querySelectorAll(\":enabled\").length ) {\n\n rbuggyQSA.push( \":enabled\", \":disabled\" );\n\n }\n\n\n\n // Opera 10-11 does not throw on post-comma invalid pseudos\n\n div.querySelectorAll(\"*,:x\");\n\n rbuggyQSA.push(\",.*:\");\n\n });\n\n }\n\n\n\n if ( (support.matchesSelector = rnative.test( (matches = docElem.matches ||\n\n docElem.webkitMatchesSelector ||\n\n docElem.mozMatchesSelector ||\n\n docElem.oMatchesSelector ||\n\n docElem.msMatchesSelector) )) ) {\n\n\n\n assert(function( div ) {\n\n // Check to see if it's possible to do matchesSelector\n\n // on a disconnected node (IE 9)\n\n support.disconnectedMatch = matches.call( div, \"div\" );\n\n\n\n // This should fail with an exception\n\n // Gecko does not error, returns false instead\n\n matches.call( div, \"[s!='']:x\" );\n\n rbuggyMatches.push( \"!=\", pseudos );\n\n });\n\n }\n\n\n\n rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join(\"|\") );\n\n rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join(\"|\") );\n\n\n\n /* Contains\n\n 
---------------------------------------------------------------------- */\n\n hasCompare = rnative.test( docElem.compareDocumentPosition );\n\n\n\n // Element contains another\n\n // Purposefully does not implement inclusive descendent\n\n // As in, an element does not contain itself\n\n contains = hasCompare || rnative.test( docElem.contains ) ?\n\n function( a, b ) {\n\n var adown = a.nodeType === 9 ? a.documentElement : a,\n\n bup = b && b.parentNode;\n\n return a === bup || !!( bup && bup.nodeType === 1 && (\n\n adown.contains ?\n\n adown.contains( bup ) :\n\n a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16\n\n ));\n\n } :\n\n function( a, b ) {\n\n if ( b ) {\n\n while ( (b = b.parentNode) ) {\n\n if ( b === a ) {\n\n return true;\n\n }\n\n }\n\n }\n\n return false;\n\n };\n\n\n\n /* Sorting\n\n ---------------------------------------------------------------------- */\n\n\n\n // Document order sorting\n\n sortOrder = hasCompare ?\n\n function( a, b ) {\n\n\n\n // Flag for duplicate removal\n\n if ( a === b ) {\n\n hasDuplicate = true;\n\n return 0;\n\n }\n\n\n\n // Sort on method existence if only one input has compareDocumentPosition\n\n var compare = !a.compareDocumentPosition - !b.compareDocumentPosition;\n\n if ( compare ) {\n\n return compare;\n\n }\n\n\n\n // Calculate position if both inputs belong to the same document\n\n compare = ( a.ownerDocument || a ) === ( b.ownerDocument || b ) ?\n\n a.compareDocumentPosition( b ) :\n\n\n\n // Otherwise we know they are disconnected\n\n 1;\n\n\n\n // Disconnected nodes\n\n if ( compare & 1 ||\n\n (!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) {\n\n\n\n // Choose the first element that is related to our preferred document\n\n if ( a === doc || a.ownerDocument === preferredDoc && contains(preferredDoc, a) ) {\n\n return -1;\n\n }\n\n if ( b === doc || b.ownerDocument === preferredDoc && contains(preferredDoc, b) ) {\n\n return 1;\n\n }\n\n\n\n // Maintain original 
order\n\n return sortInput ?\n\n ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) :\n\n 0;\n\n }\n\n\n\n return compare & 4 ? -1 : 1;\n\n } :\n\n function( a, b ) {\n\n // Exit early if the nodes are identical\n\n if ( a === b ) {\n\n hasDuplicate = true;\n\n return 0;\n\n }\n\n\n\n var cur,\n\n i = 0,\n\n aup = a.parentNode,\n\n bup = b.parentNode,\n\n ap = [ a ],\n\n bp = [ b ];\n\n\n\n // Parentless nodes are either documents or disconnected\n\n if ( !aup || !bup ) {\n\n return a === doc ? -1 :\n\n b === doc ? 1 :\n\n aup ? -1 :\n\n bup ? 1 :\n\n sortInput ?\n\n ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) :\n\n 0;\n\n\n\n // If the nodes are siblings, we can do a quick check\n\n } else if ( aup === bup ) {\n\n return siblingCheck( a, b );\n\n }\n\n\n\n // Otherwise we need full lists of their ancestors for comparison\n\n cur = a;\n\n while ( (cur = cur.parentNode) ) {\n\n ap.unshift( cur );\n\n }\n\n cur = b;\n\n while ( (cur = cur.parentNode) ) {\n\n bp.unshift( cur );\n\n }\n\n\n\n // Walk down the tree looking for a discrepancy\n\n while ( ap[i] === bp[i] ) {\n\n i++;\n\n }\n\n\n\n return i ?\n\n // Do a sibling check if the nodes have a common ancestor\n\n siblingCheck( ap[i], bp[i] ) :\n\n\n\n // Otherwise nodes in our document sort first\n\n ap[i] === preferredDoc ? -1 :\n\n bp[i] === preferredDoc ? 
1 :\n\n 0;\n\n };\n\n\n\n return doc;\n\n };\n\n\n\n Sizzle.matches = function( expr, elements ) {\n\n return Sizzle( expr, null, null, elements );\n\n };\n\n\n\n Sizzle.matchesSelector = function( elem, expr ) {\n\n // Set document vars if needed\n\n if ( ( elem.ownerDocument || elem ) !== document ) {\n\n setDocument( elem );\n\n }\n\n\n\n // Make sure that attribute selectors are quoted\n\n expr = expr.replace( rattributeQuotes, \"='$1']\" );\n\n\n\n if ( support.matchesSelector && documentIsHTML &&\n\n ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) &&\n\n ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) {\n\n\n\n try {\n\n var ret = matches.call( elem, expr );\n\n\n\n // IE 9's matchesSelector returns false on disconnected nodes\n\n if ( ret || support.disconnectedMatch ||\n\n // As well, disconnected nodes are said to be in a document\n\n // fragment in IE 9\n\n elem.document && elem.document.nodeType !== 11 ) {\n\n return ret;\n\n }\n\n } catch (e) {}\n\n }\n\n\n\n return Sizzle( expr, document, null, [ elem ] ).length > 0;\n\n };\n\n\n\n Sizzle.contains = function( context, elem ) {\n\n // Set document vars if needed\n\n if ( ( context.ownerDocument || context ) !== document ) {\n\n setDocument( context );\n\n }\n\n return contains( context, elem );\n\n };\n\n\n\n Sizzle.attr = function( elem, name ) {\n\n // Set document vars if needed\n\n if ( ( elem.ownerDocument || elem ) !== document ) {\n\n setDocument( elem );\n\n }\n\n\n\n var fn = Expr.attrHandle[ name.toLowerCase() ],\n\n // Don't get fooled by Object.prototype properties (jQuery #13807)\n\n val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ?\n\n fn( elem, name, !documentIsHTML ) :\n\n undefined;\n\n\n\n return val !== undefined ?\n\n val :\n\n support.attributes || !documentIsHTML ?\n\n elem.getAttribute( name ) :\n\n (val = elem.getAttributeNode(name)) && val.specified ?\n\n val.value :\n\n null;\n\n };\n\n\n\n Sizzle.error = function( msg ) {\n\n throw new Error( \"Syntax error, 
unrecognized expression: \" + msg );\n\n };\n\n\n\n /**\n\n * Document sorting and removing duplicates\n\n * @param {ArrayLike} results\n\n */\n\n Sizzle.uniqueSort = function( results ) {\n\n var elem,\n\n duplicates = [],\n\n j = 0,\n\n i = 0;\n\n\n\n // Unless we *know* we can detect duplicates, assume their presence\n\n hasDuplicate = !support.detectDuplicates;\n\n sortInput = !support.sortStable && results.slice( 0 );\n\n results.sort( sortOrder );\n\n\n\n if ( hasDuplicate ) {\n\n while ( (elem = results[i++]) ) {\n\n if ( elem === results[ i ] ) {\n\n j = duplicates.push( i );\n\n }\n\n }\n\n while ( j-- ) {\n\n results.splice( duplicates[ j ], 1 );\n\n }\n\n }\n\n\n\n // Clear input after sorting to release objects\n\n // See https://github.com/jquery/sizzle/pull/225\n\n sortInput = null;\n\n\n\n return results;\n\n };\n\n\n\n /**\n\n * Utility function for retrieving the text value of an array of DOM nodes\n\n * @param {Array|Element} elem\n\n */\n\n getText = Sizzle.getText = function( elem ) {\n\n var node,\n\n ret = \"\",\n\n i = 0,\n\n nodeType = elem.nodeType;\n\n\n\n if ( !nodeType ) {\n\n // If no nodeType, this is expected to be an array\n\n while ( (node = elem[i++]) ) {\n\n // Do not traverse comment nodes\n\n ret += getText( node );\n\n }\n\n } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) {\n\n // Use textContent for elements\n\n // innerText usage removed for consistency of new lines (jQuery #11153)\n\n if ( typeof elem.textContent === \"string\" ) {\n\n return elem.textContent;\n\n } else {\n\n // Traverse its children\n\n for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {\n\n ret += getText( elem );\n\n }\n\n }\n\n } else if ( nodeType === 3 || nodeType === 4 ) {\n\n return elem.nodeValue;\n\n }\n\n // Do not include comment or processing instruction nodes\n\n\n\n return ret;\n\n };\n\n\n\n Expr = Sizzle.selectors = {\n\n\n\n // Can be adjusted by the user\n\n cacheLength: 50,\n\n\n\n createPseudo: 
markFunction,\n\n\n\n match: matchExpr,\n\n\n\n attrHandle: {},\n\n\n\n find: {},\n\n\n\n relative: {\n\n \">\": { dir: \"parentNode\", first: true },\n\n \" \": { dir: \"parentNode\" },\n\n \"+\": { dir: \"previousSibling\", first: true },\n\n \"~\": { dir: \"previousSibling\" }\n\n },\n\n\n\n preFilter: {\n\n \"ATTR\": function( match ) {\n\n match[1] = match[1].replace( runescape, funescape );\n\n\n\n // Move the given value to match[3] whether quoted or unquoted\n\n match[3] = ( match[3] || match[4] || match[5] || \"\" ).replace( runescape, funescape );\n\n\n\n if ( match[2] === \"~=\" ) {\n\n match[3] = \" \" + match[3] + \" \";\n\n }\n\n\n\n return match.slice( 0, 4 );\n\n },\n\n\n\n \"CHILD\": function( match ) {\n\n /* matches from matchExpr[\"CHILD\"]\n\n 1 type (only|nth|...)\n\n 2 what (child|of-type)\n\n 3 argument (even|odd|\\d*|\\d*n([+-]\\d+)?|...)\n\n 4 xn-component of xn+y argument ([+-]?\\d*n|)\n\n 5 sign of xn-component\n\n 6 x of xn-component\n\n 7 sign of y-component\n\n 8 y of y-component\n\n */\n\n match[1] = match[1].toLowerCase();\n\n\n\n if ( match[1].slice( 0, 3 ) === \"nth\" ) {\n\n // nth-* requires argument\n\n if ( !match[3] ) {\n\n Sizzle.error( match[0] );\n\n }\n\n\n\n // numeric x and y parameters for Expr.filter.CHILD\n\n // remember that false/true cast respectively to 0/1\n\n match[4] = +( match[4] ? 
match[5] + (match[6] || 1) : 2 * ( match[3] === \"even\" || match[3] === \"odd\" ) );\n\n match[5] = +( ( match[7] + match[8] ) || match[3] === \"odd\" );\n\n\n\n // other types prohibit arguments\n\n } else if ( match[3] ) {\n\n Sizzle.error( match[0] );\n\n }\n\n\n\n return match;\n\n },\n\n\n\n \"PSEUDO\": function( match ) {\n\n var excess,\n\n unquoted = !match[6] && match[2];\n\n\n\n if ( matchExpr[\"CHILD\"].test( match[0] ) ) {\n\n return null;\n\n }\n\n\n\n // Accept quoted arguments as-is\n\n if ( match[3] ) {\n\n match[2] = match[4] || match[5] || \"\";\n\n\n\n // Strip excess characters from unquoted arguments\n\n } else if ( unquoted && rpseudo.test( unquoted ) &&\n\n // Get excess from tokenize (recursively)\n\n (excess = tokenize( unquoted, true )) &&\n\n // advance to the next closing parenthesis\n\n (excess = unquoted.indexOf( \")\", unquoted.length - excess ) - unquoted.length) ) {\n\n\n\n // excess is a negative index\n\n match[0] = match[0].slice( 0, excess );\n\n match[2] = unquoted.slice( 0, excess );\n\n }\n\n\n\n // Return only captures needed by the pseudo filter method (type and argument)\n\n return match.slice( 0, 3 );\n\n }\n\n },\n\n\n\n filter: {\n\n\n\n \"TAG\": function( nodeNameSelector ) {\n\n var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase();\n\n return nodeNameSelector === \"*\" ?\n\n function() { return true; } :\n\n function( elem ) {\n\n return elem.nodeName && elem.nodeName.toLowerCase() === nodeName;\n\n };\n\n },\n\n\n\n \"CLASS\": function( className ) {\n\n var pattern = classCache[ className + \" \" ];\n\n\n\n return pattern ||\n\n (pattern = new RegExp( \"(^|\" + whitespace + \")\" + className + \"(\" + whitespace + \"|$)\" )) &&\n\n classCache( className, function( elem ) {\n\n return pattern.test( typeof elem.className === \"string\" && elem.className || typeof elem.getAttribute !== \"undefined\" && elem.getAttribute(\"class\") || \"\" );\n\n });\n\n },\n\n\n\n \"ATTR\": function( name, 
operator, check ) {\n\n return function( elem ) {\n\n var result = Sizzle.attr( elem, name );\n\n\n\n if ( result == null ) {\n\n return operator === \"!=\";\n\n }\n\n if ( !operator ) {\n\n return true;\n\n }\n\n\n\n result += \"\";\n\n\n\n return operator === \"=\" ? result === check :\n\n operator === \"!=\" ? result !== check :\n\n operator === \"^=\" ? check && result.indexOf( check ) === 0 :\n\n operator === \"*=\" ? check && result.indexOf( check ) > -1 :\n\n operator === \"$=\" ? check && result.slice( -check.length ) === check :\n\n operator === \"~=\" ? ( \" \" + result.replace( rwhitespace, \" \" ) + \" \" ).indexOf( check ) > -1 :\n\n operator === \"|=\" ? result === check || result.slice( 0, check.length + 1 ) === check + \"-\" :\n\n false;\n\n };\n\n },\n\n\n\n \"CHILD\": function( type, what, argument, first, last ) {\n\n var simple = type.slice( 0, 3 ) !== \"nth\",\n\n forward = type.slice( -4 ) !== \"last\",\n\n ofType = what === \"of-type\";\n\n\n\n return first === 1 && last === 0 ?\n\n\n\n // Shortcut for :nth-*(n)\n\n function( elem ) {\n\n return !!elem.parentNode;\n\n } :\n\n\n\n function( elem, context, xml ) {\n\n var cache, outerCache, node, diff, nodeIndex, start,\n\n dir = simple !== forward ? \"nextSibling\" : \"previousSibling\",\n\n parent = elem.parentNode,\n\n name = ofType && elem.nodeName.toLowerCase(),\n\n useCache = !xml && !ofType;\n\n\n\n if ( parent ) {\n\n\n\n // :(first|last|only)-(child|of-type)\n\n if ( simple ) {\n\n while ( dir ) {\n\n node = elem;\n\n while ( (node = node[ dir ]) ) {\n\n if ( ofType ? node.nodeName.toLowerCase() === name : node.nodeType === 1 ) {\n\n return false;\n\n }\n\n }\n\n // Reverse direction for :only-* (if we haven't yet done so)\n\n start = dir = type === \"only\" && !start && \"nextSibling\";\n\n }\n\n return true;\n\n }\n\n\n\n start = [ forward ? parent.firstChild : parent.lastChild ];\n\n\n\n // non-xml :nth-child(...) 
stores cache data on `parent`\n\n if ( forward && useCache ) {\n\n // Seek `elem` from a previously-cached index\n\n outerCache = parent[ expando ] || (parent[ expando ] = {});\n\n cache = outerCache[ type ] || [];\n\n nodeIndex = cache[0] === dirruns && cache[1];\n\n diff = cache[0] === dirruns && cache[2];\n\n node = nodeIndex && parent.childNodes[ nodeIndex ];\n\n\n\n while ( (node = ++nodeIndex && node && node[ dir ] ||\n\n\n\n // Fallback to seeking `elem` from the start\n\n (diff = nodeIndex = 0) || start.pop()) ) {\n\n\n\n // When found, cache indexes on `parent` and break\n\n if ( node.nodeType === 1 && ++diff && node === elem ) {\n\n outerCache[ type ] = [ dirruns, nodeIndex, diff ];\n\n break;\n\n }\n\n }\n\n\n\n // Use previously-cached element index if available\n\n } else if ( useCache && (cache = (elem[ expando ] || (elem[ expando ] = {}))[ type ]) && cache[0] === dirruns ) {\n\n diff = cache[1];\n\n\n\n // xml :nth-child(...) or :nth-last-child(...) or :nth(-last)?-of-type(...)\n\n } else {\n\n // Use the same loop as above to seek `elem` from the start\n\n while ( (node = ++nodeIndex && node && node[ dir ] ||\n\n (diff = nodeIndex = 0) || start.pop()) ) {\n\n\n\n if ( ( ofType ? 
node.nodeName.toLowerCase() === name : node.nodeType === 1 ) && ++diff ) {\n\n // Cache the index of each encountered element\n\n if ( useCache ) {\n\n (node[ expando ] || (node[ expando ] = {}))[ type ] = [ dirruns, diff ];\n\n }\n\n\n\n if ( node === elem ) {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Incorporate the offset, then check against cycle size\n\n diff -= last;\n\n return diff === first || ( diff % first === 0 && diff / first >= 0 );\n\n }\n\n };\n\n },\n\n\n\n \"PSEUDO\": function( pseudo, argument ) {\n\n // pseudo-class names are case-insensitive\n\n // http://www.w3.org/TR/selectors/#pseudo-classes\n\n // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters\n\n // Remember that setFilters inherits from pseudos\n\n var args,\n\n fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] ||\n\n Sizzle.error( \"unsupported pseudo: \" + pseudo );\n\n\n\n // The user may use createPseudo to indicate that\n\n // arguments are needed to create the filter function\n\n // just as Sizzle does\n\n if ( fn[ expando ] ) {\n\n return fn( argument );\n\n }\n\n\n\n // But maintain support for old signatures\n\n if ( fn.length > 1 ) {\n\n args = [ pseudo, pseudo, \"\", argument ];\n\n return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ?\n\n markFunction(function( seed, matches ) {\n\n var idx,\n\n matched = fn( seed, argument ),\n\n i = matched.length;\n\n while ( i-- ) {\n\n idx = indexOf( seed, matched[i] );\n\n seed[ idx ] = !( matches[ idx ] = matched[i] );\n\n }\n\n }) :\n\n function( elem ) {\n\n return fn( elem, 0, args );\n\n };\n\n }\n\n\n\n return fn;\n\n }\n\n },\n\n\n\n pseudos: {\n\n // Potentially complex pseudos\n\n \"not\": markFunction(function( selector ) {\n\n // Trim the selector passed to compile\n\n // to avoid treating leading and trailing\n\n // spaces as combinators\n\n var input = [],\n\n results = [],\n\n matcher = compile( selector.replace( rtrim, \"$1\" ) );\n\n\n\n return 
matcher[ expando ] ?\n\n markFunction(function( seed, matches, context, xml ) {\n\n var elem,\n\n unmatched = matcher( seed, null, xml, [] ),\n\n i = seed.length;\n\n\n\n // Match elements unmatched by `matcher`\n\n while ( i-- ) {\n\n if ( (elem = unmatched[i]) ) {\n\n seed[i] = !(matches[i] = elem);\n\n }\n\n }\n\n }) :\n\n function( elem, context, xml ) {\n\n input[0] = elem;\n\n matcher( input, null, xml, results );\n\n // Don't keep the element (issue #299)\n\n input[0] = null;\n\n return !results.pop();\n\n };\n\n }),\n\n\n\n \"has\": markFunction(function( selector ) {\n\n return function( elem ) {\n\n return Sizzle( selector, elem ).length > 0;\n\n };\n\n }),\n\n\n\n \"contains\": markFunction(function( text ) {\n\n text = text.replace( runescape, funescape );\n\n return function( elem ) {\n\n return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1;\n\n };\n\n }),\n\n\n\n // \"Whether an element is represented by a :lang() selector\n\n // is based solely on the element's language value\n\n // being equal to the identifier C,\n\n // or beginning with the identifier C immediately followed by \"-\".\n\n // The matching of C against the element's language value is performed case-insensitively.\n\n // The identifier C does not have to be a valid language name.\"\n\n // http://www.w3.org/TR/selectors/#lang-pseudo\n\n \"lang\": markFunction( function( lang ) {\n\n // lang value must be a valid identifier\n\n if ( !ridentifier.test(lang || \"\") ) {\n\n Sizzle.error( \"unsupported lang: \" + lang );\n\n }\n\n lang = lang.replace( runescape, funescape ).toLowerCase();\n\n return function( elem ) {\n\n var elemLang;\n\n do {\n\n if ( (elemLang = documentIsHTML ?\n\n elem.lang :\n\n elem.getAttribute(\"xml:lang\") || elem.getAttribute(\"lang\")) ) {\n\n\n\n elemLang = elemLang.toLowerCase();\n\n return elemLang === lang || elemLang.indexOf( lang + \"-\" ) === 0;\n\n }\n\n } while ( (elem = elem.parentNode) && elem.nodeType === 1 );\n\n 
return false;\n\n };\n\n }),\n\n\n\n // Miscellaneous\n\n \"target\": function( elem ) {\n\n var hash = window.location && window.location.hash;\n\n return hash && hash.slice( 1 ) === elem.id;\n\n },\n\n\n\n \"root\": function( elem ) {\n\n return elem === docElem;\n\n },\n\n\n\n \"focus\": function( elem ) {\n\n return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex);\n\n },\n\n\n\n // Boolean properties\n\n \"enabled\": function( elem ) {\n\n return elem.disabled === false;\n\n },\n\n\n\n \"disabled\": function( elem ) {\n\n return elem.disabled === true;\n\n },\n\n\n\n \"checked\": function( elem ) {\n\n // In CSS3, :checked should return both checked and selected elements\n\n // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked\n\n var nodeName = elem.nodeName.toLowerCase();\n\n return (nodeName === \"input\" && !!elem.checked) || (nodeName === \"option\" && !!elem.selected);\n\n },\n\n\n\n \"selected\": function( elem ) {\n\n // Accessing this property makes selected-by-default\n\n // options in Safari work properly\n\n if ( elem.parentNode ) {\n\n elem.parentNode.selectedIndex;\n\n }\n\n\n\n return elem.selected === true;\n\n },\n\n\n\n // Contents\n\n \"empty\": function( elem ) {\n\n // http://www.w3.org/TR/selectors/#empty-pseudo\n\n // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5),\n\n // but not by others (comment: 8; processing instruction: 7; etc.)\n\n // nodeType < 6 works because attributes (2) do not appear as children\n\n for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {\n\n if ( elem.nodeType < 6 ) {\n\n return false;\n\n }\n\n }\n\n return true;\n\n },\n\n\n\n \"parent\": function( elem ) {\n\n return !Expr.pseudos[\"empty\"]( elem );\n\n },\n\n\n\n // Element/input types\n\n \"header\": function( elem ) {\n\n return rheader.test( elem.nodeName );\n\n },\n\n\n\n \"input\": function( elem ) {\n\n return 
rinputs.test( elem.nodeName );\n\n },\n\n\n\n \"button\": function( elem ) {\n\n var name = elem.nodeName.toLowerCase();\n\n return name === \"input\" && elem.type === \"button\" || name === \"button\";\n\n },\n\n\n\n \"text\": function( elem ) {\n\n var attr;\n\n return elem.nodeName.toLowerCase() === \"input\" &&\n\n elem.type === \"text\" &&\n\n\n\n // Support: IE<8\n\n // New HTML5 attribute values (e.g., \"search\") appear with elem.type === \"text\"\n\n ( (attr = elem.getAttribute(\"type\")) == null || attr.toLowerCase() === \"text\" );\n\n },\n\n\n\n // Position-in-collection\n\n \"first\": createPositionalPseudo(function() {\n\n return [ 0 ];\n\n }),\n\n\n\n \"last\": createPositionalPseudo(function( matchIndexes, length ) {\n\n return [ length - 1 ];\n\n }),\n\n\n\n \"eq\": createPositionalPseudo(function( matchIndexes, length, argument ) {\n\n return [ argument < 0 ? argument + length : argument ];\n\n }),\n\n\n\n \"even\": createPositionalPseudo(function( matchIndexes, length ) {\n\n var i = 0;\n\n for ( ; i < length; i += 2 ) {\n\n matchIndexes.push( i );\n\n }\n\n return matchIndexes;\n\n }),\n\n\n\n \"odd\": createPositionalPseudo(function( matchIndexes, length ) {\n\n var i = 1;\n\n for ( ; i < length; i += 2 ) {\n\n matchIndexes.push( i );\n\n }\n\n return matchIndexes;\n\n }),\n\n\n\n \"lt\": createPositionalPseudo(function( matchIndexes, length, argument ) {\n\n var i = argument < 0 ? argument + length : argument;\n\n for ( ; --i >= 0; ) {\n\n matchIndexes.push( i );\n\n }\n\n return matchIndexes;\n\n }),\n\n\n\n \"gt\": createPositionalPseudo(function( matchIndexes, length, argument ) {\n\n var i = argument < 0 ? 
argument + length : argument;\n\n for ( ; ++i < length; ) {\n\n matchIndexes.push( i );\n\n }\n\n return matchIndexes;\n\n })\n\n }\n\n };\n\n\n\n Expr.pseudos[\"nth\"] = Expr.pseudos[\"eq\"];\n\n\n\n// Add button/input type pseudos\n\n for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) {\n\n Expr.pseudos[ i ] = createInputPseudo( i );\n\n }\n\n for ( i in { submit: true, reset: true } ) {\n\n Expr.pseudos[ i ] = createButtonPseudo( i );\n\n }\n\n\n\n// Easy API for creating new setFilters\n\n function setFilters() {}\n\n setFilters.prototype = Expr.filters = Expr.pseudos;\n\n Expr.setFilters = new setFilters();\n\n\n\n tokenize = Sizzle.tokenize = function( selector, parseOnly ) {\n\n var matched, match, tokens, type,\n\n soFar, groups, preFilters,\n\n cached = tokenCache[ selector + \" \" ];\n\n\n\n if ( cached ) {\n\n return parseOnly ? 0 : cached.slice( 0 );\n\n }\n\n\n\n soFar = selector;\n\n groups = [];\n\n preFilters = Expr.preFilter;\n\n\n\n while ( soFar ) {\n\n\n\n // Comma and first run\n\n if ( !matched || (match = rcomma.exec( soFar )) ) {\n\n if ( match ) {\n\n // Don't consume trailing commas as valid\n\n soFar = soFar.slice( match[0].length ) || soFar;\n\n }\n\n groups.push( (tokens = []) );\n\n }\n\n\n\n matched = false;\n\n\n\n // Combinators\n\n if ( (match = rcombinators.exec( soFar )) ) {\n\n matched = match.shift();\n\n tokens.push({\n\n value: matched,\n\n // Cast descendant combinators to space\n\n type: match[0].replace( rtrim, \" \" )\n\n });\n\n soFar = soFar.slice( matched.length );\n\n }\n\n\n\n // Filters\n\n for ( type in Expr.filter ) {\n\n if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] ||\n\n (match = preFilters[ type ]( match ))) ) {\n\n matched = match.shift();\n\n tokens.push({\n\n value: matched,\n\n type: type,\n\n matches: match\n\n });\n\n soFar = soFar.slice( matched.length );\n\n }\n\n }\n\n\n\n if ( !matched ) {\n\n break;\n\n }\n\n }\n\n\n\n // Return the length 
of the invalid excess\n\n // if we're just parsing\n\n // Otherwise, throw an error or return tokens\n\n return parseOnly ?\n\n soFar.length :\n\n soFar ?\n\n Sizzle.error( selector ) :\n\n // Cache the tokens\n\n tokenCache( selector, groups ).slice( 0 );\n\n };\n\n\n\n function toSelector( tokens ) {\n\n var i = 0,\n\n len = tokens.length,\n\n selector = \"\";\n\n for ( ; i < len; i++ ) {\n\n selector += tokens[i].value;\n\n }\n\n return selector;\n\n }\n\n\n\n function addCombinator( matcher, combinator, base ) {\n\n var dir = combinator.dir,\n\n checkNonElements = base && dir === \"parentNode\",\n\n doneName = done++;\n\n\n\n return combinator.first ?\n\n // Check against closest ancestor/preceding element\n\n function( elem, context, xml ) {\n\n while ( (elem = elem[ dir ]) ) {\n\n if ( elem.nodeType === 1 || checkNonElements ) {\n\n return matcher( elem, context, xml );\n\n }\n\n }\n\n } :\n\n\n\n // Check against all ancestor/preceding elements\n\n function( elem, context, xml ) {\n\n var oldCache, outerCache,\n\n newCache = [ dirruns, doneName ];\n\n\n\n // We can't set arbitrary data on XML nodes, so they don't benefit from dir caching\n\n if ( xml ) {\n\n while ( (elem = elem[ dir ]) ) {\n\n if ( elem.nodeType === 1 || checkNonElements ) {\n\n if ( matcher( elem, context, xml ) ) {\n\n return true;\n\n }\n\n }\n\n }\n\n } else {\n\n while ( (elem = elem[ dir ]) ) {\n\n if ( elem.nodeType === 1 || checkNonElements ) {\n\n outerCache = elem[ expando ] || (elem[ expando ] = {});\n\n if ( (oldCache = outerCache[ dir ]) &&\n\n oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) {\n\n\n\n // Assign to newCache so results back-propagate to previous elements\n\n return (newCache[ 2 ] = oldCache[ 2 ]);\n\n } else {\n\n // Reuse newcache so results back-propagate to previous elements\n\n outerCache[ dir ] = newCache;\n\n\n\n // A match means we're done; a fail means we have to keep checking\n\n if ( (newCache[ 2 ] = matcher( elem, context, xml )) ) {\n\n 
return true;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n };\n\n }\n\n\n\n function elementMatcher( matchers ) {\n\n return matchers.length > 1 ?\n\n function( elem, context, xml ) {\n\n var i = matchers.length;\n\n while ( i-- ) {\n\n if ( !matchers[i]( elem, context, xml ) ) {\n\n return false;\n\n }\n\n }\n\n return true;\n\n } :\n\n matchers[0];\n\n }\n\n\n\n function multipleContexts( selector, contexts, results ) {\n\n var i = 0,\n\n len = contexts.length;\n\n for ( ; i < len; i++ ) {\n\n Sizzle( selector, contexts[i], results );\n\n }\n\n return results;\n\n }\n\n\n\n function condense( unmatched, map, filter, context, xml ) {\n\n var elem,\n\n newUnmatched = [],\n\n i = 0,\n\n len = unmatched.length,\n\n mapped = map != null;\n\n\n\n for ( ; i < len; i++ ) {\n\n if ( (elem = unmatched[i]) ) {\n\n if ( !filter || filter( elem, context, xml ) ) {\n\n newUnmatched.push( elem );\n\n if ( mapped ) {\n\n map.push( i );\n\n }\n\n }\n\n }\n\n }\n\n\n\n return newUnmatched;\n\n }\n\n\n\n function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) {\n\n if ( postFilter && !postFilter[ expando ] ) {\n\n postFilter = setMatcher( postFilter );\n\n }\n\n if ( postFinder && !postFinder[ expando ] ) {\n\n postFinder = setMatcher( postFinder, postSelector );\n\n }\n\n return markFunction(function( seed, results, context, xml ) {\n\n var temp, i, elem,\n\n preMap = [],\n\n postMap = [],\n\n preexisting = results.length,\n\n\n\n // Get initial elements from seed or context\n\n elems = seed || multipleContexts( selector || \"*\", context.nodeType ? [ context ] : context, [] ),\n\n\n\n // Prefilter to get matcher input, preserving a map for seed-results synchronization\n\n matcherIn = preFilter && ( seed || !selector ) ?\n\n condense( elems, preMap, preFilter, context, xml ) :\n\n elems,\n\n\n\n matcherOut = matcher ?\n\n // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results,\n\n postFinder || ( seed ? 
preFilter : preexisting || postFilter ) ?\n\n\n\n // ...intermediate processing is necessary\n\n [] :\n\n\n\n // ...otherwise use results directly\n\n results :\n\n matcherIn;\n\n\n\n // Find primary matches\n\n if ( matcher ) {\n\n matcher( matcherIn, matcherOut, context, xml );\n\n }\n\n\n\n // Apply postFilter\n\n if ( postFilter ) {\n\n temp = condense( matcherOut, postMap );\n\n postFilter( temp, [], context, xml );\n\n\n\n // Un-match failing elements by moving them back to matcherIn\n\n i = temp.length;\n\n while ( i-- ) {\n\n if ( (elem = temp[i]) ) {\n\n matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem);\n\n }\n\n }\n\n }\n\n\n\n if ( seed ) {\n\n if ( postFinder || preFilter ) {\n\n if ( postFinder ) {\n\n // Get the final matcherOut by condensing this intermediate into postFinder contexts\n\n temp = [];\n\n i = matcherOut.length;\n\n while ( i-- ) {\n\n if ( (elem = matcherOut[i]) ) {\n\n // Restore matcherIn since elem is not yet a final match\n\n temp.push( (matcherIn[i] = elem) );\n\n }\n\n }\n\n postFinder( null, (matcherOut = []), temp, xml );\n\n }\n\n\n\n // Move matched elements from seed to results to keep them synchronized\n\n i = matcherOut.length;\n\n while ( i-- ) {\n\n if ( (elem = matcherOut[i]) &&\n\n (temp = postFinder ? 
indexOf( seed, elem ) : preMap[i]) > -1 ) {\n\n\n\n seed[temp] = !(results[temp] = elem);\n\n }\n\n }\n\n }\n\n\n\n // Add elements to results, through postFinder if defined\n\n } else {\n\n matcherOut = condense(\n\n matcherOut === results ?\n\n matcherOut.splice( preexisting, matcherOut.length ) :\n\n matcherOut\n\n );\n\n if ( postFinder ) {\n\n postFinder( null, results, matcherOut, xml );\n\n } else {\n\n push.apply( results, matcherOut );\n\n }\n\n }\n\n });\n\n }\n\n\n\n function matcherFromTokens( tokens ) {\n\n var checkContext, matcher, j,\n\n len = tokens.length,\n\n leadingRelative = Expr.relative[ tokens[0].type ],\n\n implicitRelative = leadingRelative || Expr.relative[\" \"],\n\n i = leadingRelative ? 1 : 0,\n\n\n\n // The foundational matcher ensures that elements are reachable from top-level context(s)\n\n matchContext = addCombinator( function( elem ) {\n\n return elem === checkContext;\n\n }, implicitRelative, true ),\n\n matchAnyContext = addCombinator( function( elem ) {\n\n return indexOf( checkContext, elem ) > -1;\n\n }, implicitRelative, true ),\n\n matchers = [ function( elem, context, xml ) {\n\n var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || (\n\n (checkContext = context).nodeType ?\n\n matchContext( elem, context, xml ) :\n\n matchAnyContext( elem, context, xml ) );\n\n // Avoid hanging onto element (issue #299)\n\n checkContext = null;\n\n return ret;\n\n } ];\n\n\n\n for ( ; i < len; i++ ) {\n\n if ( (matcher = Expr.relative[ tokens[i].type ]) ) {\n\n matchers = [ addCombinator(elementMatcher( matchers ), matcher) ];\n\n } else {\n\n matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches );\n\n\n\n // Return special upon seeing a positional matcher\n\n if ( matcher[ expando ] ) {\n\n // Find the next relative operator (if any) for proper handling\n\n j = ++i;\n\n for ( ; j < len; j++ ) {\n\n if ( Expr.relative[ tokens[j].type ] ) {\n\n break;\n\n }\n\n }\n\n return setMatcher(\n\n i > 1 && 
elementMatcher( matchers ),\n\n i > 1 && toSelector(\n\n // If the preceding token was a descendant combinator, insert an implicit any-element `*`\n\n tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === \" \" ? \"*\" : \"\" })\n\n ).replace( rtrim, \"$1\" ),\n\n matcher,\n\n i < j && matcherFromTokens( tokens.slice( i, j ) ),\n\n j < len && matcherFromTokens( (tokens = tokens.slice( j )) ),\n\n j < len && toSelector( tokens )\n\n );\n\n }\n\n matchers.push( matcher );\n\n }\n\n }\n\n\n\n return elementMatcher( matchers );\n\n }\n\n\n\n function matcherFromGroupMatchers( elementMatchers, setMatchers ) {\n\n var bySet = setMatchers.length > 0,\n\n byElement = elementMatchers.length > 0,\n\n superMatcher = function( seed, context, xml, results, outermost ) {\n\n var elem, j, matcher,\n\n matchedCount = 0,\n\n i = \"0\",\n\n unmatched = seed && [],\n\n setMatched = [],\n\n contextBackup = outermostContext,\n\n // We must always have either seed elements or outermost context\n\n elems = seed || byElement && Expr.find[\"TAG\"]( \"*\", outermost ),\n\n // Use integer dirruns iff this is the outermost matcher\n\n dirrunsUnique = (dirruns += contextBackup == null ? 
1 : Math.random() || 0.1),\n\n len = elems.length;\n\n\n\n if ( outermost ) {\n\n outermostContext = context !== document && context;\n\n }\n\n\n\n // Add elements passing elementMatchers directly to results\n\n // Keep `i` a string if there are no elements so `matchedCount` will be \"00\" below\n\n // Support: IE<9, Safari\n\n // Tolerate NodeList properties (IE: \"length\"; Safari: <number>) matching elements by id\n\n for ( ; i !== len && (elem = elems[i]) != null; i++ ) {\n\n if ( byElement && elem ) {\n\n j = 0;\n\n while ( (matcher = elementMatchers[j++]) ) {\n\n if ( matcher( elem, context, xml ) ) {\n\n results.push( elem );\n\n break;\n\n }\n\n }\n\n if ( outermost ) {\n\n dirruns = dirrunsUnique;\n\n }\n\n }\n\n\n\n // Track unmatched elements for set filters\n\n if ( bySet ) {\n\n // They will have gone through all possible matchers\n\n if ( (elem = !matcher && elem) ) {\n\n matchedCount--;\n\n }\n\n\n\n // Lengthen the array for every element, matched or not\n\n if ( seed ) {\n\n unmatched.push( elem );\n\n }\n\n }\n\n }\n\n\n\n // Apply set filters to unmatched elements\n\n matchedCount += i;\n\n if ( bySet && i !== matchedCount ) {\n\n j = 0;\n\n while ( (matcher = setMatchers[j++]) ) {\n\n matcher( unmatched, setMatched, context, xml );\n\n }\n\n\n\n if ( seed ) {\n\n // Reintegrate element matches to eliminate the need for sorting\n\n if ( matchedCount > 0 ) {\n\n while ( i-- ) {\n\n if ( !(unmatched[i] || setMatched[i]) ) {\n\n setMatched[i] = pop.call( results );\n\n }\n\n }\n\n }\n\n\n\n // Discard index placeholder values to get only actual matches\n\n setMatched = condense( setMatched );\n\n }\n\n\n\n // Add matches to results\n\n push.apply( results, setMatched );\n\n\n\n // Seedless set matches succeeding multiple successful matchers stipulate sorting\n\n if ( outermost && !seed && setMatched.length > 0 &&\n\n ( matchedCount + setMatchers.length ) > 1 ) {\n\n\n\n Sizzle.uniqueSort( results );\n\n }\n\n }\n\n\n\n // Override manipulation of 
globals by nested matchers\n\n if ( outermost ) {\n\n dirruns = dirrunsUnique;\n\n outermostContext = contextBackup;\n\n }\n\n\n\n return unmatched;\n\n };\n\n\n\n return bySet ?\n\n markFunction( superMatcher ) :\n\n superMatcher;\n\n }\n\n\n\n compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) {\n\n var i,\n\n setMatchers = [],\n\n elementMatchers = [],\n\n cached = compilerCache[ selector + \" \" ];\n\n\n\n if ( !cached ) {\n\n // Generate a function of recursive functions that can be used to check each element\n\n if ( !match ) {\n\n match = tokenize( selector );\n\n }\n\n i = match.length;\n\n while ( i-- ) {\n\n cached = matcherFromTokens( match[i] );\n\n if ( cached[ expando ] ) {\n\n setMatchers.push( cached );\n\n } else {\n\n elementMatchers.push( cached );\n\n }\n\n }\n\n\n\n // Cache the compiled function\n\n cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) );\n\n\n\n // Save selector and tokenization\n\n cached.selector = selector;\n\n }\n\n return cached;\n\n };\n\n\n\n /**\n\n * A low-level selection function that works with Sizzle's compiled\n\n * selector functions\n\n * @param {String|Function} selector A selector or a pre-compiled\n\n * selector function built with Sizzle.compile\n\n * @param {Element} context\n\n * @param {Array} [results]\n\n * @param {Array} [seed] A set of elements to match against\n\n */\n\n select = Sizzle.select = function( selector, context, results, seed ) {\n\n var i, tokens, token, type, find,\n\n compiled = typeof selector === \"function\" && selector,\n\n match = !seed && tokenize( (selector = compiled.selector || selector) );\n\n\n\n results = results || [];\n\n\n\n // Try to minimize operations if there is no seed and only one group\n\n if ( match.length === 1 ) {\n\n\n\n // Take a shortcut and set the context if the root selector is an ID\n\n tokens = match[0] = match[0].slice( 0 );\n\n if ( tokens.length > 2 && (token = tokens[0]).type === 
\"ID\" &&\n\n support.getById && context.nodeType === 9 && documentIsHTML &&\n\n Expr.relative[ tokens[1].type ] ) {\n\n\n\n context = ( Expr.find[\"ID\"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0];\n\n if ( !context ) {\n\n return results;\n\n\n\n // Precompiled matchers will still verify ancestry, so step up a level\n\n } else if ( compiled ) {\n\n context = context.parentNode;\n\n }\n\n\n\n selector = selector.slice( tokens.shift().value.length );\n\n }\n\n\n\n // Fetch a seed set for right-to-left matching\n\n i = matchExpr[\"needsContext\"].test( selector ) ? 0 : tokens.length;\n\n while ( i-- ) {\n\n token = tokens[i];\n\n\n\n // Abort if we hit a combinator\n\n if ( Expr.relative[ (type = token.type) ] ) {\n\n break;\n\n }\n\n if ( (find = Expr.find[ type ]) ) {\n\n // Search, expanding context for leading sibling combinators\n\n if ( (seed = find(\n\n token.matches[0].replace( runescape, funescape ),\n\n rsibling.test( tokens[0].type ) && testContext( context.parentNode ) || context\n\n )) ) {\n\n\n\n // If seed is empty or no tokens remain, we can return early\n\n tokens.splice( i, 1 );\n\n selector = seed.length && toSelector( tokens );\n\n if ( !selector ) {\n\n push.apply( results, seed );\n\n return results;\n\n }\n\n\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Compile and execute a filtering function if one is not provided\n\n // Provide `match` to avoid retokenization if we modified the selector above\n\n ( compiled || compile( selector, match ) )(\n\n seed,\n\n context,\n\n !documentIsHTML,\n\n results,\n\n rsibling.test( selector ) && testContext( context.parentNode ) || context\n\n );\n\n return results;\n\n };\n\n\n\n// One-time assignments\n\n\n\n// Sort stability\n\n support.sortStable = expando.split(\"\").sort( sortOrder ).join(\"\") === expando;\n\n\n\n// Support: Chrome 14-35+\n\n// Always assume duplicates if they aren't passed to the comparison function\n\n support.detectDuplicates = !!hasDuplicate;\n\n\n\n// 
Initialize against the default document\n\n setDocument();\n\n\n\n// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)\n\n// Detached nodes confoundingly follow *each other*\n\n support.sortDetached = assert(function( div1 ) {\n\n // Should return 1, but returns 4 (following)\n\n return div1.compareDocumentPosition( document.createElement(\"div\") ) & 1;\n\n });\n\n\n\n// Support: IE<8\n\n// Prevent attribute/property \"interpolation\"\n\n// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx\n\n if ( !assert(function( div ) {\n\n div.innerHTML = \"<a href='#'></a>\";\n\n return div.firstChild.getAttribute(\"href\") === \"#\" ;\n\n }) ) {\n\n addHandle( \"type|href|height|width\", function( elem, name, isXML ) {\n\n if ( !isXML ) {\n\n return elem.getAttribute( name, name.toLowerCase() === \"type\" ? 1 : 2 );\n\n }\n\n });\n\n }\n\n\n\n// Support: IE<9\n\n// Use defaultValue in place of getAttribute(\"value\")\n\n if ( !support.attributes || !assert(function( div ) {\n\n div.innerHTML = \"<input/>\";\n\n div.firstChild.setAttribute( \"value\", \"\" );\n\n return div.firstChild.getAttribute( \"value\" ) === \"\";\n\n }) ) {\n\n addHandle( \"value\", function( elem, name, isXML ) {\n\n if ( !isXML && elem.nodeName.toLowerCase() === \"input\" ) {\n\n return elem.defaultValue;\n\n }\n\n });\n\n }\n\n\n\n// Support: IE<9\n\n// Use getAttributeNode to fetch booleans when getAttribute lies\n\n if ( !assert(function( div ) {\n\n return div.getAttribute(\"disabled\") == null;\n\n }) ) {\n\n addHandle( booleans, function( elem, name, isXML ) {\n\n var val;\n\n if ( !isXML ) {\n\n return elem[ name ] === true ? 
name.toLowerCase() :\n\n (val = elem.getAttributeNode( name )) && val.specified ?\n\n val.value :\n\n null;\n\n }\n\n });\n\n }\n\n\n\n return Sizzle;\n\n\n\n })( window );\n\n\n\n\n\n\n\n jQuery.find = Sizzle;\n\n jQuery.expr = Sizzle.selectors;\n\n jQuery.expr[\":\"] = jQuery.expr.pseudos;\n\n jQuery.unique = Sizzle.uniqueSort;\n\n jQuery.text = Sizzle.getText;\n\n jQuery.isXMLDoc = Sizzle.isXML;\n\n jQuery.contains = Sizzle.contains;\n\n\n\n\n\n\n\n var rneedsContext = jQuery.expr.match.needsContext;\n\n\n\n var rsingleTag = (/^<(\\w+)\\s*\\/?>(?:<\\/\\1>|)$/);\n\n\n\n\n\n\n\n var risSimple = /^.[^:#\\[\\.,]*$/;\n\n\n\n// Implement the identical functionality for filter and not\n\n function winnow( elements, qualifier, not ) {\n\n if ( jQuery.isFunction( qualifier ) ) {\n\n return jQuery.grep( elements, function( elem, i ) {\n\n /* jshint -W018 */\n\n return !!qualifier.call( elem, i, elem ) !== not;\n\n });\n\n\n\n }\n\n\n\n if ( qualifier.nodeType ) {\n\n return jQuery.grep( elements, function( elem ) {\n\n return ( elem === qualifier ) !== not;\n\n });\n\n\n\n }\n\n\n\n if ( typeof qualifier === \"string\" ) {\n\n if ( risSimple.test( qualifier ) ) {\n\n return jQuery.filter( qualifier, elements, not );\n\n }\n\n\n\n qualifier = jQuery.filter( qualifier, elements );\n\n }\n\n\n\n return jQuery.grep( elements, function( elem ) {\n\n return ( jQuery.inArray( elem, qualifier ) >= 0 ) !== not;\n\n });\n\n }\n\n\n\n jQuery.filter = function( expr, elems, not ) {\n\n var elem = elems[ 0 ];\n\n\n\n if ( not ) {\n\n expr = \":not(\" + expr + \")\";\n\n }\n\n\n\n return elems.length === 1 && elem.nodeType === 1 ?\n\n jQuery.find.matchesSelector( elem, expr ) ? 
[ elem ] : [] :\n\n jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) {\n\n return elem.nodeType === 1;\n\n }));\n\n };\n\n\n\n jQuery.fn.extend({\n\n find: function( selector ) {\n\n var i,\n\n ret = [],\n\n self = this,\n\n len = self.length;\n\n\n\n if ( typeof selector !== \"string\" ) {\n\n return this.pushStack( jQuery( selector ).filter(function() {\n\n for ( i = 0; i < len; i++ ) {\n\n if ( jQuery.contains( self[ i ], this ) ) {\n\n return true;\n\n }\n\n }\n\n }) );\n\n }\n\n\n\n for ( i = 0; i < len; i++ ) {\n\n jQuery.find( selector, self[ i ], ret );\n\n }\n\n\n\n // Needed because $( selector, context ) becomes $( context ).find( selector )\n\n ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret );\n\n ret.selector = this.selector ? this.selector + \" \" + selector : selector;\n\n return ret;\n\n },\n\n filter: function( selector ) {\n\n return this.pushStack( winnow(this, selector || [], false) );\n\n },\n\n not: function( selector ) {\n\n return this.pushStack( winnow(this, selector || [], true) );\n\n },\n\n is: function( selector ) {\n\n return !!winnow(\n\n this,\n\n\n\n // If this is a positional/relative selector, check membership in the returned set\n\n // so $(\"p:first\").is(\"p:last\") won't return true for a doc with two \"p\".\n\n typeof selector === \"string\" && rneedsContext.test( selector ) ?\n\n jQuery( selector ) :\n\n selector || [],\n\n false\n\n ).length;\n\n }\n\n });\n\n\n\n\n\n// Initialize a jQuery object\n\n\n\n\n\n// A central reference to the root jQuery(document)\n\n var rootjQuery,\n\n\n\n // Use the correct document accordingly with window argument (sandbox)\n\n document = window.document,\n\n\n\n // A simple way to check for HTML strings\n\n // Prioritize #id over <tag> to avoid XSS via location.hash (#9521)\n\n // Strict HTML recognition (#11290: must start with <)\n\n rquickExpr = /^(?:\\s*(<[\\w\\W]+>)[^>]*|#([\\w-]*))$/,\n\n\n\n init = jQuery.fn.init = function( selector, context ) {\n\n var 
match, elem;\n\n\n\n // HANDLE: $(\"\"), $(null), $(undefined), $(false)\n\n if ( !selector ) {\n\n return this;\n\n }\n\n\n\n // Handle HTML strings\n\n if ( typeof selector === \"string\" ) {\n\n if ( selector.charAt(0) === \"<\" && selector.charAt( selector.length - 1 ) === \">\" && selector.length >= 3 ) {\n\n // Assume that strings that start and end with <> are HTML and skip the regex check\n\n match = [ null, selector, null ];\n\n\n\n } else {\n\n match = rquickExpr.exec( selector );\n\n }\n\n\n\n // Match html or make sure no context is specified for #id\n\n if ( match && (match[1] || !context) ) {\n\n\n\n // HANDLE: $(html) -> $(array)\n\n if ( match[1] ) {\n\n context = context instanceof jQuery ? context[0] : context;\n\n\n\n // scripts is true for back-compat\n\n // Intentionally let the error be thrown if parseHTML is not present\n\n jQuery.merge( this, jQuery.parseHTML(\n\n match[1],\n\n context && context.nodeType ? context.ownerDocument || context : document,\n\n true\n\n ) );\n\n\n\n // HANDLE: $(html, props)\n\n if ( rsingleTag.test( match[1] ) && jQuery.isPlainObject( context ) ) {\n\n for ( match in context ) {\n\n // Properties of context are called as methods if possible\n\n if ( jQuery.isFunction( this[ match ] ) ) {\n\n this[ match ]( context[ match ] );\n\n\n\n // ...and otherwise set as attributes\n\n } else {\n\n this.attr( match, context[ match ] );\n\n }\n\n }\n\n }\n\n\n\n return this;\n\n\n\n // HANDLE: $(#id)\n\n } else {\n\n elem = document.getElementById( match[2] );\n\n\n\n // Check parentNode to catch when Blackberry 4.6 returns\n\n // nodes that are no longer in the document #6963\n\n if ( elem && elem.parentNode ) {\n\n // Handle the case where IE and Opera return items\n\n // by name instead of ID\n\n if ( elem.id !== match[2] ) {\n\n return rootjQuery.find( selector );\n\n }\n\n\n\n // Otherwise, we inject the element directly into the jQuery object\n\n this.length = 1;\n\n this[0] = elem;\n\n }\n\n\n\n this.context = 
document;\n\n this.selector = selector;\n\n return this;\n\n }\n\n\n\n // HANDLE: $(expr, $(...))\n\n } else if ( !context || context.jquery ) {\n\n return ( context || rootjQuery ).find( selector );\n\n\n\n // HANDLE: $(expr, context)\n\n // (which is just equivalent to: $(context).find(expr)\n\n } else {\n\n return this.constructor( context ).find( selector );\n\n }\n\n\n\n // HANDLE: $(DOMElement)\n\n } else if ( selector.nodeType ) {\n\n this.context = this[0] = selector;\n\n this.length = 1;\n\n return this;\n\n\n\n // HANDLE: $(function)\n\n // Shortcut for document ready\n\n } else if ( jQuery.isFunction( selector ) ) {\n\n return typeof rootjQuery.ready !== \"undefined\" ?\n\n rootjQuery.ready( selector ) :\n\n // Execute immediately if ready is not present\n\n selector( jQuery );\n\n }\n\n\n\n if ( selector.selector !== undefined ) {\n\n this.selector = selector.selector;\n\n this.context = selector.context;\n\n }\n\n\n\n return jQuery.makeArray( selector, this );\n\n };\n\n\n\n// Give the init function the jQuery prototype for later instantiation\n\n init.prototype = jQuery.fn;\n\n\n\n// Initialize central reference\n\n rootjQuery = jQuery( document );\n\n\n\n\n\n var rparentsprev = /^(?:parents|prev(?:Until|All))/,\n\n // methods guaranteed to produce a unique set when starting from a unique set\n\n guaranteedUnique = {\n\n children: true,\n\n contents: true,\n\n next: true,\n\n prev: true\n\n };\n\n\n\n jQuery.extend({\n\n dir: function( elem, dir, until ) {\n\n var matched = [],\n\n cur = elem[ dir ];\n\n\n\n while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) {\n\n if ( cur.nodeType === 1 ) {\n\n matched.push( cur );\n\n }\n\n cur = cur[dir];\n\n }\n\n return matched;\n\n },\n\n\n\n sibling: function( n, elem ) {\n\n var r = [];\n\n\n\n for ( ; n; n = n.nextSibling ) {\n\n if ( n.nodeType === 1 && n !== elem ) {\n\n r.push( n );\n\n }\n\n }\n\n\n\n return r;\n\n }\n\n });\n\n\n\n 
jQuery.fn.extend({\n\n has: function( target ) {\n\n var i,\n\n targets = jQuery( target, this ),\n\n len = targets.length;\n\n\n\n return this.filter(function() {\n\n for ( i = 0; i < len; i++ ) {\n\n if ( jQuery.contains( this, targets[i] ) ) {\n\n return true;\n\n }\n\n }\n\n });\n\n },\n\n\n\n closest: function( selectors, context ) {\n\n var cur,\n\n i = 0,\n\n l = this.length,\n\n matched = [],\n\n pos = rneedsContext.test( selectors ) || typeof selectors !== \"string\" ?\n\n jQuery( selectors, context || this.context ) :\n\n 0;\n\n\n\n for ( ; i < l; i++ ) {\n\n for ( cur = this[i]; cur && cur !== context; cur = cur.parentNode ) {\n\n // Always skip document fragments\n\n if ( cur.nodeType < 11 && (pos ?\n\n pos.index(cur) > -1 :\n\n\n\n // Don't pass non-elements to Sizzle\n\n cur.nodeType === 1 &&\n\n jQuery.find.matchesSelector(cur, selectors)) ) {\n\n\n\n matched.push( cur );\n\n break;\n\n }\n\n }\n\n }\n\n\n\n return this.pushStack( matched.length > 1 ? jQuery.unique( matched ) : matched );\n\n },\n\n\n\n // Determine the position of an element within\n\n // the matched set of elements\n\n index: function( elem ) {\n\n\n\n // No argument, return index in parent\n\n if ( !elem ) {\n\n return ( this[0] && this[0].parentNode ) ? this.first().prevAll().length : -1;\n\n }\n\n\n\n // index in selector\n\n if ( typeof elem === \"string\" ) {\n\n return jQuery.inArray( this[0], jQuery( elem ) );\n\n }\n\n\n\n // Locate the position of the desired element\n\n return jQuery.inArray(\n\n // If it receives a jQuery object, the first element is used\n\n elem.jquery ? 
elem[0] : elem, this );\n\n },\n\n\n\n add: function( selector, context ) {\n\n return this.pushStack(\n\n jQuery.unique(\n\n jQuery.merge( this.get(), jQuery( selector, context ) )\n\n )\n\n );\n\n },\n\n\n\n addBack: function( selector ) {\n\n return this.add( selector == null ?\n\n this.prevObject : this.prevObject.filter(selector)\n\n );\n\n }\n\n });\n\n\n\n function sibling( cur, dir ) {\n\n do {\n\n cur = cur[ dir ];\n\n } while ( cur && cur.nodeType !== 1 );\n\n\n\n return cur;\n\n }\n\n\n\n jQuery.each({\n\n parent: function( elem ) {\n\n var parent = elem.parentNode;\n\n return parent && parent.nodeType !== 11 ? parent : null;\n\n },\n\n parents: function( elem ) {\n\n return jQuery.dir( elem, \"parentNode\" );\n\n },\n\n parentsUntil: function( elem, i, until ) {\n\n return jQuery.dir( elem, \"parentNode\", until );\n\n },\n\n next: function( elem ) {\n\n return sibling( elem, \"nextSibling\" );\n\n },\n\n prev: function( elem ) {\n\n return sibling( elem, \"previousSibling\" );\n\n },\n\n nextAll: function( elem ) {\n\n return jQuery.dir( elem, \"nextSibling\" );\n\n },\n\n prevAll: function( elem ) {\n\n return jQuery.dir( elem, \"previousSibling\" );\n\n },\n\n nextUntil: function( elem, i, until ) {\n\n return jQuery.dir( elem, \"nextSibling\", until );\n\n },\n\n prevUntil: function( elem, i, until ) {\n\n return jQuery.dir( elem, \"previousSibling\", until );\n\n },\n\n siblings: function( elem ) {\n\n return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem );\n\n },\n\n children: function( elem ) {\n\n return jQuery.sibling( elem.firstChild );\n\n },\n\n contents: function( elem ) {\n\n return jQuery.nodeName( elem, \"iframe\" ) ?\n\n elem.contentDocument || elem.contentWindow.document :\n\n jQuery.merge( [], elem.childNodes );\n\n }\n\n }, function( name, fn ) {\n\n jQuery.fn[ name ] = function( until, selector ) {\n\n var ret = jQuery.map( this, fn, until );\n\n\n\n if ( name.slice( -5 ) !== \"Until\" ) {\n\n selector = until;\n\n 
}\n\n\n\n if ( selector && typeof selector === \"string\" ) {\n\n ret = jQuery.filter( selector, ret );\n\n }\n\n\n\n if ( this.length > 1 ) {\n\n // Remove duplicates\n\n if ( !guaranteedUnique[ name ] ) {\n\n ret = jQuery.unique( ret );\n\n }\n\n\n\n // Reverse order for parents* and prev-derivatives\n\n if ( rparentsprev.test( name ) ) {\n\n ret = ret.reverse();\n\n }\n\n }\n\n\n\n return this.pushStack( ret );\n\n };\n\n });\n\n var rnotwhite = (/\\S+/g);\n\n\n\n\n\n\n\n// String to Object options format cache\n\n var optionsCache = {};\n\n\n\n// Convert String-formatted options into Object-formatted ones and store in cache\n\n function createOptions( options ) {\n\n var object = optionsCache[ options ] = {};\n\n jQuery.each( options.match( rnotwhite ) || [], function( _, flag ) {\n\n object[ flag ] = true;\n\n });\n\n return object;\n\n }\n\n\n\n /*\n\n * Create a callback list using the following parameters:\n\n *\n\n *\toptions: an optional list of space-separated options that will change how\n\n *\t\t\tthe callback list behaves or a more traditional option object\n\n *\n\n * By default a callback list will act like an event callback list and can be\n\n * \"fired\" multiple times.\n\n *\n\n * Possible options:\n\n *\n\n *\tonce:\t\t\twill ensure the callback list can only be fired once (like a Deferred)\n\n *\n\n *\tmemory:\t\t\twill keep track of previous values and will call any callback added\n\n *\t\t\t\t\tafter the list has been fired right away with the latest \"memorized\"\n\n *\t\t\t\t\tvalues (like a Deferred)\n\n *\n\n *\tunique:\t\t\twill ensure a callback can only be added once (no duplicate in the list)\n\n *\n\n *\tstopOnFalse:\tinterrupt callings when a callback returns false\n\n *\n\n */\n\n jQuery.Callbacks = function( options ) {\n\n\n\n // Convert options from String-formatted to Object-formatted if needed\n\n // (we check in cache first)\n\n options = typeof options === \"string\" ?\n\n ( optionsCache[ options ] || createOptions( options 
) ) :\n\n jQuery.extend( {}, options );\n\n\n\n var // Flag to know if list is currently firing\n\n firing,\n\n // Last fire value (for non-forgettable lists)\n\n memory,\n\n // Flag to know if list was already fired\n\n fired,\n\n // End of the loop when firing\n\n firingLength,\n\n // Index of currently firing callback (modified by remove if needed)\n\n firingIndex,\n\n // First callback to fire (used internally by add and fireWith)\n\n firingStart,\n\n // Actual callback list\n\n list = [],\n\n // Stack of fire calls for repeatable lists\n\n stack = !options.once && [],\n\n // Fire callbacks\n\n fire = function( data ) {\n\n memory = options.memory && data;\n\n fired = true;\n\n firingIndex = firingStart || 0;\n\n firingStart = 0;\n\n firingLength = list.length;\n\n firing = true;\n\n for ( ; list && firingIndex < firingLength; firingIndex++ ) {\n\n if ( list[ firingIndex ].apply( data[ 0 ], data[ 1 ] ) === false && options.stopOnFalse ) {\n\n memory = false; // To prevent further calls using add\n\n break;\n\n }\n\n }\n\n firing = false;\n\n if ( list ) {\n\n if ( stack ) {\n\n if ( stack.length ) {\n\n fire( stack.shift() );\n\n }\n\n } else if ( memory ) {\n\n list = [];\n\n } else {\n\n self.disable();\n\n }\n\n }\n\n },\n\n // Actual Callbacks object\n\n self = {\n\n // Add a callback or a collection of callbacks to the list\n\n add: function() {\n\n if ( list ) {\n\n // First, we save the current length\n\n var start = list.length;\n\n (function add( args ) {\n\n jQuery.each( args, function( _, arg ) {\n\n var type = jQuery.type( arg );\n\n if ( type === \"function\" ) {\n\n if ( !options.unique || !self.has( arg ) ) {\n\n list.push( arg );\n\n }\n\n } else if ( arg && arg.length && type !== \"string\" ) {\n\n // Inspect recursively\n\n add( arg );\n\n }\n\n });\n\n })( arguments );\n\n // Do we need to add the callbacks to the\n\n // current firing batch?\n\n if ( firing ) {\n\n firingLength = list.length;\n\n // With memory, if we're not firing then\n\n 
// we should call right away\n\n } else if ( memory ) {\n\n firingStart = start;\n\n fire( memory );\n\n }\n\n }\n\n return this;\n\n },\n\n // Remove a callback from the list\n\n remove: function() {\n\n if ( list ) {\n\n jQuery.each( arguments, function( _, arg ) {\n\n var index;\n\n while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) {\n\n list.splice( index, 1 );\n\n // Handle firing indexes\n\n if ( firing ) {\n\n if ( index <= firingLength ) {\n\n firingLength--;\n\n }\n\n if ( index <= firingIndex ) {\n\n firingIndex--;\n\n }\n\n }\n\n }\n\n });\n\n }\n\n return this;\n\n },\n\n // Check if a given callback is in the list.\n\n // If no argument is given, return whether or not list has callbacks attached.\n\n has: function( fn ) {\n\n return fn ? jQuery.inArray( fn, list ) > -1 : !!( list && list.length );\n\n },\n\n // Remove all callbacks from the list\n\n empty: function() {\n\n list = [];\n\n firingLength = 0;\n\n return this;\n\n },\n\n // Have the list do nothing anymore\n\n disable: function() {\n\n list = stack = memory = undefined;\n\n return this;\n\n },\n\n // Is it disabled?\n\n disabled: function() {\n\n return !list;\n\n },\n\n // Lock the list in its current state\n\n lock: function() {\n\n stack = undefined;\n\n if ( !memory ) {\n\n self.disable();\n\n }\n\n return this;\n\n },\n\n // Is it locked?\n\n locked: function() {\n\n return !stack;\n\n },\n\n // Call all callbacks with the given context and arguments\n\n fireWith: function( context, args ) {\n\n if ( list && ( !fired || stack ) ) {\n\n args = args || [];\n\n args = [ context, args.slice ? 
args.slice() : args ];\n\n if ( firing ) {\n\n stack.push( args );\n\n } else {\n\n fire( args );\n\n }\n\n }\n\n return this;\n\n },\n\n // Call all the callbacks with the given arguments\n\n fire: function() {\n\n self.fireWith( this, arguments );\n\n return this;\n\n },\n\n // To know if the callbacks have already been called at least once\n\n fired: function() {\n\n return !!fired;\n\n }\n\n };\n\n\n\n return self;\n\n };\n\n\n\n\n\n jQuery.extend({\n\n\n\n Deferred: function( func ) {\n\n var tuples = [\n\n // action, add listener, listener list, final state\n\n [ \"resolve\", \"done\", jQuery.Callbacks(\"once memory\"), \"resolved\" ],\n\n [ \"reject\", \"fail\", jQuery.Callbacks(\"once memory\"), \"rejected\" ],\n\n [ \"notify\", \"progress\", jQuery.Callbacks(\"memory\") ]\n\n ],\n\n state = \"pending\",\n\n promise = {\n\n state: function() {\n\n return state;\n\n },\n\n always: function() {\n\n deferred.done( arguments ).fail( arguments );\n\n return this;\n\n },\n\n then: function( /* fnDone, fnFail, fnProgress */ ) {\n\n var fns = arguments;\n\n return jQuery.Deferred(function( newDefer ) {\n\n jQuery.each( tuples, function( i, tuple ) {\n\n var fn = jQuery.isFunction( fns[ i ] ) && fns[ i ];\n\n // deferred[ done | fail | progress ] for forwarding actions to newDefer\n\n deferred[ tuple[1] ](function() {\n\n var returned = fn && fn.apply( this, arguments );\n\n if ( returned && jQuery.isFunction( returned.promise ) ) {\n\n returned.promise()\n\n .done( newDefer.resolve )\n\n .fail( newDefer.reject )\n\n .progress( newDefer.notify );\n\n } else {\n\n newDefer[ tuple[ 0 ] + \"With\" ]( this === promise ? newDefer.promise() : this, fn ? [ returned ] : arguments );\n\n }\n\n });\n\n });\n\n fns = null;\n\n }).promise();\n\n },\n\n // Get a promise for this deferred\n\n // If obj is provided, the promise aspect is added to the object\n\n promise: function( obj ) {\n\n return obj != null ? 
jQuery.extend( obj, promise ) : promise;\n\n }\n\n },\n\n deferred = {};\n\n\n\n // Keep pipe for back-compat\n\n promise.pipe = promise.then;\n\n\n\n // Add list-specific methods\n\n jQuery.each( tuples, function( i, tuple ) {\n\n var list = tuple[ 2 ],\n\n stateString = tuple[ 3 ];\n\n\n\n // promise[ done | fail | progress ] = list.add\n\n promise[ tuple[1] ] = list.add;\n\n\n\n // Handle state\n\n if ( stateString ) {\n\n list.add(function() {\n\n // state = [ resolved | rejected ]\n\n state = stateString;\n\n\n\n // [ reject_list | resolve_list ].disable; progress_list.lock\n\n }, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock );\n\n }\n\n\n\n // deferred[ resolve | reject | notify ]\n\n deferred[ tuple[0] ] = function() {\n\n deferred[ tuple[0] + \"With\" ]( this === deferred ? promise : this, arguments );\n\n return this;\n\n };\n\n deferred[ tuple[0] + \"With\" ] = list.fireWith;\n\n });\n\n\n\n // Make the deferred a promise\n\n promise.promise( deferred );\n\n\n\n // Call given func if any\n\n if ( func ) {\n\n func.call( deferred, deferred );\n\n }\n\n\n\n // All done!\n\n return deferred;\n\n },\n\n\n\n // Deferred helper\n\n when: function( subordinate /* , ..., subordinateN */ ) {\n\n var i = 0,\n\n resolveValues = slice.call( arguments ),\n\n length = resolveValues.length,\n\n\n\n // the count of uncompleted subordinates\n\n remaining = length !== 1 || ( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0,\n\n\n\n // the master Deferred. If resolveValues consist of only a single Deferred, just use that.\n\n deferred = remaining === 1 ? subordinate : jQuery.Deferred(),\n\n\n\n // Update function for both resolve and progress values\n\n updateFunc = function( i, contexts, values ) {\n\n return function( value ) {\n\n contexts[ i ] = this;\n\n values[ i ] = arguments.length > 1 ? 
slice.call( arguments ) : value;\n\n if ( values === progressValues ) {\n\n deferred.notifyWith( contexts, values );\n\n\n\n } else if ( !(--remaining) ) {\n\n deferred.resolveWith( contexts, values );\n\n }\n\n };\n\n },\n\n\n\n progressValues, progressContexts, resolveContexts;\n\n\n\n // add listeners to Deferred subordinates; treat others as resolved\n\n if ( length > 1 ) {\n\n progressValues = new Array( length );\n\n progressContexts = new Array( length );\n\n resolveContexts = new Array( length );\n\n for ( ; i < length; i++ ) {\n\n if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) {\n\n resolveValues[ i ].promise()\n\n .done( updateFunc( i, resolveContexts, resolveValues ) )\n\n .fail( deferred.reject )\n\n .progress( updateFunc( i, progressContexts, progressValues ) );\n\n } else {\n\n --remaining;\n\n }\n\n }\n\n }\n\n\n\n // if we're not waiting on anything, resolve the master\n\n if ( !remaining ) {\n\n deferred.resolveWith( resolveContexts, resolveValues );\n\n }\n\n\n\n return deferred.promise();\n\n }\n\n });\n\n\n\n\n\n// The deferred used on DOM ready\n\n var readyList;\n\n\n\n jQuery.fn.ready = function( fn ) {\n\n // Add the callback\n\n jQuery.ready.promise().done( fn );\n\n\n\n return this;\n\n };\n\n\n\n jQuery.extend({\n\n // Is the DOM ready to be used? Set to true once it occurs.\n\n isReady: false,\n\n\n\n // A counter to track how many items to wait for before\n\n // the ready event fires. See #6781\n\n readyWait: 1,\n\n\n\n // Hold (or release) the ready event\n\n holdReady: function( hold ) {\n\n if ( hold ) {\n\n jQuery.readyWait++;\n\n } else {\n\n jQuery.ready( true );\n\n }\n\n },\n\n\n\n // Handle when the DOM is ready\n\n ready: function( wait ) {\n\n\n\n // Abort if there are pending holds or we're already ready\n\n if ( wait === true ? 
--jQuery.readyWait : jQuery.isReady ) {\n\n return;\n\n }\n\n\n\n // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443).\n\n if ( !document.body ) {\n\n return setTimeout( jQuery.ready );\n\n }\n\n\n\n // Remember that the DOM is ready\n\n jQuery.isReady = true;\n\n\n\n // If a normal DOM Ready event fired, decrement, and wait if need be\n\n if ( wait !== true && --jQuery.readyWait > 0 ) {\n\n return;\n\n }\n\n\n\n // If there are functions bound, to execute\n\n readyList.resolveWith( document, [ jQuery ] );\n\n\n\n // Trigger any bound ready events\n\n if ( jQuery.fn.triggerHandler ) {\n\n jQuery( document ).triggerHandler( \"ready\" );\n\n jQuery( document ).off( \"ready\" );\n\n }\n\n }\n\n });\n\n\n\n /**\n\n * Clean-up method for dom ready events\n\n */\n\n function detach() {\n\n if ( document.addEventListener ) {\n\n document.removeEventListener( \"DOMContentLoaded\", completed, false );\n\n window.removeEventListener( \"load\", completed, false );\n\n\n\n } else {\n\n document.detachEvent( \"onreadystatechange\", completed );\n\n window.detachEvent( \"onload\", completed );\n\n }\n\n }\n\n\n\n /**\n\n * The ready event handler and self cleanup method\n\n */\n\n function completed() {\n\n // readyState === \"complete\" is good enough for us to call the dom ready in oldIE\n\n if ( document.addEventListener || event.type === \"load\" || document.readyState === \"complete\" ) {\n\n detach();\n\n jQuery.ready();\n\n }\n\n }\n\n\n\n jQuery.ready.promise = function( obj ) {\n\n if ( !readyList ) {\n\n\n\n readyList = jQuery.Deferred();\n\n\n\n // Catch cases where $(document).ready() is called after the browser event has already occurred.\n\n // we once tried to use readyState \"interactive\" here, but it caused issues like the one\n\n // discovered by ChrisS here: http://bugs.jquery.com/ticket/12282#comment:15\n\n if ( document.readyState === \"complete\" ) {\n\n // Handle it asynchronously to allow scripts the opportunity to 
delay ready\n\n setTimeout( jQuery.ready );\n\n\n\n // Standards-based browsers support DOMContentLoaded\n\n } else if ( document.addEventListener ) {\n\n // Use the handy event callback\n\n document.addEventListener( \"DOMContentLoaded\", completed, false );\n\n\n\n // A fallback to window.onload, that will always work\n\n window.addEventListener( \"load\", completed, false );\n\n\n\n // If IE event model is used\n\n } else {\n\n // Ensure firing before onload, maybe late but safe also for iframes\n\n document.attachEvent( \"onreadystatechange\", completed );\n\n\n\n // A fallback to window.onload, that will always work\n\n window.attachEvent( \"onload\", completed );\n\n\n\n // If IE and not a frame\n\n // continually check to see if the document is ready\n\n var top = false;\n\n\n\n try {\n\n top = window.frameElement == null && document.documentElement;\n\n } catch(e) {}\n\n\n\n if ( top && top.doScroll ) {\n\n (function doScrollCheck() {\n\n if ( !jQuery.isReady ) {\n\n\n\n try {\n\n // Use the trick by Diego Perini\n\n // http://javascript.nwbox.com/IEContentLoaded/\n\n top.doScroll(\"left\");\n\n } catch(e) {\n\n return setTimeout( doScrollCheck, 50 );\n\n }\n\n\n\n // detach all dom ready events\n\n detach();\n\n\n\n // and execute any waiting functions\n\n jQuery.ready();\n\n }\n\n })();\n\n }\n\n }\n\n }\n\n return readyList.promise( obj );\n\n };\n\n\n\n\n\n var strundefined = typeof undefined;\n\n\n\n\n\n\n\n// Support: IE<9\n\n// Iteration over object's inherited properties before its own\n\n var i;\n\n for ( i in jQuery( support ) ) {\n\n break;\n\n }\n\n support.ownLast = i !== \"0\";\n\n\n\n// Note: most support tests are defined in their respective modules.\n\n// false until the test is run\n\n support.inlineBlockNeedsLayout = false;\n\n\n\n// Execute ASAP in case we need to set body.style.zoom\n\n jQuery(function() {\n\n // Minified: var a,b,c,d\n\n var val, div, body, container;\n\n\n\n body = document.getElementsByTagName( \"body\" )[ 0 ];\n\n 
if ( !body || !body.style ) {\n\n // Return for frameset docs that don't have a body\n\n return;\n\n }\n\n\n\n // Setup\n\n div = document.createElement( \"div\" );\n\n container = document.createElement( \"div\" );\n\n container.style.cssText = \"position:absolute;border:0;width:0;height:0;top:0;left:-9999px\";\n\n body.appendChild( container ).appendChild( div );\n\n\n\n if ( typeof div.style.zoom !== strundefined ) {\n\n // Support: IE<8\n\n // Check if natively block-level elements act like inline-block\n\n // elements when setting their display to 'inline' and giving\n\n // them layout\n\n div.style.cssText = \"display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1\";\n\n\n\n support.inlineBlockNeedsLayout = val = div.offsetWidth === 3;\n\n if ( val ) {\n\n // Prevent IE 6 from affecting layout for positioned elements #11048\n\n // Prevent IE from shrinking the body in IE 7 mode #12869\n\n // Support: IE<8\n\n body.style.zoom = 1;\n\n }\n\n }\n\n\n\n body.removeChild( container );\n\n });\n\n\n\n\n\n\n\n\n\n (function() {\n\n var div = document.createElement( \"div\" );\n\n\n\n // Execute the test only if not already executed in another module.\n\n if (support.deleteExpando == null) {\n\n // Support: IE<9\n\n support.deleteExpando = true;\n\n try {\n\n delete div.test;\n\n } catch( e ) {\n\n support.deleteExpando = false;\n\n }\n\n }\n\n\n\n // Null elements to avoid leaks in IE.\n\n div = null;\n\n })();\n\n\n\n\n\n /**\n\n * Determines whether an object can have data\n\n */\n\n jQuery.acceptData = function( elem ) {\n\n var noData = jQuery.noData[ (elem.nodeName + \" \").toLowerCase() ],\n\n nodeType = +elem.nodeType || 1;\n\n\n\n // Do not set data on non-element DOM nodes because it will not be cleared (#8335).\n\n return nodeType !== 1 && nodeType !== 9 ?\n\n false :\n\n\n\n // Nodes accept data unless otherwise specified; rejection can be conditional\n\n !noData || noData !== true && elem.getAttribute(\"classid\") === noData;\n\n };\n\n\n\n\n\n 
var rbrace = /^(?:\\{[\\w\\W]*\\}|\\[[\\w\\W]*\\])$/,\n\n rmultiDash = /([A-Z])/g;\n\n\n\n function dataAttr( elem, key, data ) {\n\n // If nothing was found internally, try to fetch any\n\n // data from the HTML5 data-* attribute\n\n if ( data === undefined && elem.nodeType === 1 ) {\n\n\n\n var name = \"data-\" + key.replace( rmultiDash, \"-$1\" ).toLowerCase();\n\n\n\n data = elem.getAttribute( name );\n\n\n\n if ( typeof data === \"string\" ) {\n\n try {\n\n data = data === \"true\" ? true :\n\n data === \"false\" ? false :\n\n data === \"null\" ? null :\n\n // Only convert to a number if it doesn't change the string\n\n +data + \"\" === data ? +data :\n\n rbrace.test( data ) ? jQuery.parseJSON( data ) :\n\n data;\n\n } catch( e ) {}\n\n\n\n // Make sure we set the data so it isn't changed later\n\n jQuery.data( elem, key, data );\n\n\n\n } else {\n\n data = undefined;\n\n }\n\n }\n\n\n\n return data;\n\n }\n\n\n\n// checks a cache object for emptiness\n\n function isEmptyDataObject( obj ) {\n\n var name;\n\n for ( name in obj ) {\n\n\n\n // if the public data object is empty, the private is still empty\n\n if ( name === \"data\" && jQuery.isEmptyObject( obj[name] ) ) {\n\n continue;\n\n }\n\n if ( name !== \"toJSON\" ) {\n\n return false;\n\n }\n\n }\n\n\n\n return true;\n\n }\n\n\n\n function internalData( elem, name, data, pvt /* Internal Use Only */ ) {\n\n if ( !jQuery.acceptData( elem ) ) {\n\n return;\n\n }\n\n\n\n var ret, thisCache,\n\n internalKey = jQuery.expando,\n\n\n\n // We have to handle DOM nodes and JS objects differently because IE6-7\n\n // can't GC object references properly across the DOM-JS boundary\n\n isNode = elem.nodeType,\n\n\n\n // Only DOM nodes need the global jQuery cache; JS object data is\n\n // attached directly to the object so GC can occur automatically\n\n cache = isNode ? 
jQuery.cache : elem,\n\n\n\n // Only defining an ID for JS objects if its cache already exists allows\n\n // the code to shortcut on the same path as a DOM node with no cache\n\n id = isNode ? elem[ internalKey ] : elem[ internalKey ] && internalKey;\n\n\n\n // Avoid doing any more work than we need to when trying to get data on an\n\n // object that has no data at all\n\n if ( (!id || !cache[id] || (!pvt && !cache[id].data)) && data === undefined && typeof name === \"string\" ) {\n\n return;\n\n }\n\n\n\n if ( !id ) {\n\n // Only DOM nodes need a new unique ID for each element since their data\n\n // ends up in the global cache\n\n if ( isNode ) {\n\n id = elem[ internalKey ] = deletedIds.pop() || jQuery.guid++;\n\n } else {\n\n id = internalKey;\n\n }\n\n }\n\n\n\n if ( !cache[ id ] ) {\n\n // Avoid exposing jQuery metadata on plain JS objects when the object\n\n // is serialized using JSON.stringify\n\n cache[ id ] = isNode ? {} : { toJSON: jQuery.noop };\n\n }\n\n\n\n // An object can be passed to jQuery.data instead of a key/value pair; this gets\n\n // shallow copied over onto the existing cache\n\n if ( typeof name === \"object\" || typeof name === \"function\" ) {\n\n if ( pvt ) {\n\n cache[ id ] = jQuery.extend( cache[ id ], name );\n\n } else {\n\n cache[ id ].data = jQuery.extend( cache[ id ].data, name );\n\n }\n\n }\n\n\n\n thisCache = cache[ id ];\n\n\n\n // jQuery data() is stored in a separate object inside the object's internal data\n\n // cache in order to avoid key collisions between internal data and user-defined\n\n // data.\n\n if ( !pvt ) {\n\n if ( !thisCache.data ) {\n\n thisCache.data = {};\n\n }\n\n\n\n thisCache = thisCache.data;\n\n }\n\n\n\n if ( data !== undefined ) {\n\n thisCache[ jQuery.camelCase( name ) ] = data;\n\n }\n\n\n\n // Check for both converted-to-camel and non-converted data property names\n\n // If a data property was specified\n\n if ( typeof name === \"string\" ) {\n\n\n\n // First Try to find as-is property data\n\n 
ret = thisCache[ name ];\n\n\n\n // Test for null|undefined property data\n\n if ( ret == null ) {\n\n\n\n // Try to find the camelCased property\n\n ret = thisCache[ jQuery.camelCase( name ) ];\n\n }\n\n } else {\n\n ret = thisCache;\n\n }\n\n\n\n return ret;\n\n }\n\n\n\n function internalRemoveData( elem, name, pvt ) {\n\n if ( !jQuery.acceptData( elem ) ) {\n\n return;\n\n }\n\n\n\n var thisCache, i,\n\n isNode = elem.nodeType,\n\n\n\n // See jQuery.data for more information\n\n cache = isNode ? jQuery.cache : elem,\n\n id = isNode ? elem[ jQuery.expando ] : jQuery.expando;\n\n\n\n // If there is already no cache entry for this object, there is no\n\n // purpose in continuing\n\n if ( !cache[ id ] ) {\n\n return;\n\n }\n\n\n\n if ( name ) {\n\n\n\n thisCache = pvt ? cache[ id ] : cache[ id ].data;\n\n\n\n if ( thisCache ) {\n\n\n\n // Support array or space separated string names for data keys\n\n if ( !jQuery.isArray( name ) ) {\n\n\n\n // try the string as a key before any manipulation\n\n if ( name in thisCache ) {\n\n name = [ name ];\n\n } else {\n\n\n\n // split the camel cased version by spaces unless a key with the spaces exists\n\n name = jQuery.camelCase( name );\n\n if ( name in thisCache ) {\n\n name = [ name ];\n\n } else {\n\n name = name.split(\" \");\n\n }\n\n }\n\n } else {\n\n // If \"name\" is an array of keys...\n\n // When data is initially created, via (\"key\", \"val\") signature,\n\n // keys will be converted to camelCase.\n\n // Since there is no way to tell _how_ a key was added, remove\n\n // both plain key and camelCase key. #12786\n\n // This will only penalize the array argument path.\n\n name = name.concat( jQuery.map( name, jQuery.camelCase ) );\n\n }\n\n\n\n i = name.length;\n\n while ( i-- ) {\n\n delete thisCache[ name[i] ];\n\n }\n\n\n\n // If there is no data left in the cache, we want to continue\n\n // and let the cache object itself get destroyed\n\n if ( pvt ? 
!isEmptyDataObject(thisCache) : !jQuery.isEmptyObject(thisCache) ) {\n\n return;\n\n }\n\n }\n\n }\n\n\n\n // See jQuery.data for more information\n\n if ( !pvt ) {\n\n delete cache[ id ].data;\n\n\n\n // Don't destroy the parent cache unless the internal data object\n\n // had been the only thing left in it\n\n if ( !isEmptyDataObject( cache[ id ] ) ) {\n\n return;\n\n }\n\n }\n\n\n\n // Destroy the cache\n\n if ( isNode ) {\n\n jQuery.cleanData( [ elem ], true );\n\n\n\n // Use delete when supported for expandos or `cache` is not a window per isWindow (#10080)\n\n /* jshint eqeqeq: false */\n\n } else if ( support.deleteExpando || cache != cache.window ) {\n\n /* jshint eqeqeq: true */\n\n delete cache[ id ];\n\n\n\n // When all else fails, null\n\n } else {\n\n cache[ id ] = null;\n\n }\n\n }\n\n\n\n jQuery.extend({\n\n cache: {},\n\n\n\n // The following elements (space-suffixed to avoid Object.prototype collisions)\n\n // throw uncatchable exceptions if you attempt to set expando properties\n\n noData: {\n\n \"applet \": true,\n\n \"embed \": true,\n\n // ...but Flash objects (which have this classid) *can* handle expandos\n\n \"object \": \"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000\"\n\n },\n\n\n\n hasData: function( elem ) {\n\n elem = elem.nodeType ? 
jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ];\n\n return !!elem && !isEmptyDataObject( elem );\n\n },\n\n\n\n data: function( elem, name, data ) {\n\n return internalData( elem, name, data );\n\n },\n\n\n\n removeData: function( elem, name ) {\n\n return internalRemoveData( elem, name );\n\n },\n\n\n\n // For internal use only.\n\n _data: function( elem, name, data ) {\n\n return internalData( elem, name, data, true );\n\n },\n\n\n\n _removeData: function( elem, name ) {\n\n return internalRemoveData( elem, name, true );\n\n }\n\n });\n\n\n\n jQuery.fn.extend({\n\n data: function( key, value ) {\n\n var i, name, data,\n\n elem = this[0],\n\n attrs = elem && elem.attributes;\n\n\n\n // Special expections of .data basically thwart jQuery.access,\n\n // so implement the relevant behavior ourselves\n\n\n\n // Gets all values\n\n if ( key === undefined ) {\n\n if ( this.length ) {\n\n data = jQuery.data( elem );\n\n\n\n if ( elem.nodeType === 1 && !jQuery._data( elem, \"parsedAttrs\" ) ) {\n\n i = attrs.length;\n\n while ( i-- ) {\n\n\n\n // Support: IE11+\n\n // The attrs elements can be null (#14894)\n\n if ( attrs[ i ] ) {\n\n name = attrs[ i ].name;\n\n if ( name.indexOf( \"data-\" ) === 0 ) {\n\n name = jQuery.camelCase( name.slice(5) );\n\n dataAttr( elem, name, data[ name ] );\n\n }\n\n }\n\n }\n\n jQuery._data( elem, \"parsedAttrs\", true );\n\n }\n\n }\n\n\n\n return data;\n\n }\n\n\n\n // Sets multiple values\n\n if ( typeof key === \"object\" ) {\n\n return this.each(function() {\n\n jQuery.data( this, key );\n\n });\n\n }\n\n\n\n return arguments.length > 1 ?\n\n\n\n // Sets one value\n\n this.each(function() {\n\n jQuery.data( this, key, value );\n\n }) :\n\n\n\n // Gets one value\n\n // Try to fetch any internally stored data first\n\n elem ? 
dataAttr( elem, key, jQuery.data( elem, key ) ) : undefined;\n\n },\n\n\n\n removeData: function( key ) {\n\n return this.each(function() {\n\n jQuery.removeData( this, key );\n\n });\n\n }\n\n });\n\n\n\n\n\n jQuery.extend({\n\n queue: function( elem, type, data ) {\n\n var queue;\n\n\n\n if ( elem ) {\n\n type = ( type || \"fx\" ) + \"queue\";\n\n queue = jQuery._data( elem, type );\n\n\n\n // Speed up dequeue by getting out quickly if this is just a lookup\n\n if ( data ) {\n\n if ( !queue || jQuery.isArray(data) ) {\n\n queue = jQuery._data( elem, type, jQuery.makeArray(data) );\n\n } else {\n\n queue.push( data );\n\n }\n\n }\n\n return queue || [];\n\n }\n\n },\n\n\n\n dequeue: function( elem, type ) {\n\n type = type || \"fx\";\n\n\n\n var queue = jQuery.queue( elem, type ),\n\n startLength = queue.length,\n\n fn = queue.shift(),\n\n hooks = jQuery._queueHooks( elem, type ),\n\n next = function() {\n\n jQuery.dequeue( elem, type );\n\n };\n\n\n\n // If the fx queue is dequeued, always remove the progress sentinel\n\n if ( fn === \"inprogress\" ) {\n\n fn = queue.shift();\n\n startLength--;\n\n }\n\n\n\n if ( fn ) {\n\n\n\n // Add a progress sentinel to prevent the fx queue from being\n\n // automatically dequeued\n\n if ( type === \"fx\" ) {\n\n queue.unshift( \"inprogress\" );\n\n }\n\n\n\n // clear up the last queue stop function\n\n delete hooks.stop;\n\n fn.call( elem, next, hooks );\n\n }\n\n\n\n if ( !startLength && hooks ) {\n\n hooks.empty.fire();\n\n }\n\n },\n\n\n\n // not intended for public consumption - generates a queueHooks object, or returns the current one\n\n _queueHooks: function( elem, type ) {\n\n var key = type + \"queueHooks\";\n\n return jQuery._data( elem, key ) || jQuery._data( elem, key, {\n\n empty: jQuery.Callbacks(\"once memory\").add(function() {\n\n jQuery._removeData( elem, type + \"queue\" );\n\n jQuery._removeData( elem, key );\n\n })\n\n });\n\n }\n\n });\n\n\n\n jQuery.fn.extend({\n\n queue: function( type, data ) {\n\n 
var setter = 2;\n\n\n\n if ( typeof type !== \"string\" ) {\n\n data = type;\n\n type = \"fx\";\n\n setter--;\n\n }\n\n\n\n if ( arguments.length < setter ) {\n\n return jQuery.queue( this[0], type );\n\n }\n\n\n\n return data === undefined ?\n\n this :\n\n this.each(function() {\n\n var queue = jQuery.queue( this, type, data );\n\n\n\n // ensure a hooks for this queue\n\n jQuery._queueHooks( this, type );\n\n\n\n if ( type === \"fx\" && queue[0] !== \"inprogress\" ) {\n\n jQuery.dequeue( this, type );\n\n }\n\n });\n\n },\n\n dequeue: function( type ) {\n\n return this.each(function() {\n\n jQuery.dequeue( this, type );\n\n });\n\n },\n\n clearQueue: function( type ) {\n\n return this.queue( type || \"fx\", [] );\n\n },\n\n // Get a promise resolved when queues of a certain type\n\n // are emptied (fx is the type by default)\n\n promise: function( type, obj ) {\n\n var tmp,\n\n count = 1,\n\n defer = jQuery.Deferred(),\n\n elements = this,\n\n i = this.length,\n\n resolve = function() {\n\n if ( !( --count ) ) {\n\n defer.resolveWith( elements, [ elements ] );\n\n }\n\n };\n\n\n\n if ( typeof type !== \"string\" ) {\n\n obj = type;\n\n type = undefined;\n\n }\n\n type = type || \"fx\";\n\n\n\n while ( i-- ) {\n\n tmp = jQuery._data( elements[ i ], type + \"queueHooks\" );\n\n if ( tmp && tmp.empty ) {\n\n count++;\n\n tmp.empty.add( resolve );\n\n }\n\n }\n\n resolve();\n\n return defer.promise( obj );\n\n }\n\n });\n\n var pnum = (/[+-]?(?:\\d*\\.|)\\d+(?:[eE][+-]?\\d+|)/).source;\n\n\n\n var cssExpand = [ \"Top\", \"Right\", \"Bottom\", \"Left\" ];\n\n\n\n var isHidden = function( elem, el ) {\n\n // isHidden might be called from jQuery#filter function;\n\n // in that case, element will be second argument\n\n elem = el || elem;\n\n return jQuery.css( elem, \"display\" ) === \"none\" || !jQuery.contains( elem.ownerDocument, elem );\n\n };\n\n\n\n\n\n\n\n// Multifunctional method to get and set values of a collection\n\n// The value/s can optionally be executed if 
it's a function\n\n var access = jQuery.access = function( elems, fn, key, value, chainable, emptyGet, raw ) {\n\n var i = 0,\n\n length = elems.length,\n\n bulk = key == null;\n\n\n\n // Sets many values\n\n if ( jQuery.type( key ) === \"object\" ) {\n\n chainable = true;\n\n for ( i in key ) {\n\n jQuery.access( elems, fn, i, key[i], true, emptyGet, raw );\n\n }\n\n\n\n // Sets one value\n\n } else if ( value !== undefined ) {\n\n chainable = true;\n\n\n\n if ( !jQuery.isFunction( value ) ) {\n\n raw = true;\n\n }\n\n\n\n if ( bulk ) {\n\n // Bulk operations run against the entire set\n\n if ( raw ) {\n\n fn.call( elems, value );\n\n fn = null;\n\n\n\n // ...except when executing function values\n\n } else {\n\n bulk = fn;\n\n fn = function( elem, key, value ) {\n\n return bulk.call( jQuery( elem ), value );\n\n };\n\n }\n\n }\n\n\n\n if ( fn ) {\n\n for ( ; i < length; i++ ) {\n\n fn( elems[i], key, raw ? value : value.call( elems[i], i, fn( elems[i], key ) ) );\n\n }\n\n }\n\n }\n\n\n\n return chainable ?\n\n elems :\n\n\n\n // Gets\n\n bulk ?\n\n fn.call( elems ) :\n\n length ? 
fn( elems[0], key ) : emptyGet;\n\n };\n\n var rcheckableType = (/^(?:checkbox|radio)$/i);\n\n\n\n\n\n\n\n (function() {\n\n // Minified: var a,b,c\n\n var input = document.createElement( \"input\" ),\n\n div = document.createElement( \"div\" ),\n\n fragment = document.createDocumentFragment();\n\n\n\n // Setup\n\n div.innerHTML = \" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>\";\n\n\n\n // IE strips leading whitespace when .innerHTML is used\n\n support.leadingWhitespace = div.firstChild.nodeType === 3;\n\n\n\n // Make sure that tbody elements aren't automatically inserted\n\n // IE will insert them into empty tables\n\n support.tbody = !div.getElementsByTagName( \"tbody\" ).length;\n\n\n\n // Make sure that link elements get serialized correctly by innerHTML\n\n // This requires a wrapper element in IE\n\n support.htmlSerialize = !!div.getElementsByTagName( \"link\" ).length;\n\n\n\n // Makes sure cloning an html5 element does not cause problems\n\n // Where outerHTML is undefined, this still works\n\n support.html5Clone =\n\n document.createElement( \"nav\" ).cloneNode( true ).outerHTML !== \"<:nav></:nav>\";\n\n\n\n // Check if a disconnected checkbox will retain its checked\n\n // value of true after appended to the DOM (IE6/7)\n\n input.type = \"checkbox\";\n\n input.checked = true;\n\n fragment.appendChild( input );\n\n support.appendChecked = input.checked;\n\n\n\n // Make sure textarea (and checkbox) defaultValue is properly cloned\n\n // Support: IE6-IE11+\n\n div.innerHTML = \"<textarea>x</textarea>\";\n\n support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue;\n\n\n\n // #11217 - WebKit loses check when the name is after the checked attribute\n\n fragment.appendChild( div );\n\n div.innerHTML = \"<input type='radio' checked='checked' name='t'/>\";\n\n\n\n // Support: Safari 5.1, iOS 5.1, Android 4.x, Android 2.3\n\n // old WebKit doesn't clone checked state correctly in fragments\n\n support.checkClone = 
div.cloneNode( true ).cloneNode( true ).lastChild.checked;\n\n\n\n // Support: IE<9\n\n // Opera does not clone events (and typeof div.attachEvent === undefined).\n\n // IE9-10 clones events bound via attachEvent, but they don't trigger with .click()\n\n support.noCloneEvent = true;\n\n if ( div.attachEvent ) {\n\n div.attachEvent( \"onclick\", function() {\n\n support.noCloneEvent = false;\n\n });\n\n\n\n div.cloneNode( true ).click();\n\n }\n\n\n\n // Execute the test only if not already executed in another module.\n\n if (support.deleteExpando == null) {\n\n // Support: IE<9\n\n support.deleteExpando = true;\n\n try {\n\n delete div.test;\n\n } catch( e ) {\n\n support.deleteExpando = false;\n\n }\n\n }\n\n })();\n\n\n\n\n\n (function() {\n\n var i, eventName,\n\n div = document.createElement( \"div\" );\n\n\n\n // Support: IE<9 (lack submit/change bubble), Firefox 23+ (lack focusin event)\n\n for ( i in { submit: true, change: true, focusin: true }) {\n\n eventName = \"on\" + i;\n\n\n\n if ( !(support[ i + \"Bubbles\" ] = eventName in window) ) {\n\n // Beware of CSP restrictions (https://developer.mozilla.org/en/Security/CSP)\n\n div.setAttribute( eventName, \"t\" );\n\n support[ i + \"Bubbles\" ] = div.attributes[ eventName ].expando === false;\n\n }\n\n }\n\n\n\n // Null elements to avoid leaks in IE.\n\n div = null;\n\n })();\n\n\n\n\n\n var rformElems = /^(?:input|select|textarea)$/i,\n\n rkeyEvent = /^key/,\n\n rmouseEvent = /^(?:mouse|pointer|contextmenu)|click/,\n\n rfocusMorph = /^(?:focusinfocus|focusoutblur)$/,\n\n rtypenamespace = /^([^.]*)(?:\\.(.+)|)$/;\n\n\n\n function returnTrue() {\n\n return true;\n\n }\n\n\n\n function returnFalse() {\n\n return false;\n\n }\n\n\n\n function safeActiveElement() {\n\n try {\n\n return document.activeElement;\n\n } catch ( err ) { }\n\n }\n\n\n\n /*\n\n * Helper functions for managing events -- not part of the public interface.\n\n * Props to Dean Edwards' addEvent library for many of the ideas.\n\n */\n\n 
jQuery.event = {\n\n\n\n global: {},\n\n\n\n add: function( elem, types, handler, data, selector ) {\n\n var tmp, events, t, handleObjIn,\n\n special, eventHandle, handleObj,\n\n handlers, type, namespaces, origType,\n\n elemData = jQuery._data( elem );\n\n\n\n // Don't attach events to noData or text/comment nodes (but allow plain objects)\n\n if ( !elemData ) {\n\n return;\n\n }\n\n\n\n // Caller can pass in an object of custom data in lieu of the handler\n\n if ( handler.handler ) {\n\n handleObjIn = handler;\n\n handler = handleObjIn.handler;\n\n selector = handleObjIn.selector;\n\n }\n\n\n\n // Make sure that the handler has a unique ID, used to find/remove it later\n\n if ( !handler.guid ) {\n\n handler.guid = jQuery.guid++;\n\n }\n\n\n\n // Init the element's event structure and main handler, if this is the first\n\n if ( !(events = elemData.events) ) {\n\n events = elemData.events = {};\n\n }\n\n if ( !(eventHandle = elemData.handle) ) {\n\n eventHandle = elemData.handle = function( e ) {\n\n // Discard the second event of a jQuery.event.trigger() and\n\n // when an event is called after a page has unloaded\n\n return typeof jQuery !== strundefined && (!e || jQuery.event.triggered !== e.type) ?\n\n jQuery.event.dispatch.apply( eventHandle.elem, arguments ) :\n\n undefined;\n\n };\n\n // Add elem as a property of the handle fn to prevent a memory leak with IE non-native events\n\n eventHandle.elem = elem;\n\n }\n\n\n\n // Handle multiple events separated by a space\n\n types = ( types || \"\" ).match( rnotwhite ) || [ \"\" ];\n\n t = types.length;\n\n while ( t-- ) {\n\n tmp = rtypenamespace.exec( types[t] ) || [];\n\n type = origType = tmp[1];\n\n namespaces = ( tmp[2] || \"\" ).split( \".\" ).sort();\n\n\n\n // There *must* be a type, no attaching namespace-only handlers\n\n if ( !type ) {\n\n continue;\n\n }\n\n\n\n // If event changes its type, use the special event handlers for the changed type\n\n special = jQuery.event.special[ type ] || {};\n\n\n\n 
// If selector defined, determine special event api type, otherwise given type\n\n type = ( selector ? special.delegateType : special.bindType ) || type;\n\n\n\n // Update special based on newly reset type\n\n special = jQuery.event.special[ type ] || {};\n\n\n\n // handleObj is passed to all event handlers\n\n handleObj = jQuery.extend({\n\n type: type,\n\n origType: origType,\n\n data: data,\n\n handler: handler,\n\n guid: handler.guid,\n\n selector: selector,\n\n needsContext: selector && jQuery.expr.match.needsContext.test( selector ),\n\n namespace: namespaces.join(\".\")\n\n }, handleObjIn );\n\n\n\n // Init the event handler queue if we're the first\n\n if ( !(handlers = events[ type ]) ) {\n\n handlers = events[ type ] = [];\n\n handlers.delegateCount = 0;\n\n\n\n // Only use addEventListener/attachEvent if the special events handler returns false\n\n if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) {\n\n // Bind the global event handler to the element\n\n if ( elem.addEventListener ) {\n\n elem.addEventListener( type, eventHandle, false );\n\n\n\n } else if ( elem.attachEvent ) {\n\n elem.attachEvent( \"on\" + type, eventHandle );\n\n }\n\n }\n\n }\n\n\n\n if ( special.add ) {\n\n special.add.call( elem, handleObj );\n\n\n\n if ( !handleObj.handler.guid ) {\n\n handleObj.handler.guid = handler.guid;\n\n }\n\n }\n\n\n\n // Add to the element's handler list, delegates in front\n\n if ( selector ) {\n\n handlers.splice( handlers.delegateCount++, 0, handleObj );\n\n } else {\n\n handlers.push( handleObj );\n\n }\n\n\n\n // Keep track of which events have ever been used, for event optimization\n\n jQuery.event.global[ type ] = true;\n\n }\n\n\n\n // Nullify elem to prevent memory leaks in IE\n\n elem = null;\n\n },\n\n\n\n // Detach an event or set of events from an element\n\n remove: function( elem, types, handler, selector, mappedTypes ) {\n\n var j, handleObj, tmp,\n\n origCount, t, events,\n\n special, handlers, 
type,\n\n namespaces, origType,\n\n elemData = jQuery.hasData( elem ) && jQuery._data( elem );\n\n\n\n if ( !elemData || !(events = elemData.events) ) {\n\n return;\n\n }\n\n\n\n // Once for each type.namespace in types; type may be omitted\n\n types = ( types || \"\" ).match( rnotwhite ) || [ \"\" ];\n\n t = types.length;\n\n while ( t-- ) {\n\n tmp = rtypenamespace.exec( types[t] ) || [];\n\n type = origType = tmp[1];\n\n namespaces = ( tmp[2] || \"\" ).split( \".\" ).sort();\n\n\n\n // Unbind all events (on this namespace, if provided) for the element\n\n if ( !type ) {\n\n for ( type in events ) {\n\n jQuery.event.remove( elem, type + types[ t ], handler, selector, true );\n\n }\n\n continue;\n\n }\n\n\n\n special = jQuery.event.special[ type ] || {};\n\n type = ( selector ? special.delegateType : special.bindType ) || type;\n\n handlers = events[ type ] || [];\n\n tmp = tmp[2] && new RegExp( \"(^|\\\\.)\" + namespaces.join(\"\\\\.(?:.*\\\\.|)\") + \"(\\\\.|$)\" );\n\n\n\n // Remove matching events\n\n origCount = j = handlers.length;\n\n while ( j-- ) {\n\n handleObj = handlers[ j ];\n\n\n\n if ( ( mappedTypes || origType === handleObj.origType ) &&\n\n ( !handler || handler.guid === handleObj.guid ) &&\n\n ( !tmp || tmp.test( handleObj.namespace ) ) &&\n\n ( !selector || selector === handleObj.selector || selector === \"**\" && handleObj.selector ) ) {\n\n handlers.splice( j, 1 );\n\n\n\n if ( handleObj.selector ) {\n\n handlers.delegateCount--;\n\n }\n\n if ( special.remove ) {\n\n special.remove.call( elem, handleObj );\n\n }\n\n }\n\n }\n\n\n\n // Remove generic event handler if we removed something and no more handlers exist\n\n // (avoids potential for endless recursion during removal of special event handlers)\n\n if ( origCount && !handlers.length ) {\n\n if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) {\n\n jQuery.removeEvent( elem, type, elemData.handle );\n\n }\n\n\n\n delete events[ type ];\n\n }\n\n 
}\n\n\n\n // Remove the expando if it's no longer used\n\n if ( jQuery.isEmptyObject( events ) ) {\n\n delete elemData.handle;\n\n\n\n // removeData also checks for emptiness and clears the expando if empty\n\n // so use it instead of delete\n\n jQuery._removeData( elem, \"events\" );\n\n }\n\n },\n\n\n\n trigger: function( event, data, elem, onlyHandlers ) {\n\n var handle, ontype, cur,\n\n bubbleType, special, tmp, i,\n\n eventPath = [ elem || document ],\n\n type = hasOwn.call( event, \"type\" ) ? event.type : event,\n\n namespaces = hasOwn.call( event, \"namespace\" ) ? event.namespace.split(\".\") : [];\n\n\n\n cur = tmp = elem = elem || document;\n\n\n\n // Don't do events on text and comment nodes\n\n if ( elem.nodeType === 3 || elem.nodeType === 8 ) {\n\n return;\n\n }\n\n\n\n // focus/blur morphs to focusin/out; ensure we're not firing them right now\n\n if ( rfocusMorph.test( type + jQuery.event.triggered ) ) {\n\n return;\n\n }\n\n\n\n if ( type.indexOf(\".\") >= 0 ) {\n\n // Namespaced trigger; create a regexp to match event type in handle()\n\n namespaces = type.split(\".\");\n\n type = namespaces.shift();\n\n namespaces.sort();\n\n }\n\n ontype = type.indexOf(\":\") < 0 && \"on\" + type;\n\n\n\n // Caller can pass in a jQuery.Event object, Object, or just an event type string\n\n event = event[ jQuery.expando ] ?\n\n event :\n\n new jQuery.Event( type, typeof event === \"object\" && event );\n\n\n\n // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true)\n\n event.isTrigger = onlyHandlers ? 
2 : 3;\n\n event.namespace = namespaces.join(\".\");\n\n event.namespace_re = event.namespace ?\n\n new RegExp( \"(^|\\\\.)\" + namespaces.join(\"\\\\.(?:.*\\\\.|)\") + \"(\\\\.|$)\" ) :\n\n null;\n\n\n\n // Clean up the event in case it is being reused\n\n event.result = undefined;\n\n if ( !event.target ) {\n\n event.target = elem;\n\n }\n\n\n\n // Clone any incoming data and prepend the event, creating the handler arg list\n\n data = data == null ?\n\n [ event ] :\n\n jQuery.makeArray( data, [ event ] );\n\n\n\n // Allow special events to draw outside the lines\n\n special = jQuery.event.special[ type ] || {};\n\n if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) {\n\n return;\n\n }\n\n\n\n // Determine event propagation path in advance, per W3C events spec (#9951)\n\n // Bubble up to document, then to window; watch for a global ownerDocument var (#9724)\n\n if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) {\n\n\n\n bubbleType = special.delegateType || type;\n\n if ( !rfocusMorph.test( bubbleType + type ) ) {\n\n cur = cur.parentNode;\n\n }\n\n for ( ; cur; cur = cur.parentNode ) {\n\n eventPath.push( cur );\n\n tmp = cur;\n\n }\n\n\n\n // Only add window if we got to document (e.g., not plain obj or detached DOM)\n\n if ( tmp === (elem.ownerDocument || document) ) {\n\n eventPath.push( tmp.defaultView || tmp.parentWindow || window );\n\n }\n\n }\n\n\n\n // Fire handlers on the event path\n\n i = 0;\n\n while ( (cur = eventPath[i++]) && !event.isPropagationStopped() ) {\n\n\n\n event.type = i > 1 ?\n\n bubbleType :\n\n special.bindType || type;\n\n\n\n // jQuery handler\n\n handle = ( jQuery._data( cur, \"events\" ) || {} )[ event.type ] && jQuery._data( cur, \"handle\" );\n\n if ( handle ) {\n\n handle.apply( cur, data );\n\n }\n\n\n\n // Native handler\n\n handle = ontype && cur[ ontype ];\n\n if ( handle && handle.apply && jQuery.acceptData( cur ) ) {\n\n event.result = handle.apply( cur, data );\n\n 
if ( event.result === false ) {\n\n event.preventDefault();\n\n }\n\n }\n\n }\n\n event.type = type;\n\n\n\n // If nobody prevented the default action, do it now\n\n if ( !onlyHandlers && !event.isDefaultPrevented() ) {\n\n\n\n if ( (!special._default || special._default.apply( eventPath.pop(), data ) === false) &&\n\n jQuery.acceptData( elem ) ) {\n\n\n\n // Call a native DOM method on the target with the same name name as the event.\n\n // Can't use an .isFunction() check here because IE6/7 fails that test.\n\n // Don't do default actions on window, that's where global variables be (#6170)\n\n if ( ontype && elem[ type ] && !jQuery.isWindow( elem ) ) {\n\n\n\n // Don't re-trigger an onFOO event when we call its FOO() method\n\n tmp = elem[ ontype ];\n\n\n\n if ( tmp ) {\n\n elem[ ontype ] = null;\n\n }\n\n\n\n // Prevent re-triggering of the same event, since we already bubbled it above\n\n jQuery.event.triggered = type;\n\n try {\n\n elem[ type ]();\n\n } catch ( e ) {\n\n // IE<9 dies on focus/blur to hidden element (#1486,#12518)\n\n // only reproducible on winXP IE8 native, not IE9 in IE8 mode\n\n }\n\n jQuery.event.triggered = undefined;\n\n\n\n if ( tmp ) {\n\n elem[ ontype ] = tmp;\n\n }\n\n }\n\n }\n\n }\n\n\n\n return event.result;\n\n },\n\n\n\n dispatch: function( event ) {\n\n\n\n // Make a writable jQuery.Event from the native event object\n\n event = jQuery.event.fix( event );\n\n\n\n var i, ret, handleObj, matched, j,\n\n handlerQueue = [],\n\n args = slice.call( arguments ),\n\n handlers = ( jQuery._data( this, \"events\" ) || {} )[ event.type ] || [],\n\n special = jQuery.event.special[ event.type ] || {};\n\n\n\n // Use the fix-ed jQuery.Event rather than the (read-only) native event\n\n args[0] = event;\n\n event.delegateTarget = this;\n\n\n\n // Call the preDispatch hook for the mapped type, and let it bail if desired\n\n if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) {\n\n return;\n\n }\n\n\n\n // Determine 
handlers\n\n handlerQueue = jQuery.event.handlers.call( this, event, handlers );\n\n\n\n // Run delegates first; they may want to stop propagation beneath us\n\n i = 0;\n\n while ( (matched = handlerQueue[ i++ ]) && !event.isPropagationStopped() ) {\n\n event.currentTarget = matched.elem;\n\n\n\n j = 0;\n\n while ( (handleObj = matched.handlers[ j++ ]) && !event.isImmediatePropagationStopped() ) {\n\n\n\n // Triggered event must either 1) have no namespace, or\n\n // 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace).\n\n if ( !event.namespace_re || event.namespace_re.test( handleObj.namespace ) ) {\n\n\n\n event.handleObj = handleObj;\n\n event.data = handleObj.data;\n\n\n\n ret = ( (jQuery.event.special[ handleObj.origType ] || {}).handle || handleObj.handler )\n\n .apply( matched.elem, args );\n\n\n\n if ( ret !== undefined ) {\n\n if ( (event.result = ret) === false ) {\n\n event.preventDefault();\n\n event.stopPropagation();\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Call the postDispatch hook for the mapped type\n\n if ( special.postDispatch ) {\n\n special.postDispatch.call( this, event );\n\n }\n\n\n\n return event.result;\n\n },\n\n\n\n handlers: function( event, handlers ) {\n\n var sel, handleObj, matches, i,\n\n handlerQueue = [],\n\n delegateCount = handlers.delegateCount,\n\n cur = event.target;\n\n\n\n // Find delegate handlers\n\n // Black-hole SVG <use> instance trees (#13180)\n\n // Avoid non-left-click bubbling in Firefox (#3861)\n\n if ( delegateCount && cur.nodeType && (!event.button || event.type !== \"click\") ) {\n\n\n\n /* jshint eqeqeq: false */\n\n for ( ; cur != this; cur = cur.parentNode || this ) {\n\n /* jshint eqeqeq: true */\n\n\n\n // Don't check non-elements (#13208)\n\n // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764)\n\n if ( cur.nodeType === 1 && (cur.disabled !== true || event.type !== \"click\") ) {\n\n matches = [];\n\n for ( i = 0; i < delegateCount; i++ 
) {\n\n handleObj = handlers[ i ];\n\n\n\n // Don't conflict with Object.prototype properties (#13203)\n\n sel = handleObj.selector + \" \";\n\n\n\n if ( matches[ sel ] === undefined ) {\n\n matches[ sel ] = handleObj.needsContext ?\n\n jQuery( sel, this ).index( cur ) >= 0 :\n\n jQuery.find( sel, this, null, [ cur ] ).length;\n\n }\n\n if ( matches[ sel ] ) {\n\n matches.push( handleObj );\n\n }\n\n }\n\n if ( matches.length ) {\n\n handlerQueue.push({ elem: cur, handlers: matches });\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Add the remaining (directly-bound) handlers\n\n if ( delegateCount < handlers.length ) {\n\n handlerQueue.push({ elem: this, handlers: handlers.slice( delegateCount ) });\n\n }\n\n\n\n return handlerQueue;\n\n },\n\n\n\n fix: function( event ) {\n\n if ( event[ jQuery.expando ] ) {\n\n return event;\n\n }\n\n\n\n // Create a writable copy of the event object and normalize some properties\n\n var i, prop, copy,\n\n type = event.type,\n\n originalEvent = event,\n\n fixHook = this.fixHooks[ type ];\n\n\n\n if ( !fixHook ) {\n\n this.fixHooks[ type ] = fixHook =\n\n rmouseEvent.test( type ) ? this.mouseHooks :\n\n rkeyEvent.test( type ) ? this.keyHooks :\n\n {};\n\n }\n\n copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props;\n\n\n\n event = new jQuery.Event( originalEvent );\n\n\n\n i = copy.length;\n\n while ( i-- ) {\n\n prop = copy[ i ];\n\n event[ prop ] = originalEvent[ prop ];\n\n }\n\n\n\n // Support: IE<9\n\n // Fix target property (#1925)\n\n if ( !event.target ) {\n\n event.target = originalEvent.srcElement || document;\n\n }\n\n\n\n // Support: Chrome 23+, Safari?\n\n // Target should not be a text node (#504, #13143)\n\n if ( event.target.nodeType === 3 ) {\n\n event.target = event.target.parentNode;\n\n }\n\n\n\n // Support: IE<9\n\n // For mouse/key events, metaKey==false if it's undefined (#3368, #11328)\n\n event.metaKey = !!event.metaKey;\n\n\n\n return fixHook.filter ? 
fixHook.filter( event, originalEvent ) : event;\n\n },\n\n\n\n // Includes some event props shared by KeyEvent and MouseEvent\n\n props: \"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which\".split(\" \"),\n\n\n\n fixHooks: {},\n\n\n\n keyHooks: {\n\n props: \"char charCode key keyCode\".split(\" \"),\n\n filter: function( event, original ) {\n\n\n\n // Add which for key events\n\n if ( event.which == null ) {\n\n event.which = original.charCode != null ? original.charCode : original.keyCode;\n\n }\n\n\n\n return event;\n\n }\n\n },\n\n\n\n mouseHooks: {\n\n props: \"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement\".split(\" \"),\n\n filter: function( event, original ) {\n\n var body, eventDoc, doc,\n\n button = original.button,\n\n fromElement = original.fromElement;\n\n\n\n // Calculate pageX/Y if missing and clientX/Y available\n\n if ( event.pageX == null && original.clientX != null ) {\n\n eventDoc = event.target.ownerDocument || document;\n\n doc = eventDoc.documentElement;\n\n body = eventDoc.body;\n\n\n\n event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 );\n\n event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 );\n\n }\n\n\n\n // Add relatedTarget, if necessary\n\n if ( !event.relatedTarget && fromElement ) {\n\n event.relatedTarget = fromElement === event.target ? original.toElement : fromElement;\n\n }\n\n\n\n // Add which for click: 1 === left; 2 === middle; 3 === right\n\n // Note: button is not normalized, so don't use it\n\n if ( !event.which && button !== undefined ) {\n\n event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 
2 : 0 ) ) );\n\n }\n\n\n\n return event;\n\n }\n\n },\n\n\n\n special: {\n\n load: {\n\n // Prevent triggered image.load events from bubbling to window.load\n\n noBubble: true\n\n },\n\n focus: {\n\n // Fire native event if possible so blur/focus sequence is correct\n\n trigger: function() {\n\n if ( this !== safeActiveElement() && this.focus ) {\n\n try {\n\n this.focus();\n\n return false;\n\n } catch ( e ) {\n\n // Support: IE<9\n\n // If we error on focus to hidden element (#1486, #12518),\n\n // let .trigger() run the handlers\n\n }\n\n }\n\n },\n\n delegateType: \"focusin\"\n\n },\n\n blur: {\n\n trigger: function() {\n\n if ( this === safeActiveElement() && this.blur ) {\n\n this.blur();\n\n return false;\n\n }\n\n },\n\n delegateType: \"focusout\"\n\n },\n\n click: {\n\n // For checkbox, fire native event so checked state will be right\n\n trigger: function() {\n\n if ( jQuery.nodeName( this, \"input\" ) && this.type === \"checkbox\" && this.click ) {\n\n this.click();\n\n return false;\n\n }\n\n },\n\n\n\n // For cross-browser consistency, don't fire native .click() on links\n\n _default: function( event ) {\n\n return jQuery.nodeName( event.target, \"a\" );\n\n }\n\n },\n\n\n\n beforeunload: {\n\n postDispatch: function( event ) {\n\n\n\n // Support: Firefox 20+\n\n // Firefox doesn't alert if the returnValue field is not set.\n\n if ( event.result !== undefined && event.originalEvent ) {\n\n event.originalEvent.returnValue = event.result;\n\n }\n\n }\n\n }\n\n },\n\n\n\n simulate: function( type, elem, event, bubble ) {\n\n // Piggyback on a donor event to simulate a different one.\n\n // Fake originalEvent to avoid donor's stopPropagation, but if the\n\n // simulated event prevents default then we do the same on the donor.\n\n var e = jQuery.extend(\n\n new jQuery.Event(),\n\n event,\n\n {\n\n type: type,\n\n isSimulated: true,\n\n originalEvent: {}\n\n }\n\n );\n\n if ( bubble ) {\n\n jQuery.event.trigger( e, null, elem );\n\n } else {\n\n 
jQuery.event.dispatch.call( elem, e );\n\n }\n\n if ( e.isDefaultPrevented() ) {\n\n event.preventDefault();\n\n }\n\n }\n\n };\n\n\n\n jQuery.removeEvent = document.removeEventListener ?\n\n function( elem, type, handle ) {\n\n if ( elem.removeEventListener ) {\n\n elem.removeEventListener( type, handle, false );\n\n }\n\n } :\n\n function( elem, type, handle ) {\n\n var name = \"on\" + type;\n\n\n\n if ( elem.detachEvent ) {\n\n\n\n // #8545, #7054, preventing memory leaks for custom events in IE6-8\n\n // detachEvent needed property on element, by name of that event, to properly expose it to GC\n\n if ( typeof elem[ name ] === strundefined ) {\n\n elem[ name ] = null;\n\n }\n\n\n\n elem.detachEvent( name, handle );\n\n }\n\n };\n\n\n\n jQuery.Event = function( src, props ) {\n\n // Allow instantiation without the 'new' keyword\n\n if ( !(this instanceof jQuery.Event) ) {\n\n return new jQuery.Event( src, props );\n\n }\n\n\n\n // Event object\n\n if ( src && src.type ) {\n\n this.originalEvent = src;\n\n this.type = src.type;\n\n\n\n // Events bubbling up the document may have been marked as prevented\n\n // by a handler lower down the tree; reflect the correct value.\n\n this.isDefaultPrevented = src.defaultPrevented ||\n\n src.defaultPrevented === undefined &&\n\n // Support: IE < 9, Android < 4.0\n\n src.returnValue === false ?\n\n returnTrue :\n\n returnFalse;\n\n\n\n // Event type\n\n } else {\n\n this.type = src;\n\n }\n\n\n\n // Put explicitly provided properties onto the event object\n\n if ( props ) {\n\n jQuery.extend( this, props );\n\n }\n\n\n\n // Create a timestamp if incoming event doesn't have one\n\n this.timeStamp = src && src.timeStamp || jQuery.now();\n\n\n\n // Mark it as fixed\n\n this[ jQuery.expando ] = true;\n\n };\n\n\n\n// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding\n\n// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html\n\n jQuery.Event.prototype = {\n\n 
isDefaultPrevented: returnFalse,\n\n isPropagationStopped: returnFalse,\n\n isImmediatePropagationStopped: returnFalse,\n\n\n\n preventDefault: function() {\n\n var e = this.originalEvent;\n\n\n\n this.isDefaultPrevented = returnTrue;\n\n if ( !e ) {\n\n return;\n\n }\n\n\n\n // If preventDefault exists, run it on the original event\n\n if ( e.preventDefault ) {\n\n e.preventDefault();\n\n\n\n // Support: IE\n\n // Otherwise set the returnValue property of the original event to false\n\n } else {\n\n e.returnValue = false;\n\n }\n\n },\n\n stopPropagation: function() {\n\n var e = this.originalEvent;\n\n\n\n this.isPropagationStopped = returnTrue;\n\n if ( !e ) {\n\n return;\n\n }\n\n // If stopPropagation exists, run it on the original event\n\n if ( e.stopPropagation ) {\n\n e.stopPropagation();\n\n }\n\n\n\n // Support: IE\n\n // Set the cancelBubble property of the original event to true\n\n e.cancelBubble = true;\n\n },\n\n stopImmediatePropagation: function() {\n\n var e = this.originalEvent;\n\n\n\n this.isImmediatePropagationStopped = returnTrue;\n\n\n\n if ( e && e.stopImmediatePropagation ) {\n\n e.stopImmediatePropagation();\n\n }\n\n\n\n this.stopPropagation();\n\n }\n\n };\n\n\n\n// Create mouseenter/leave events using mouseover/out and event-time checks\n\n jQuery.each({\n\n mouseenter: \"mouseover\",\n\n mouseleave: \"mouseout\",\n\n pointerenter: \"pointerover\",\n\n pointerleave: \"pointerout\"\n\n }, function( orig, fix ) {\n\n jQuery.event.special[ orig ] = {\n\n delegateType: fix,\n\n bindType: fix,\n\n\n\n handle: function( event ) {\n\n var ret,\n\n target = this,\n\n related = event.relatedTarget,\n\n handleObj = event.handleObj;\n\n\n\n // For mousenter/leave call the handler if related is outside the target.\n\n // NB: No relatedTarget if the mouse left/entered the browser window\n\n if ( !related || (related !== target && !jQuery.contains( target, related )) ) {\n\n event.type = handleObj.origType;\n\n ret = handleObj.handler.apply( this, 
arguments );\n\n event.type = fix;\n\n }\n\n return ret;\n\n }\n\n };\n\n });\n\n\n\n// IE submit delegation\n\n if ( !support.submitBubbles ) {\n\n\n\n jQuery.event.special.submit = {\n\n setup: function() {\n\n // Only need this for delegated form submit events\n\n if ( jQuery.nodeName( this, \"form\" ) ) {\n\n return false;\n\n }\n\n\n\n // Lazy-add a submit handler when a descendant form may potentially be submitted\n\n jQuery.event.add( this, \"click._submit keypress._submit\", function( e ) {\n\n // Node name check avoids a VML-related crash in IE (#9807)\n\n var elem = e.target,\n\n form = jQuery.nodeName( elem, \"input\" ) || jQuery.nodeName( elem, \"button\" ) ? elem.form : undefined;\n\n if ( form && !jQuery._data( form, \"submitBubbles\" ) ) {\n\n jQuery.event.add( form, \"submit._submit\", function( event ) {\n\n event._submit_bubble = true;\n\n });\n\n jQuery._data( form, \"submitBubbles\", true );\n\n }\n\n });\n\n // return undefined since we don't need an event listener\n\n },\n\n\n\n postDispatch: function( event ) {\n\n // If form was submitted by the user, bubble the event up the tree\n\n if ( event._submit_bubble ) {\n\n delete event._submit_bubble;\n\n if ( this.parentNode && !event.isTrigger ) {\n\n jQuery.event.simulate( \"submit\", this.parentNode, event, true );\n\n }\n\n }\n\n },\n\n\n\n teardown: function() {\n\n // Only need this for delegated form submit events\n\n if ( jQuery.nodeName( this, \"form\" ) ) {\n\n return false;\n\n }\n\n\n\n // Remove delegated handlers; cleanData eventually reaps submit handlers attached above\n\n jQuery.event.remove( this, \"._submit\" );\n\n }\n\n };\n\n }\n\n\n\n// IE change delegation and checkbox/radio fix\n\n if ( !support.changeBubbles ) {\n\n\n\n jQuery.event.special.change = {\n\n\n\n setup: function() {\n\n\n\n if ( rformElems.test( this.nodeName ) ) {\n\n // IE doesn't fire change on a check/radio until blur; trigger it on click\n\n // after a propertychange. 
Eat the blur-change in special.change.handle.\n\n // This still fires onchange a second time for check/radio after blur.\n\n if ( this.type === \"checkbox\" || this.type === \"radio\" ) {\n\n jQuery.event.add( this, \"propertychange._change\", function( event ) {\n\n if ( event.originalEvent.propertyName === \"checked\" ) {\n\n this._just_changed = true;\n\n }\n\n });\n\n jQuery.event.add( this, \"click._change\", function( event ) {\n\n if ( this._just_changed && !event.isTrigger ) {\n\n this._just_changed = false;\n\n }\n\n // Allow triggered, simulated change events (#11500)\n\n jQuery.event.simulate( \"change\", this, event, true );\n\n });\n\n }\n\n return false;\n\n }\n\n // Delegated event; lazy-add a change handler on descendant inputs\n\n jQuery.event.add( this, \"beforeactivate._change\", function( e ) {\n\n var elem = e.target;\n\n\n\n if ( rformElems.test( elem.nodeName ) && !jQuery._data( elem, \"changeBubbles\" ) ) {\n\n jQuery.event.add( elem, \"change._change\", function( event ) {\n\n if ( this.parentNode && !event.isSimulated && !event.isTrigger ) {\n\n jQuery.event.simulate( \"change\", this.parentNode, event, true );\n\n }\n\n });\n\n jQuery._data( elem, \"changeBubbles\", true );\n\n }\n\n });\n\n },\n\n\n\n handle: function( event ) {\n\n var elem = event.target;\n\n\n\n // Swallow native change events from checkbox/radio, we already triggered them above\n\n if ( this !== elem || event.isSimulated || event.isTrigger || (elem.type !== \"radio\" && elem.type !== \"checkbox\") ) {\n\n return event.handleObj.handler.apply( this, arguments );\n\n }\n\n },\n\n\n\n teardown: function() {\n\n jQuery.event.remove( this, \"._change\" );\n\n\n\n return !rformElems.test( this.nodeName );\n\n }\n\n };\n\n }\n\n\n\n// Create \"bubbling\" focus and blur events\n\n if ( !support.focusinBubbles ) {\n\n jQuery.each({ focus: \"focusin\", blur: \"focusout\" }, function( orig, fix ) {\n\n\n\n // Attach a single capturing handler on the document while someone wants 
focusin/focusout\n\n var handler = function( event ) {\n\n jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ), true );\n\n };\n\n\n\n jQuery.event.special[ fix ] = {\n\n setup: function() {\n\n var doc = this.ownerDocument || this,\n\n attaches = jQuery._data( doc, fix );\n\n\n\n if ( !attaches ) {\n\n doc.addEventListener( orig, handler, true );\n\n }\n\n jQuery._data( doc, fix, ( attaches || 0 ) + 1 );\n\n },\n\n teardown: function() {\n\n var doc = this.ownerDocument || this,\n\n attaches = jQuery._data( doc, fix ) - 1;\n\n\n\n if ( !attaches ) {\n\n doc.removeEventListener( orig, handler, true );\n\n jQuery._removeData( doc, fix );\n\n } else {\n\n jQuery._data( doc, fix, attaches );\n\n }\n\n }\n\n };\n\n });\n\n }\n\n\n\n jQuery.fn.extend({\n\n\n\n on: function( types, selector, data, fn, /*INTERNAL*/ one ) {\n\n var type, origFn;\n\n\n\n // Types can be a map of types/handlers\n\n if ( typeof types === \"object\" ) {\n\n // ( types-Object, selector, data )\n\n if ( typeof selector !== \"string\" ) {\n\n // ( types-Object, data )\n\n data = data || selector;\n\n selector = undefined;\n\n }\n\n for ( type in types ) {\n\n this.on( type, selector, data, types[ type ], one );\n\n }\n\n return this;\n\n }\n\n\n\n if ( data == null && fn == null ) {\n\n // ( types, fn )\n\n fn = selector;\n\n data = selector = undefined;\n\n } else if ( fn == null ) {\n\n if ( typeof selector === \"string\" ) {\n\n // ( types, selector, fn )\n\n fn = data;\n\n data = undefined;\n\n } else {\n\n // ( types, data, fn )\n\n fn = data;\n\n data = selector;\n\n selector = undefined;\n\n }\n\n }\n\n if ( fn === false ) {\n\n fn = returnFalse;\n\n } else if ( !fn ) {\n\n return this;\n\n }\n\n\n\n if ( one === 1 ) {\n\n origFn = fn;\n\n fn = function( event ) {\n\n // Can use an empty set, since event contains the info\n\n jQuery().off( event );\n\n return origFn.apply( this, arguments );\n\n };\n\n // Use same guid so caller can remove using origFn\n\n fn.guid = 
origFn.guid || ( origFn.guid = jQuery.guid++ );\n\n }\n\n return this.each( function() {\n\n jQuery.event.add( this, types, fn, data, selector );\n\n });\n\n },\n\n one: function( types, selector, data, fn ) {\n\n return this.on( types, selector, data, fn, 1 );\n\n },\n\n off: function( types, selector, fn ) {\n\n var handleObj, type;\n\n if ( types && types.preventDefault && types.handleObj ) {\n\n // ( event ) dispatched jQuery.Event\n\n handleObj = types.handleObj;\n\n jQuery( types.delegateTarget ).off(\n\n handleObj.namespace ? handleObj.origType + \".\" + handleObj.namespace : handleObj.origType,\n\n handleObj.selector,\n\n handleObj.handler\n\n );\n\n return this;\n\n }\n\n if ( typeof types === \"object\" ) {\n\n // ( types-object [, selector] )\n\n for ( type in types ) {\n\n this.off( type, selector, types[ type ] );\n\n }\n\n return this;\n\n }\n\n if ( selector === false || typeof selector === \"function\" ) {\n\n // ( types [, fn] )\n\n fn = selector;\n\n selector = undefined;\n\n }\n\n if ( fn === false ) {\n\n fn = returnFalse;\n\n }\n\n return this.each(function() {\n\n jQuery.event.remove( this, types, fn, selector );\n\n });\n\n },\n\n\n\n trigger: function( type, data ) {\n\n return this.each(function() {\n\n jQuery.event.trigger( type, data, this );\n\n });\n\n },\n\n triggerHandler: function( type, data ) {\n\n var elem = this[0];\n\n if ( elem ) {\n\n return jQuery.event.trigger( type, data, elem, true );\n\n }\n\n }\n\n });\n\n\n\n\n\n function createSafeFragment( document ) {\n\n var list = nodeNames.split( \"|\" ),\n\n safeFrag = document.createDocumentFragment();\n\n\n\n if ( safeFrag.createElement ) {\n\n while ( list.length ) {\n\n safeFrag.createElement(\n\n list.pop()\n\n );\n\n }\n\n }\n\n return safeFrag;\n\n }\n\n\n\n var nodeNames = \"abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|\" +\n\n \"header|hgroup|mark|meter|nav|output|progress|section|summary|time|video\",\n\n rinlinejQuery = / 
jQuery\\d+=\"(?:null|\\d+)\"/g,\n\n rnoshimcache = new RegExp(\"<(?:\" + nodeNames + \")[\\\\s/>]\", \"i\"),\n\n rleadingWhitespace = /^\\s+/,\n\n rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\\w:]+)[^>]*)\\/>/gi,\n\n rtagName = /<([\\w:]+)/,\n\n rtbody = /<tbody/i,\n\n rhtml = /<|&#?\\w+;/,\n\n rnoInnerhtml = /<(?:script|style|link)/i,\n\n // checked=\"checked\" or checked\n\n rchecked = /checked\\s*(?:[^=]|=\\s*.checked.)/i,\n\n rscriptType = /^$|\\/(?:java|ecma)script/i,\n\n rscriptTypeMasked = /^true\\/(.*)/,\n\n rcleanScript = /^\\s*<!(?:\\[CDATA\\[|--)|(?:\\]\\]|--)>\\s*$/g,\n\n\n\n // We have to close these tags to support XHTML (#13200)\n\n wrapMap = {\n\n option: [ 1, \"<select multiple='multiple'>\", \"</select>\" ],\n\n legend: [ 1, \"<fieldset>\", \"</fieldset>\" ],\n\n area: [ 1, \"<map>\", \"</map>\" ],\n\n param: [ 1, \"<object>\", \"</object>\" ],\n\n thead: [ 1, \"<table>\", \"</table>\" ],\n\n tr: [ 2, \"<table><tbody>\", \"</tbody></table>\" ],\n\n col: [ 2, \"<table><tbody></tbody><colgroup>\", \"</colgroup></table>\" ],\n\n td: [ 3, \"<table><tbody><tr>\", \"</tr></tbody></table>\" ],\n\n\n\n // IE6-8 can't serialize link, script, style, or any html5 (NoScope) tags,\n\n // unless wrapped in a div with non-breaking characters in front of it.\n\n _default: support.htmlSerialize ? [ 0, \"\", \"\" ] : [ 1, \"X<div>\", \"</div>\" ]\n\n },\n\n safeFragment = createSafeFragment( document ),\n\n fragmentDiv = safeFragment.appendChild( document.createElement(\"div\") );\n\n\n\n wrapMap.optgroup = wrapMap.option;\n\n wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;\n\n wrapMap.th = wrapMap.td;\n\n\n\n function getAll( context, tag ) {\n\n var elems, elem,\n\n i = 0,\n\n found = typeof context.getElementsByTagName !== strundefined ? context.getElementsByTagName( tag || \"*\" ) :\n\n typeof context.querySelectorAll !== strundefined ? 
context.querySelectorAll( tag || \"*\" ) :\n\n undefined;\n\n\n\n if ( !found ) {\n\n for ( found = [], elems = context.childNodes || context; (elem = elems[i]) != null; i++ ) {\n\n if ( !tag || jQuery.nodeName( elem, tag ) ) {\n\n found.push( elem );\n\n } else {\n\n jQuery.merge( found, getAll( elem, tag ) );\n\n }\n\n }\n\n }\n\n\n\n return tag === undefined || tag && jQuery.nodeName( context, tag ) ?\n\n jQuery.merge( [ context ], found ) :\n\n found;\n\n }\n\n\n\n// Used in buildFragment, fixes the defaultChecked property\n\n function fixDefaultChecked( elem ) {\n\n if ( rcheckableType.test( elem.type ) ) {\n\n elem.defaultChecked = elem.checked;\n\n }\n\n }\n\n\n\n// Support: IE<8\n\n// Manipulating tables requires a tbody\n\n function manipulationTarget( elem, content ) {\n\n return jQuery.nodeName( elem, \"table\" ) &&\n\n jQuery.nodeName( content.nodeType !== 11 ? content : content.firstChild, \"tr\" ) ?\n\n\n\n elem.getElementsByTagName(\"tbody\")[0] ||\n\n elem.appendChild( elem.ownerDocument.createElement(\"tbody\") ) :\n\n elem;\n\n }\n\n\n\n// Replace/restore the type attribute of script elements for safe DOM manipulation\n\n function disableScript( elem ) {\n\n elem.type = (jQuery.find.attr( elem, \"type\" ) !== null) + \"/\" + elem.type;\n\n return elem;\n\n }\n\n function restoreScript( elem ) {\n\n var match = rscriptTypeMasked.exec( elem.type );\n\n if ( match ) {\n\n elem.type = match[1];\n\n } else {\n\n elem.removeAttribute(\"type\");\n\n }\n\n return elem;\n\n }\n\n\n\n// Mark scripts as having already been evaluated\n\n function setGlobalEval( elems, refElements ) {\n\n var elem,\n\n i = 0;\n\n for ( ; (elem = elems[i]) != null; i++ ) {\n\n jQuery._data( elem, \"globalEval\", !refElements || jQuery._data( refElements[i], \"globalEval\" ) );\n\n }\n\n }\n\n\n\n function cloneCopyEvent( src, dest ) {\n\n\n\n if ( dest.nodeType !== 1 || !jQuery.hasData( src ) ) {\n\n return;\n\n }\n\n\n\n var type, i, l,\n\n oldData = jQuery._data( src ),\n\n 
curData = jQuery._data( dest, oldData ),\n\n events = oldData.events;\n\n\n\n if ( events ) {\n\n delete curData.handle;\n\n curData.events = {};\n\n\n\n for ( type in events ) {\n\n for ( i = 0, l = events[ type ].length; i < l; i++ ) {\n\n jQuery.event.add( dest, type, events[ type ][ i ] );\n\n }\n\n }\n\n }\n\n\n\n // make the cloned public data object a copy from the original\n\n if ( curData.data ) {\n\n curData.data = jQuery.extend( {}, curData.data );\n\n }\n\n }\n\n\n\n function fixCloneNodeIssues( src, dest ) {\n\n var nodeName, e, data;\n\n\n\n // We do not need to do anything for non-Elements\n\n if ( dest.nodeType !== 1 ) {\n\n return;\n\n }\n\n\n\n nodeName = dest.nodeName.toLowerCase();\n\n\n\n // IE6-8 copies events bound via attachEvent when using cloneNode.\n\n if ( !support.noCloneEvent && dest[ jQuery.expando ] ) {\n\n data = jQuery._data( dest );\n\n\n\n for ( e in data.events ) {\n\n jQuery.removeEvent( dest, e, data.handle );\n\n }\n\n\n\n // Event data gets referenced instead of copied if the expando gets copied too\n\n dest.removeAttribute( jQuery.expando );\n\n }\n\n\n\n // IE blanks contents when cloning scripts, and tries to evaluate newly-set text\n\n if ( nodeName === \"script\" && dest.text !== src.text ) {\n\n disableScript( dest ).text = src.text;\n\n restoreScript( dest );\n\n\n\n // IE6-10 improperly clones children of object elements using classid.\n\n // IE10 throws NoModificationAllowedError if parent is null, #12132.\n\n } else if ( nodeName === \"object\" ) {\n\n if ( dest.parentNode ) {\n\n dest.outerHTML = src.outerHTML;\n\n }\n\n\n\n // This path appears unavoidable for IE9. When cloning an object\n\n // element in IE9, the outerHTML strategy above is not sufficient.\n\n // If the src has innerHTML and the destination does not,\n\n // copy the src.innerHTML into the dest.innerHTML. 
#10324\n\n if ( support.html5Clone && ( src.innerHTML && !jQuery.trim(dest.innerHTML) ) ) {\n\n dest.innerHTML = src.innerHTML;\n\n }\n\n\n\n } else if ( nodeName === \"input\" && rcheckableType.test( src.type ) ) {\n\n // IE6-8 fails to persist the checked state of a cloned checkbox\n\n // or radio button. Worse, IE6-7 fail to give the cloned element\n\n // a checked appearance if the defaultChecked value isn't also set\n\n\n\n dest.defaultChecked = dest.checked = src.checked;\n\n\n\n // IE6-7 get confused and end up setting the value of a cloned\n\n // checkbox/radio button to an empty string instead of \"on\"\n\n if ( dest.value !== src.value ) {\n\n dest.value = src.value;\n\n }\n\n\n\n // IE6-8 fails to return the selected option to the default selected\n\n // state when cloning options\n\n } else if ( nodeName === \"option\" ) {\n\n dest.defaultSelected = dest.selected = src.defaultSelected;\n\n\n\n // IE6-8 fails to set the defaultValue to the correct value when\n\n // cloning other types of input fields\n\n } else if ( nodeName === \"input\" || nodeName === \"textarea\" ) {\n\n dest.defaultValue = src.defaultValue;\n\n }\n\n }\n\n\n\n jQuery.extend({\n\n clone: function( elem, dataAndEvents, deepDataAndEvents ) {\n\n var destElements, node, clone, i, srcElements,\n\n inPage = jQuery.contains( elem.ownerDocument, elem );\n\n\n\n if ( support.html5Clone || jQuery.isXMLDoc(elem) || !rnoshimcache.test( \"<\" + elem.nodeName + \">\" ) ) {\n\n clone = elem.cloneNode( true );\n\n\n\n // IE<=8 does not properly clone detached, unknown element nodes\n\n } else {\n\n fragmentDiv.innerHTML = elem.outerHTML;\n\n fragmentDiv.removeChild( clone = fragmentDiv.firstChild );\n\n }\n\n\n\n if ( (!support.noCloneEvent || !support.noCloneChecked) &&\n\n (elem.nodeType === 1 || elem.nodeType === 11) && !jQuery.isXMLDoc(elem) ) {\n\n\n\n // We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2\n\n destElements = getAll( clone );\n\n srcElements = 
getAll( elem );\n\n\n\n // Fix all IE cloning issues\n\n for ( i = 0; (node = srcElements[i]) != null; ++i ) {\n\n // Ensure that the destination node is not null; Fixes #9587\n\n if ( destElements[i] ) {\n\n fixCloneNodeIssues( node, destElements[i] );\n\n }\n\n }\n\n }\n\n\n\n // Copy the events from the original to the clone\n\n if ( dataAndEvents ) {\n\n if ( deepDataAndEvents ) {\n\n srcElements = srcElements || getAll( elem );\n\n destElements = destElements || getAll( clone );\n\n\n\n for ( i = 0; (node = srcElements[i]) != null; i++ ) {\n\n cloneCopyEvent( node, destElements[i] );\n\n }\n\n } else {\n\n cloneCopyEvent( elem, clone );\n\n }\n\n }\n\n\n\n // Preserve script evaluation history\n\n destElements = getAll( clone, \"script\" );\n\n if ( destElements.length > 0 ) {\n\n setGlobalEval( destElements, !inPage && getAll( elem, \"script\" ) );\n\n }\n\n\n\n destElements = srcElements = node = null;\n\n\n\n // Return the cloned set\n\n return clone;\n\n },\n\n\n\n buildFragment: function( elems, context, scripts, selection ) {\n\n var j, elem, contains,\n\n tmp, tag, tbody, wrap,\n\n l = elems.length,\n\n\n\n // Ensure a safe fragment\n\n safe = createSafeFragment( context ),\n\n\n\n nodes = [],\n\n i = 0;\n\n\n\n for ( ; i < l; i++ ) {\n\n elem = elems[ i ];\n\n\n\n if ( elem || elem === 0 ) {\n\n\n\n // Add nodes directly\n\n if ( jQuery.type( elem ) === \"object\" ) {\n\n jQuery.merge( nodes, elem.nodeType ? 
[ elem ] : elem );\n\n\n\n // Convert non-html into a text node\n\n } else if ( !rhtml.test( elem ) ) {\n\n nodes.push( context.createTextNode( elem ) );\n\n\n\n // Convert html into DOM nodes\n\n } else {\n\n tmp = tmp || safe.appendChild( context.createElement(\"div\") );\n\n\n\n // Deserialize a standard representation\n\n tag = (rtagName.exec( elem ) || [ \"\", \"\" ])[ 1 ].toLowerCase();\n\n wrap = wrapMap[ tag ] || wrapMap._default;\n\n\n\n tmp.innerHTML = wrap[1] + elem.replace( rxhtmlTag, \"<$1></$2>\" ) + wrap[2];\n\n\n\n // Descend through wrappers to the right content\n\n j = wrap[0];\n\n while ( j-- ) {\n\n tmp = tmp.lastChild;\n\n }\n\n\n\n // Manually add leading whitespace removed by IE\n\n if ( !support.leadingWhitespace && rleadingWhitespace.test( elem ) ) {\n\n nodes.push( context.createTextNode( rleadingWhitespace.exec( elem )[0] ) );\n\n }\n\n\n\n // Remove IE's autoinserted <tbody> from table fragments\n\n if ( !support.tbody ) {\n\n\n\n // String was a <table>, *may* have spurious <tbody>\n\n elem = tag === \"table\" && !rtbody.test( elem ) ?\n\n tmp.firstChild :\n\n\n\n // String was a bare <thead> or <tfoot>\n\n wrap[1] === \"<table>\" && !rtbody.test( elem ) ?\n\n tmp :\n\n 0;\n\n\n\n j = elem && elem.childNodes.length;\n\n while ( j-- ) {\n\n if ( jQuery.nodeName( (tbody = elem.childNodes[j]), \"tbody\" ) && !tbody.childNodes.length ) {\n\n elem.removeChild( tbody );\n\n }\n\n }\n\n }\n\n\n\n jQuery.merge( nodes, tmp.childNodes );\n\n\n\n // Fix #12392 for WebKit and IE > 9\n\n tmp.textContent = \"\";\n\n\n\n // Fix #12392 for oldIE\n\n while ( tmp.firstChild ) {\n\n tmp.removeChild( tmp.firstChild );\n\n }\n\n\n\n // Remember the top-level container for proper cleanup\n\n tmp = safe.lastChild;\n\n }\n\n }\n\n }\n\n\n\n // Fix #11356: Clear elements from fragment\n\n if ( tmp ) {\n\n safe.removeChild( tmp );\n\n }\n\n\n\n // Reset defaultChecked for any radios and checkboxes\n\n // about to be appended to the DOM in IE 6/7 (#8060)\n\n if ( 
!support.appendChecked ) {\n\n jQuery.grep( getAll( nodes, \"input\" ), fixDefaultChecked );\n\n }\n\n\n\n i = 0;\n\n while ( (elem = nodes[ i++ ]) ) {\n\n\n\n // #4087 - If origin and destination elements are the same, and this is\n\n // that element, do not do anything\n\n if ( selection && jQuery.inArray( elem, selection ) !== -1 ) {\n\n continue;\n\n }\n\n\n\n contains = jQuery.contains( elem.ownerDocument, elem );\n\n\n\n // Append to fragment\n\n tmp = getAll( safe.appendChild( elem ), \"script\" );\n\n\n\n // Preserve script evaluation history\n\n if ( contains ) {\n\n setGlobalEval( tmp );\n\n }\n\n\n\n // Capture executables\n\n if ( scripts ) {\n\n j = 0;\n\n while ( (elem = tmp[ j++ ]) ) {\n\n if ( rscriptType.test( elem.type || \"\" ) ) {\n\n scripts.push( elem );\n\n }\n\n }\n\n }\n\n }\n\n\n\n tmp = null;\n\n\n\n return safe;\n\n },\n\n\n\n cleanData: function( elems, /* internal */ acceptData ) {\n\n var elem, type, id, data,\n\n i = 0,\n\n internalKey = jQuery.expando,\n\n cache = jQuery.cache,\n\n deleteExpando = support.deleteExpando,\n\n special = jQuery.event.special;\n\n\n\n for ( ; (elem = elems[i]) != null; i++ ) {\n\n if ( acceptData || jQuery.acceptData( elem ) ) {\n\n\n\n id = elem[ internalKey ];\n\n data = id && cache[ id ];\n\n\n\n if ( data ) {\n\n if ( data.events ) {\n\n for ( type in data.events ) {\n\n if ( special[ type ] ) {\n\n jQuery.event.remove( elem, type );\n\n\n\n // This is a shortcut to avoid jQuery.event.remove's overhead\n\n } else {\n\n jQuery.removeEvent( elem, type, data.handle );\n\n }\n\n }\n\n }\n\n\n\n // Remove cache only if it was not already removed by jQuery.event.remove\n\n if ( cache[ id ] ) {\n\n\n\n delete cache[ id ];\n\n\n\n // IE does not allow us to delete expando properties from nodes,\n\n // nor does it have a removeAttribute function on Document nodes;\n\n // we must handle all of these cases\n\n if ( deleteExpando ) {\n\n delete elem[ internalKey ];\n\n\n\n } else if ( typeof elem.removeAttribute 
!== strundefined ) {\n\n elem.removeAttribute( internalKey );\n\n\n\n } else {\n\n elem[ internalKey ] = null;\n\n }\n\n\n\n deletedIds.push( id );\n\n }\n\n }\n\n }\n\n }\n\n }\n\n });\n\n\n\n jQuery.fn.extend({\n\n text: function( value ) {\n\n return access( this, function( value ) {\n\n return value === undefined ?\n\n jQuery.text( this ) :\n\n this.empty().append( ( this[0] && this[0].ownerDocument || document ).createTextNode( value ) );\n\n }, null, value, arguments.length );\n\n },\n\n\n\n append: function() {\n\n return this.domManip( arguments, function( elem ) {\n\n if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {\n\n var target = manipulationTarget( this, elem );\n\n target.appendChild( elem );\n\n }\n\n });\n\n },\n\n\n\n prepend: function() {\n\n return this.domManip( arguments, function( elem ) {\n\n if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {\n\n var target = manipulationTarget( this, elem );\n\n target.insertBefore( elem, target.firstChild );\n\n }\n\n });\n\n },\n\n\n\n before: function() {\n\n return this.domManip( arguments, function( elem ) {\n\n if ( this.parentNode ) {\n\n this.parentNode.insertBefore( elem, this );\n\n }\n\n });\n\n },\n\n\n\n after: function() {\n\n return this.domManip( arguments, function( elem ) {\n\n if ( this.parentNode ) {\n\n this.parentNode.insertBefore( elem, this.nextSibling );\n\n }\n\n });\n\n },\n\n\n\n remove: function( selector, keepData /* Internal Use Only */ ) {\n\n var elem,\n\n elems = selector ? 
jQuery.filter( selector, this ) : this,\n\n i = 0;\n\n\n\n for ( ; (elem = elems[i]) != null; i++ ) {\n\n\n\n if ( !keepData && elem.nodeType === 1 ) {\n\n jQuery.cleanData( getAll( elem ) );\n\n }\n\n\n\n if ( elem.parentNode ) {\n\n if ( keepData && jQuery.contains( elem.ownerDocument, elem ) ) {\n\n setGlobalEval( getAll( elem, \"script\" ) );\n\n }\n\n elem.parentNode.removeChild( elem );\n\n }\n\n }\n\n\n\n return this;\n\n },\n\n\n\n empty: function() {\n\n var elem,\n\n i = 0;\n\n\n\n for ( ; (elem = this[i]) != null; i++ ) {\n\n // Remove element nodes and prevent memory leaks\n\n if ( elem.nodeType === 1 ) {\n\n jQuery.cleanData( getAll( elem, false ) );\n\n }\n\n\n\n // Remove any remaining nodes\n\n while ( elem.firstChild ) {\n\n elem.removeChild( elem.firstChild );\n\n }\n\n\n\n // If this is a select, ensure that it displays empty (#12336)\n\n // Support: IE<9\n\n if ( elem.options && jQuery.nodeName( elem, \"select\" ) ) {\n\n elem.options.length = 0;\n\n }\n\n }\n\n\n\n return this;\n\n },\n\n\n\n clone: function( dataAndEvents, deepDataAndEvents ) {\n\n dataAndEvents = dataAndEvents == null ? false : dataAndEvents;\n\n deepDataAndEvents = deepDataAndEvents == null ? 
dataAndEvents : deepDataAndEvents;\n\n\n\n return this.map(function() {\n\n return jQuery.clone( this, dataAndEvents, deepDataAndEvents );\n\n });\n\n },\n\n\n\n html: function( value ) {\n\n return access( this, function( value ) {\n\n var elem = this[ 0 ] || {},\n\n i = 0,\n\n l = this.length;\n\n\n\n if ( value === undefined ) {\n\n return elem.nodeType === 1 ?\n\n elem.innerHTML.replace( rinlinejQuery, \"\" ) :\n\n undefined;\n\n }\n\n\n\n // See if we can take a shortcut and just use innerHTML\n\n if ( typeof value === \"string\" && !rnoInnerhtml.test( value ) &&\n\n ( support.htmlSerialize || !rnoshimcache.test( value ) ) &&\n\n ( support.leadingWhitespace || !rleadingWhitespace.test( value ) ) &&\n\n !wrapMap[ (rtagName.exec( value ) || [ \"\", \"\" ])[ 1 ].toLowerCase() ] ) {\n\n\n\n value = value.replace( rxhtmlTag, \"<$1></$2>\" );\n\n\n\n try {\n\n for (; i < l; i++ ) {\n\n // Remove element nodes and prevent memory leaks\n\n elem = this[i] || {};\n\n if ( elem.nodeType === 1 ) {\n\n jQuery.cleanData( getAll( elem, false ) );\n\n elem.innerHTML = value;\n\n }\n\n }\n\n\n\n elem = 0;\n\n\n\n // If using innerHTML throws an exception, use the fallback method\n\n } catch(e) {}\n\n }\n\n\n\n if ( elem ) {\n\n this.empty().append( value );\n\n }\n\n }, null, value, arguments.length );\n\n },\n\n\n\n replaceWith: function() {\n\n var arg = arguments[ 0 ];\n\n\n\n // Make the changes, replacing each context element with the new content\n\n this.domManip( arguments, function( elem ) {\n\n arg = this.parentNode;\n\n\n\n jQuery.cleanData( getAll( this ) );\n\n\n\n if ( arg ) {\n\n arg.replaceChild( elem, this );\n\n }\n\n });\n\n\n\n // Force removal if there was no new content (e.g., from empty arguments)\n\n return arg && (arg.length || arg.nodeType) ? 
this : this.remove();\n\n },\n\n\n\n detach: function( selector ) {\n\n return this.remove( selector, true );\n\n },\n\n\n\n domManip: function( args, callback ) {\n\n\n\n // Flatten any nested arrays\n\n args = concat.apply( [], args );\n\n\n\n var first, node, hasScripts,\n\n scripts, doc, fragment,\n\n i = 0,\n\n l = this.length,\n\n set = this,\n\n iNoClone = l - 1,\n\n value = args[0],\n\n isFunction = jQuery.isFunction( value );\n\n\n\n // We can't cloneNode fragments that contain checked, in WebKit\n\n if ( isFunction ||\n\n ( l > 1 && typeof value === \"string\" &&\n\n !support.checkClone && rchecked.test( value ) ) ) {\n\n return this.each(function( index ) {\n\n var self = set.eq( index );\n\n if ( isFunction ) {\n\n args[0] = value.call( this, index, self.html() );\n\n }\n\n self.domManip( args, callback );\n\n });\n\n }\n\n\n\n if ( l ) {\n\n fragment = jQuery.buildFragment( args, this[ 0 ].ownerDocument, false, this );\n\n first = fragment.firstChild;\n\n\n\n if ( fragment.childNodes.length === 1 ) {\n\n fragment = first;\n\n }\n\n\n\n if ( first ) {\n\n scripts = jQuery.map( getAll( fragment, \"script\" ), disableScript );\n\n hasScripts = scripts.length;\n\n\n\n // Use the original fragment for the last item instead of the first because it can end up\n\n // being emptied incorrectly in certain situations (#8070).\n\n for ( ; i < l; i++ ) {\n\n node = fragment;\n\n\n\n if ( i !== iNoClone ) {\n\n node = jQuery.clone( node, true, true );\n\n\n\n // Keep references to cloned scripts for later restoration\n\n if ( hasScripts ) {\n\n jQuery.merge( scripts, getAll( node, \"script\" ) );\n\n }\n\n }\n\n\n\n callback.call( this[i], node, i );\n\n }\n\n\n\n if ( hasScripts ) {\n\n doc = scripts[ scripts.length - 1 ].ownerDocument;\n\n\n\n // Reenable scripts\n\n jQuery.map( scripts, restoreScript );\n\n\n\n // Evaluate executable scripts on first document insertion\n\n for ( i = 0; i < hasScripts; i++ ) {\n\n node = scripts[ i ];\n\n if ( rscriptType.test( 
node.type || \"\" ) &&\n\n !jQuery._data( node, \"globalEval\" ) && jQuery.contains( doc, node ) ) {\n\n\n\n if ( node.src ) {\n\n // Optional AJAX dependency, but won't run scripts if not present\n\n if ( jQuery._evalUrl ) {\n\n jQuery._evalUrl( node.src );\n\n }\n\n } else {\n\n jQuery.globalEval( ( node.text || node.textContent || node.innerHTML || \"\" ).replace( rcleanScript, \"\" ) );\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Fix #11809: Avoid leaking memory\n\n fragment = first = null;\n\n }\n\n }\n\n\n\n return this;\n\n }\n\n });\n\n\n\n jQuery.each({\n\n appendTo: \"append\",\n\n prependTo: \"prepend\",\n\n insertBefore: \"before\",\n\n insertAfter: \"after\",\n\n replaceAll: \"replaceWith\"\n\n }, function( name, original ) {\n\n jQuery.fn[ name ] = function( selector ) {\n\n var elems,\n\n i = 0,\n\n ret = [],\n\n insert = jQuery( selector ),\n\n last = insert.length - 1;\n\n\n\n for ( ; i <= last; i++ ) {\n\n elems = i === last ? this : this.clone(true);\n\n jQuery( insert[i] )[ original ]( elems );\n\n\n\n // Modern browsers can apply jQuery collections as arrays, but oldIE needs a .get()\n\n push.apply( ret, elems.get() );\n\n }\n\n\n\n return this.pushStack( ret );\n\n };\n\n });\n\n\n\n\n\n var iframe,\n\n elemdisplay = {};\n\n\n\n /**\n\n * Retrieve the actual display of a element\n\n * @param {String} name nodeName of the element\n\n * @param {Object} doc Document object\n\n */\n\n// Called only from within defaultDisplay\n\n function actualDisplay( name, doc ) {\n\n var style,\n\n elem = jQuery( doc.createElement( name ) ).appendTo( doc.body ),\n\n\n\n // getDefaultComputedStyle might be reliably used only on attached element\n\n display = window.getDefaultComputedStyle && ( style = window.getDefaultComputedStyle( elem[ 0 ] ) ) ?\n\n\n\n // Use of this method is a temporary fix (more like optmization) until something better comes along,\n\n // since it was removed from specification and supported only in FF\n\n style.display : jQuery.css( elem[ 0 ], 
\"display\" );\n\n\n\n // We don't have any data stored on the element,\n\n // so use \"detach\" method as fast way to get rid of the element\n\n elem.detach();\n\n\n\n return display;\n\n }\n\n\n\n /**\n\n * Try to determine the default display value of an element\n\n * @param {String} nodeName\n\n */\n\n function defaultDisplay( nodeName ) {\n\n var doc = document,\n\n display = elemdisplay[ nodeName ];\n\n\n\n if ( !display ) {\n\n display = actualDisplay( nodeName, doc );\n\n\n\n // If the simple way fails, read from inside an iframe\n\n if ( display === \"none\" || !display ) {\n\n\n\n // Use the already-created iframe if possible\n\n iframe = (iframe || jQuery( \"<iframe frameborder='0' width='0' height='0'/>\" )).appendTo( doc.documentElement );\n\n\n\n // Always write a new HTML skeleton so Webkit and Firefox don't choke on reuse\n\n doc = ( iframe[ 0 ].contentWindow || iframe[ 0 ].contentDocument ).document;\n\n\n\n // Support: IE\n\n doc.write();\n\n doc.close();\n\n\n\n display = actualDisplay( nodeName, doc );\n\n iframe.detach();\n\n }\n\n\n\n // Store the correct default display\n\n elemdisplay[ nodeName ] = display;\n\n }\n\n\n\n return display;\n\n }\n\n\n\n\n\n (function() {\n\n var shrinkWrapBlocksVal;\n\n\n\n support.shrinkWrapBlocks = function() {\n\n if ( shrinkWrapBlocksVal != null ) {\n\n return shrinkWrapBlocksVal;\n\n }\n\n\n\n // Will be changed later if needed.\n\n shrinkWrapBlocksVal = false;\n\n\n\n // Minified: var b,c,d\n\n var div, body, container;\n\n\n\n body = document.getElementsByTagName( \"body\" )[ 0 ];\n\n if ( !body || !body.style ) {\n\n // Test fired too early or in an unsupported environment, exit.\n\n return;\n\n }\n\n\n\n // Setup\n\n div = document.createElement( \"div\" );\n\n container = document.createElement( \"div\" );\n\n container.style.cssText = \"position:absolute;border:0;width:0;height:0;top:0;left:-9999px\";\n\n body.appendChild( container ).appendChild( div );\n\n\n\n // Support: IE6\n\n // Check if 
elements with layout shrink-wrap their children\n\n if ( typeof div.style.zoom !== strundefined ) {\n\n // Reset CSS: box-sizing; display; margin; border\n\n div.style.cssText =\n\n // Support: Firefox<29, Android 2.3\n\n // Vendor-prefix box-sizing\n\n \"-webkit-box-sizing:content-box;-moz-box-sizing:content-box;\" +\n\n \"box-sizing:content-box;display:block;margin:0;border:0;\" +\n\n \"padding:1px;width:1px;zoom:1\";\n\n div.appendChild( document.createElement( \"div\" ) ).style.width = \"5px\";\n\n shrinkWrapBlocksVal = div.offsetWidth !== 3;\n\n }\n\n\n\n body.removeChild( container );\n\n\n\n return shrinkWrapBlocksVal;\n\n };\n\n\n\n })();\n\n var rmargin = (/^margin/);\n\n\n\n var rnumnonpx = new RegExp( \"^(\" + pnum + \")(?!px)[a-z%]+$\", \"i\" );\n\n\n\n\n\n\n\n var getStyles, curCSS,\n\n rposition = /^(top|right|bottom|left)$/;\n\n\n\n if ( window.getComputedStyle ) {\n\n getStyles = function( elem ) {\n\n // Support: IE<=11+, Firefox<=30+ (#15098, #14150)\n\n // IE throws on elements created in popups\n\n // FF meanwhile throws on frame elements through \"defaultView.getComputedStyle\"\n\n if ( elem.ownerDocument.defaultView.opener ) {\n\n return elem.ownerDocument.defaultView.getComputedStyle( elem, null );\n\n }\n\n\n\n return window.getComputedStyle( elem, null );\n\n };\n\n\n\n curCSS = function( elem, name, computed ) {\n\n var width, minWidth, maxWidth, ret,\n\n style = elem.style;\n\n\n\n computed = computed || getStyles( elem );\n\n\n\n // getPropertyValue is only needed for .css('filter') in IE9, see #12537\n\n ret = computed ? 
computed.getPropertyValue( name ) || computed[ name ] : undefined;\n\n\n\n if ( computed ) {\n\n\n\n if ( ret === \"\" && !jQuery.contains( elem.ownerDocument, elem ) ) {\n\n ret = jQuery.style( elem, name );\n\n }\n\n\n\n // A tribute to the \"awesome hack by Dean Edwards\"\n\n // Chrome < 17 and Safari 5.0 uses \"computed value\" instead of \"used value\" for margin-right\n\n // Safari 5.1.7 (at least) returns percentage for a larger set of values, but width seems to be reliably pixels\n\n // this is against the CSSOM draft spec: http://dev.w3.org/csswg/cssom/#resolved-values\n\n if ( rnumnonpx.test( ret ) && rmargin.test( name ) ) {\n\n\n\n // Remember the original values\n\n width = style.width;\n\n minWidth = style.minWidth;\n\n maxWidth = style.maxWidth;\n\n\n\n // Put in the new values to get a computed value out\n\n style.minWidth = style.maxWidth = style.width = ret;\n\n ret = computed.width;\n\n\n\n // Revert the changed values\n\n style.width = width;\n\n style.minWidth = minWidth;\n\n style.maxWidth = maxWidth;\n\n }\n\n }\n\n\n\n // Support: IE\n\n // IE returns zIndex value as an integer.\n\n return ret === undefined ?\n\n ret :\n\n ret + \"\";\n\n };\n\n } else if ( document.documentElement.currentStyle ) {\n\n getStyles = function( elem ) {\n\n return elem.currentStyle;\n\n };\n\n\n\n curCSS = function( elem, name, computed ) {\n\n var left, rs, rsLeft, ret,\n\n style = elem.style;\n\n\n\n computed = computed || getStyles( elem );\n\n ret = computed ? 
computed[ name ] : undefined;\n\n\n\n // Avoid setting ret to empty string here\n\n // so we don't default to auto\n\n if ( ret == null && style && style[ name ] ) {\n\n ret = style[ name ];\n\n }\n\n\n\n // From the awesome hack by Dean Edwards\n\n // http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291\n\n\n\n // If we're not dealing with a regular pixel number\n\n // but a number that has a weird ending, we need to convert it to pixels\n\n // but not position css attributes, as those are proportional to the parent element instead\n\n // and we can't measure the parent instead because it might trigger a \"stacking dolls\" problem\n\n if ( rnumnonpx.test( ret ) && !rposition.test( name ) ) {\n\n\n\n // Remember the original values\n\n left = style.left;\n\n rs = elem.runtimeStyle;\n\n rsLeft = rs && rs.left;\n\n\n\n // Put in the new values to get a computed value out\n\n if ( rsLeft ) {\n\n rs.left = elem.currentStyle.left;\n\n }\n\n style.left = name === \"fontSize\" ? \"1em\" : ret;\n\n ret = style.pixelLeft + \"px\";\n\n\n\n // Revert the changed values\n\n style.left = left;\n\n if ( rsLeft ) {\n\n rs.left = rsLeft;\n\n }\n\n }\n\n\n\n // Support: IE\n\n // IE returns zIndex value as an integer.\n\n return ret === undefined ?\n\n ret :\n\n ret + \"\" || \"auto\";\n\n };\n\n }\n\n\n\n\n\n\n\n\n\n function addGetHookIf( conditionFn, hookFn ) {\n\n // Define the hook, we'll check on the first run if it's really needed.\n\n return {\n\n get: function() {\n\n var condition = conditionFn();\n\n\n\n if ( condition == null ) {\n\n // The test was not ready at this point; screw the hook this time\n\n // but check again when needed next time.\n\n return;\n\n }\n\n\n\n if ( condition ) {\n\n // Hook not needed (or it's not possible to use it due to missing dependency),\n\n // remove it.\n\n // Since there are no other hooks for marginRight, remove the whole object.\n\n delete this.get;\n\n return;\n\n }\n\n\n\n // Hook needed; redefine it so that the support 
test is not executed again.\n\n\n\n return (this.get = hookFn).apply( this, arguments );\n\n }\n\n };\n\n }\n\n\n\n\n\n (function() {\n\n // Minified: var b,c,d,e,f,g, h,i\n\n var div, style, a, pixelPositionVal, boxSizingReliableVal,\n\n reliableHiddenOffsetsVal, reliableMarginRightVal;\n\n\n\n // Setup\n\n div = document.createElement( \"div\" );\n\n div.innerHTML = \" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>\";\n\n a = div.getElementsByTagName( \"a\" )[ 0 ];\n\n style = a && a.style;\n\n\n\n // Finish early in limited (non-browser) environments\n\n if ( !style ) {\n\n return;\n\n }\n\n\n\n style.cssText = \"float:left;opacity:.5\";\n\n\n\n // Support: IE<9\n\n // Make sure that element opacity exists (as opposed to filter)\n\n support.opacity = style.opacity === \"0.5\";\n\n\n\n // Verify style float existence\n\n // (IE uses styleFloat instead of cssFloat)\n\n support.cssFloat = !!style.cssFloat;\n\n\n\n div.style.backgroundClip = \"content-box\";\n\n div.cloneNode( true ).style.backgroundClip = \"\";\n\n support.clearCloneStyle = div.style.backgroundClip === \"content-box\";\n\n\n\n // Support: Firefox<29, Android 2.3\n\n // Vendor-prefix box-sizing\n\n support.boxSizing = style.boxSizing === \"\" || style.MozBoxSizing === \"\" ||\n\n style.WebkitBoxSizing === \"\";\n\n\n\n jQuery.extend(support, {\n\n reliableHiddenOffsets: function() {\n\n if ( reliableHiddenOffsetsVal == null ) {\n\n computeStyleTests();\n\n }\n\n return reliableHiddenOffsetsVal;\n\n },\n\n\n\n boxSizingReliable: function() {\n\n if ( boxSizingReliableVal == null ) {\n\n computeStyleTests();\n\n }\n\n return boxSizingReliableVal;\n\n },\n\n\n\n pixelPosition: function() {\n\n if ( pixelPositionVal == null ) {\n\n computeStyleTests();\n\n }\n\n return pixelPositionVal;\n\n },\n\n\n\n // Support: Android 2.3\n\n reliableMarginRight: function() {\n\n if ( reliableMarginRightVal == null ) {\n\n computeStyleTests();\n\n }\n\n return reliableMarginRightVal;\n\n }\n\n 
});\n\n\n\n function computeStyleTests() {\n\n // Minified: var b,c,d,j\n\n var div, body, container, contents;\n\n\n\n body = document.getElementsByTagName( \"body\" )[ 0 ];\n\n if ( !body || !body.style ) {\n\n // Test fired too early or in an unsupported environment, exit.\n\n return;\n\n }\n\n\n\n // Setup\n\n div = document.createElement( \"div\" );\n\n container = document.createElement( \"div\" );\n\n container.style.cssText = \"position:absolute;border:0;width:0;height:0;top:0;left:-9999px\";\n\n body.appendChild( container ).appendChild( div );\n\n\n\n div.style.cssText =\n\n // Support: Firefox<29, Android 2.3\n\n // Vendor-prefix box-sizing\n\n \"-webkit-box-sizing:border-box;-moz-box-sizing:border-box;\" +\n\n \"box-sizing:border-box;display:block;margin-top:1%;top:1%;\" +\n\n \"border:1px;padding:1px;width:4px;position:absolute\";\n\n\n\n // Support: IE<9\n\n // Assume reasonable values in the absence of getComputedStyle\n\n pixelPositionVal = boxSizingReliableVal = false;\n\n reliableMarginRightVal = true;\n\n\n\n // Check for getComputedStyle so that this code is not run in IE<9.\n\n if ( window.getComputedStyle ) {\n\n pixelPositionVal = ( window.getComputedStyle( div, null ) || {} ).top !== \"1%\";\n\n boxSizingReliableVal =\n\n ( window.getComputedStyle( div, null ) || { width: \"4px\" } ).width === \"4px\";\n\n\n\n // Support: Android 2.3\n\n // Div with explicit width and no margin-right incorrectly\n\n // gets computed margin-right based on width of container (#3333)\n\n // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right\n\n contents = div.appendChild( document.createElement( \"div\" ) );\n\n\n\n // Reset CSS: box-sizing; display; margin; border; padding\n\n contents.style.cssText = div.style.cssText =\n\n // Support: Firefox<29, Android 2.3\n\n // Vendor-prefix box-sizing\n\n \"-webkit-box-sizing:content-box;-moz-box-sizing:content-box;\" +\n\n \"box-sizing:content-box;display:block;margin:0;border:0;padding:0\";\n\n 
contents.style.marginRight = contents.style.width = \"0\";\n\n div.style.width = \"1px\";\n\n\n\n reliableMarginRightVal =\n\n !parseFloat( ( window.getComputedStyle( contents, null ) || {} ).marginRight );\n\n\n\n div.removeChild( contents );\n\n }\n\n\n\n // Support: IE8\n\n // Check if table cells still have offsetWidth/Height when they are set\n\n // to display:none and there are still other visible table cells in a\n\n // table row; if so, offsetWidth/Height are not reliable for use when\n\n // determining if an element has been hidden directly using\n\n // display:none (it is still safe to use offsets if a parent element is\n\n // hidden; don safety goggles and see bug #4512 for more information).\n\n div.innerHTML = \"<table><tr><td></td><td>t</td></tr></table>\";\n\n contents = div.getElementsByTagName( \"td\" );\n\n contents[ 0 ].style.cssText = \"margin:0;border:0;padding:0;display:none\";\n\n reliableHiddenOffsetsVal = contents[ 0 ].offsetHeight === 0;\n\n if ( reliableHiddenOffsetsVal ) {\n\n contents[ 0 ].style.display = \"\";\n\n contents[ 1 ].style.display = \"none\";\n\n reliableHiddenOffsetsVal = contents[ 0 ].offsetHeight === 0;\n\n }\n\n\n\n body.removeChild( container );\n\n }\n\n\n\n })();\n\n\n\n\n\n// A method for quickly swapping in/out CSS properties to get correct calculations.\n\n jQuery.swap = function( elem, options, callback, args ) {\n\n var ret, name,\n\n old = {};\n\n\n\n // Remember the old values, and insert the new ones\n\n for ( name in options ) {\n\n old[ name ] = elem.style[ name ];\n\n elem.style[ name ] = options[ name ];\n\n }\n\n\n\n ret = callback.apply( elem, args || [] );\n\n\n\n // Revert the old values\n\n for ( name in options ) {\n\n elem.style[ name ] = old[ name ];\n\n }\n\n\n\n return ret;\n\n };\n\n\n\n\n\n var\n\n ralpha = /alpha\\([^)]*\\)/i,\n\n ropacity = /opacity\\s*=\\s*([^)]*)/,\n\n\n\n // swappable if display is none or starts with table except \"table\", \"table-cell\", or \"table-caption\"\n\n // see 
here for display values: https://developer.mozilla.org/en-US/docs/CSS/display\n\n rdisplayswap = /^(none|table(?!-c[ea]).+)/,\n\n rnumsplit = new RegExp( \"^(\" + pnum + \")(.*)$\", \"i\" ),\n\n rrelNum = new RegExp( \"^([+-])=(\" + pnum + \")\", \"i\" ),\n\n\n\n cssShow = { position: \"absolute\", visibility: \"hidden\", display: \"block\" },\n\n cssNormalTransform = {\n\n letterSpacing: \"0\",\n\n fontWeight: \"400\"\n\n },\n\n\n\n cssPrefixes = [ \"Webkit\", \"O\", \"Moz\", \"ms\" ];\n\n\n\n\n\n// return a css property mapped to a potentially vendor prefixed property\n\n function vendorPropName( style, name ) {\n\n\n\n // shortcut for names that are not vendor prefixed\n\n if ( name in style ) {\n\n return name;\n\n }\n\n\n\n // check for vendor prefixed names\n\n var capName = name.charAt(0).toUpperCase() + name.slice(1),\n\n origName = name,\n\n i = cssPrefixes.length;\n\n\n\n while ( i-- ) {\n\n name = cssPrefixes[ i ] + capName;\n\n if ( name in style ) {\n\n return name;\n\n }\n\n }\n\n\n\n return origName;\n\n }\n\n\n\n function showHide( elements, show ) {\n\n var display, elem, hidden,\n\n values = [],\n\n index = 0,\n\n length = elements.length;\n\n\n\n for ( ; index < length; index++ ) {\n\n elem = elements[ index ];\n\n if ( !elem.style ) {\n\n continue;\n\n }\n\n\n\n values[ index ] = jQuery._data( elem, \"olddisplay\" );\n\n display = elem.style.display;\n\n if ( show ) {\n\n // Reset the inline display of this element to learn if it is\n\n // being hidden by cascaded rules or not\n\n if ( !values[ index ] && display === \"none\" ) {\n\n elem.style.display = \"\";\n\n }\n\n\n\n // Set elements which have been overridden with display: none\n\n // in a stylesheet to whatever the default browser style is\n\n // for such an element\n\n if ( elem.style.display === \"\" && isHidden( elem ) ) {\n\n values[ index ] = jQuery._data( elem, \"olddisplay\", defaultDisplay(elem.nodeName) );\n\n }\n\n } else {\n\n hidden = isHidden( elem );\n\n\n\n if ( display && 
display !== \"none\" || !hidden ) {\n\n jQuery._data( elem, \"olddisplay\", hidden ? display : jQuery.css( elem, \"display\" ) );\n\n }\n\n }\n\n }\n\n\n\n // Set the display of most of the elements in a second loop\n\n // to avoid the constant reflow\n\n for ( index = 0; index < length; index++ ) {\n\n elem = elements[ index ];\n\n if ( !elem.style ) {\n\n continue;\n\n }\n\n if ( !show || elem.style.display === \"none\" || elem.style.display === \"\" ) {\n\n elem.style.display = show ? values[ index ] || \"\" : \"none\";\n\n }\n\n }\n\n\n\n return elements;\n\n }\n\n\n\n function setPositiveNumber( elem, value, subtract ) {\n\n var matches = rnumsplit.exec( value );\n\n return matches ?\n\n // Guard against undefined \"subtract\", e.g., when used as in cssHooks\n\n Math.max( 0, matches[ 1 ] - ( subtract || 0 ) ) + ( matches[ 2 ] || \"px\" ) :\n\n value;\n\n }\n\n\n\n function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) {\n\n var i = extra === ( isBorderBox ? \"border\" : \"content\" ) ?\n\n // If we already have the right measurement, avoid augmentation\n\n 4 :\n\n // Otherwise initialize for horizontal or vertical properties\n\n name === \"width\" ? 
1 : 0,\n\n\n\n val = 0;\n\n\n\n for ( ; i < 4; i += 2 ) {\n\n // both box models exclude margin, so add it if we want it\n\n if ( extra === \"margin\" ) {\n\n val += jQuery.css( elem, extra + cssExpand[ i ], true, styles );\n\n }\n\n\n\n if ( isBorderBox ) {\n\n // border-box includes padding, so remove it if we want content\n\n if ( extra === \"content\" ) {\n\n val -= jQuery.css( elem, \"padding\" + cssExpand[ i ], true, styles );\n\n }\n\n\n\n // at this point, extra isn't border nor margin, so remove border\n\n if ( extra !== \"margin\" ) {\n\n val -= jQuery.css( elem, \"border\" + cssExpand[ i ] + \"Width\", true, styles );\n\n }\n\n } else {\n\n // at this point, extra isn't content, so add padding\n\n val += jQuery.css( elem, \"padding\" + cssExpand[ i ], true, styles );\n\n\n\n // at this point, extra isn't content nor padding, so add border\n\n if ( extra !== \"padding\" ) {\n\n val += jQuery.css( elem, \"border\" + cssExpand[ i ] + \"Width\", true, styles );\n\n }\n\n }\n\n }\n\n\n\n return val;\n\n }\n\n\n\n function getWidthOrHeight( elem, name, extra ) {\n\n\n\n // Start with offset property, which is equivalent to the border-box value\n\n var valueIsBorderBox = true,\n\n val = name === \"width\" ? elem.offsetWidth : elem.offsetHeight,\n\n styles = getStyles( elem ),\n\n isBorderBox = support.boxSizing && jQuery.css( elem, \"boxSizing\", false, styles ) === \"border-box\";\n\n\n\n // some non-html elements return undefined for offsetWidth, so check for null/undefined\n\n // svg - https://bugzilla.mozilla.org/show_bug.cgi?id=649285\n\n // MathML - https://bugzilla.mozilla.org/show_bug.cgi?id=491668\n\n if ( val <= 0 || val == null ) {\n\n // Fall back to computed then uncomputed css if necessary\n\n val = curCSS( elem, name, styles );\n\n if ( val < 0 || val == null ) {\n\n val = elem.style[ name ];\n\n }\n\n\n\n // Computed unit is not pixels. 
Stop here and return.\n\n if ( rnumnonpx.test(val) ) {\n\n return val;\n\n }\n\n\n\n // we need the check for style in case a browser which returns unreliable values\n\n // for getComputedStyle silently falls back to the reliable elem.style\n\n valueIsBorderBox = isBorderBox && ( support.boxSizingReliable() || val === elem.style[ name ] );\n\n\n\n // Normalize \"\", auto, and prepare for extra\n\n val = parseFloat( val ) || 0;\n\n }\n\n\n\n // use the active box-sizing model to add/subtract irrelevant styles\n\n return ( val +\n\n augmentWidthOrHeight(\n\n elem,\n\n name,\n\n extra || ( isBorderBox ? \"border\" : \"content\" ),\n\n valueIsBorderBox,\n\n styles\n\n )\n\n ) + \"px\";\n\n }\n\n\n\n jQuery.extend({\n\n // Add in style property hooks for overriding the default\n\n // behavior of getting and setting a style property\n\n cssHooks: {\n\n opacity: {\n\n get: function( elem, computed ) {\n\n if ( computed ) {\n\n // We should always get a number back from opacity\n\n var ret = curCSS( elem, \"opacity\" );\n\n return ret === \"\" ? \"1\" : ret;\n\n }\n\n }\n\n }\n\n },\n\n\n\n // Don't automatically add \"px\" to these possibly-unitless properties\n\n cssNumber: {\n\n \"columnCount\": true,\n\n \"fillOpacity\": true,\n\n \"flexGrow\": true,\n\n \"flexShrink\": true,\n\n \"fontWeight\": true,\n\n \"lineHeight\": true,\n\n \"opacity\": true,\n\n \"order\": true,\n\n \"orphans\": true,\n\n \"widows\": true,\n\n \"zIndex\": true,\n\n \"zoom\": true\n\n },\n\n\n\n // Add in properties whose names you wish to fix before\n\n // setting or getting the value\n\n cssProps: {\n\n // normalize float css property\n\n \"float\": support.cssFloat ? 
\"cssFloat\" : \"styleFloat\"\n\n },\n\n\n\n // Get and set the style property on a DOM Node\n\n style: function( elem, name, value, extra ) {\n\n // Don't set styles on text and comment nodes\n\n if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) {\n\n return;\n\n }\n\n\n\n // Make sure that we're working with the right name\n\n var ret, type, hooks,\n\n origName = jQuery.camelCase( name ),\n\n style = elem.style;\n\n\n\n name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( style, origName ) );\n\n\n\n // gets hook for the prefixed version\n\n // followed by the unprefixed version\n\n hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];\n\n\n\n // Check if we're setting a value\n\n if ( value !== undefined ) {\n\n type = typeof value;\n\n\n\n // convert relative number strings (+= or -=) to relative numbers. #7345\n\n if ( type === \"string\" && (ret = rrelNum.exec( value )) ) {\n\n value = ( ret[1] + 1 ) * ret[2] + parseFloat( jQuery.css( elem, name ) );\n\n // Fixes bug #9237\n\n type = \"number\";\n\n }\n\n\n\n // Make sure that null and NaN values aren't set. 
See: #7116\n\n if ( value == null || value !== value ) {\n\n return;\n\n }\n\n\n\n // If a number was passed in, add 'px' to the (except for certain CSS properties)\n\n if ( type === \"number\" && !jQuery.cssNumber[ origName ] ) {\n\n value += \"px\";\n\n }\n\n\n\n // Fixes #8908, it can be done more correctly by specifing setters in cssHooks,\n\n // but it would mean to define eight (for every problematic property) identical functions\n\n if ( !support.clearCloneStyle && value === \"\" && name.indexOf(\"background\") === 0 ) {\n\n style[ name ] = \"inherit\";\n\n }\n\n\n\n // If a hook was provided, use that value, otherwise just set the specified value\n\n if ( !hooks || !(\"set\" in hooks) || (value = hooks.set( elem, value, extra )) !== undefined ) {\n\n\n\n // Support: IE\n\n // Swallow errors from 'invalid' CSS values (#5509)\n\n try {\n\n style[ name ] = value;\n\n } catch(e) {}\n\n }\n\n\n\n } else {\n\n // If a hook was provided get the non-computed value from there\n\n if ( hooks && \"get\" in hooks && (ret = hooks.get( elem, false, extra )) !== undefined ) {\n\n return ret;\n\n }\n\n\n\n // Otherwise just get the value from the style object\n\n return style[ name ];\n\n }\n\n },\n\n\n\n css: function( elem, name, extra, styles ) {\n\n var num, val, hooks,\n\n origName = jQuery.camelCase( name );\n\n\n\n // Make sure that we're working with the right name\n\n name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( elem.style, origName ) );\n\n\n\n // gets hook for the prefixed version\n\n // followed by the unprefixed version\n\n hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];\n\n\n\n // If a hook was provided get the computed value from there\n\n if ( hooks && \"get\" in hooks ) {\n\n val = hooks.get( elem, true, extra );\n\n }\n\n\n\n // Otherwise, if a way to get the computed value exists, use that\n\n if ( val === undefined ) {\n\n val = curCSS( elem, name, styles );\n\n }\n\n\n\n //convert \"normal\" to 
computed value\n\n if ( val === \"normal\" && name in cssNormalTransform ) {\n\n val = cssNormalTransform[ name ];\n\n }\n\n\n\n // Return, converting to number if forced or a qualifier was provided and val looks numeric\n\n if ( extra === \"\" || extra ) {\n\n num = parseFloat( val );\n\n return extra === true || jQuery.isNumeric( num ) ? num || 0 : val;\n\n }\n\n return val;\n\n }\n\n });\n\n\n\n jQuery.each([ \"height\", \"width\" ], function( i, name ) {\n\n jQuery.cssHooks[ name ] = {\n\n get: function( elem, computed, extra ) {\n\n if ( computed ) {\n\n // certain elements can have dimension info if we invisibly show them\n\n // however, it must have a current display style that would benefit from this\n\n return rdisplayswap.test( jQuery.css( elem, \"display\" ) ) && elem.offsetWidth === 0 ?\n\n jQuery.swap( elem, cssShow, function() {\n\n return getWidthOrHeight( elem, name, extra );\n\n }) :\n\n getWidthOrHeight( elem, name, extra );\n\n }\n\n },\n\n\n\n set: function( elem, value, extra ) {\n\n var styles = extra && getStyles( elem );\n\n return setPositiveNumber( elem, value, extra ?\n\n augmentWidthOrHeight(\n\n elem,\n\n name,\n\n extra,\n\n support.boxSizing && jQuery.css( elem, \"boxSizing\", false, styles ) === \"border-box\",\n\n styles\n\n ) : 0\n\n );\n\n }\n\n };\n\n });\n\n\n\n if ( !support.opacity ) {\n\n jQuery.cssHooks.opacity = {\n\n get: function( elem, computed ) {\n\n // IE uses filters for opacity\n\n return ropacity.test( (computed && elem.currentStyle ? elem.currentStyle.filter : elem.style.filter) || \"\" ) ?\n\n ( 0.01 * parseFloat( RegExp.$1 ) ) + \"\" :\n\n computed ? \"1\" : \"\";\n\n },\n\n\n\n set: function( elem, value ) {\n\n var style = elem.style,\n\n currentStyle = elem.currentStyle,\n\n opacity = jQuery.isNumeric( value ) ? 
\"alpha(opacity=\" + value * 100 + \")\" : \"\",\n\n filter = currentStyle && currentStyle.filter || style.filter || \"\";\n\n\n\n // IE has trouble with opacity if it does not have layout\n\n // Force it by setting the zoom level\n\n style.zoom = 1;\n\n\n\n // if setting opacity to 1, and no other filters exist - attempt to remove filter attribute #6652\n\n // if value === \"\", then remove inline opacity #12685\n\n if ( ( value >= 1 || value === \"\" ) &&\n\n jQuery.trim( filter.replace( ralpha, \"\" ) ) === \"\" &&\n\n style.removeAttribute ) {\n\n\n\n // Setting style.filter to null, \"\" & \" \" still leave \"filter:\" in the cssText\n\n // if \"filter:\" is present at all, clearType is disabled, we want to avoid this\n\n // style.removeAttribute is IE Only, but so apparently is this code path...\n\n style.removeAttribute( \"filter\" );\n\n\n\n // if there is no filter style applied in a css rule or unset inline opacity, we are done\n\n if ( value === \"\" || currentStyle && !currentStyle.filter ) {\n\n return;\n\n }\n\n }\n\n\n\n // otherwise, set new filter values\n\n style.filter = ralpha.test( filter ) ?\n\n filter.replace( ralpha, opacity ) :\n\n filter + \" \" + opacity;\n\n }\n\n };\n\n }\n\n\n\n jQuery.cssHooks.marginRight = addGetHookIf( support.reliableMarginRight,\n\n function( elem, computed ) {\n\n if ( computed ) {\n\n // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right\n\n // Work around by temporarily setting element display to inline-block\n\n return jQuery.swap( elem, { \"display\": \"inline-block\" },\n\n curCSS, [ elem, \"marginRight\" ] );\n\n }\n\n }\n\n );\n\n\n\n// These hooks are used by animate to expand properties\n\n jQuery.each({\n\n margin: \"\",\n\n padding: \"\",\n\n border: \"Width\"\n\n }, function( prefix, suffix ) {\n\n jQuery.cssHooks[ prefix + suffix ] = {\n\n expand: function( value ) {\n\n var i = 0,\n\n expanded = {},\n\n\n\n // assumes a single number if not a string\n\n parts = typeof value === 
\"string\" ? value.split(\" \") : [ value ];\n\n\n\n for ( ; i < 4; i++ ) {\n\n expanded[ prefix + cssExpand[ i ] + suffix ] =\n\n parts[ i ] || parts[ i - 2 ] || parts[ 0 ];\n\n }\n\n\n\n return expanded;\n\n }\n\n };\n\n\n\n if ( !rmargin.test( prefix ) ) {\n\n jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber;\n\n }\n\n });\n\n\n\n jQuery.fn.extend({\n\n css: function( name, value ) {\n\n return access( this, function( elem, name, value ) {\n\n var styles, len,\n\n map = {},\n\n i = 0;\n\n\n\n if ( jQuery.isArray( name ) ) {\n\n styles = getStyles( elem );\n\n len = name.length;\n\n\n\n for ( ; i < len; i++ ) {\n\n map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles );\n\n }\n\n\n\n return map;\n\n }\n\n\n\n return value !== undefined ?\n\n jQuery.style( elem, name, value ) :\n\n jQuery.css( elem, name );\n\n }, name, value, arguments.length > 1 );\n\n },\n\n show: function() {\n\n return showHide( this, true );\n\n },\n\n hide: function() {\n\n return showHide( this );\n\n },\n\n toggle: function( state ) {\n\n if ( typeof state === \"boolean\" ) {\n\n return state ? this.show() : this.hide();\n\n }\n\n\n\n return this.each(function() {\n\n if ( isHidden( this ) ) {\n\n jQuery( this ).show();\n\n } else {\n\n jQuery( this ).hide();\n\n }\n\n });\n\n }\n\n });\n\n\n\n\n\n function Tween( elem, options, prop, end, easing ) {\n\n return new Tween.prototype.init( elem, options, prop, end, easing );\n\n }\n\n jQuery.Tween = Tween;\n\n\n\n Tween.prototype = {\n\n constructor: Tween,\n\n init: function( elem, options, prop, end, easing, unit ) {\n\n this.elem = elem;\n\n this.prop = prop;\n\n this.easing = easing || \"swing\";\n\n this.options = options;\n\n this.start = this.now = this.cur();\n\n this.end = end;\n\n this.unit = unit || ( jQuery.cssNumber[ prop ] ? 
\"\" : \"px\" );\n\n },\n\n cur: function() {\n\n var hooks = Tween.propHooks[ this.prop ];\n\n\n\n return hooks && hooks.get ?\n\n hooks.get( this ) :\n\n Tween.propHooks._default.get( this );\n\n },\n\n run: function( percent ) {\n\n var eased,\n\n hooks = Tween.propHooks[ this.prop ];\n\n\n\n if ( this.options.duration ) {\n\n this.pos = eased = jQuery.easing[ this.easing ](\n\n percent, this.options.duration * percent, 0, 1, this.options.duration\n\n );\n\n } else {\n\n this.pos = eased = percent;\n\n }\n\n this.now = ( this.end - this.start ) * eased + this.start;\n\n\n\n if ( this.options.step ) {\n\n this.options.step.call( this.elem, this.now, this );\n\n }\n\n\n\n if ( hooks && hooks.set ) {\n\n hooks.set( this );\n\n } else {\n\n Tween.propHooks._default.set( this );\n\n }\n\n return this;\n\n }\n\n };\n\n\n\n Tween.prototype.init.prototype = Tween.prototype;\n\n\n\n Tween.propHooks = {\n\n _default: {\n\n get: function( tween ) {\n\n var result;\n\n\n\n if ( tween.elem[ tween.prop ] != null &&\n\n (!tween.elem.style || tween.elem.style[ tween.prop ] == null) ) {\n\n return tween.elem[ tween.prop ];\n\n }\n\n\n\n // passing an empty string as a 3rd parameter to .css will automatically\n\n // attempt a parseFloat and fallback to a string if the parse fails\n\n // so, simple values such as \"10px\" are parsed to Float.\n\n // complex values such as \"rotate(1rad)\" are returned as is.\n\n result = jQuery.css( tween.elem, tween.prop, \"\" );\n\n // Empty strings, null, undefined and \"auto\" are converted to 0.\n\n return !result || result === \"auto\" ? 
0 : result;\n\n },\n\n set: function( tween ) {\n\n // use step hook for back compat - use cssHook if its there - use .style if its\n\n // available and use plain properties where available\n\n if ( jQuery.fx.step[ tween.prop ] ) {\n\n jQuery.fx.step[ tween.prop ]( tween );\n\n } else if ( tween.elem.style && ( tween.elem.style[ jQuery.cssProps[ tween.prop ] ] != null || jQuery.cssHooks[ tween.prop ] ) ) {\n\n jQuery.style( tween.elem, tween.prop, tween.now + tween.unit );\n\n } else {\n\n tween.elem[ tween.prop ] = tween.now;\n\n }\n\n }\n\n }\n\n };\n\n\n\n// Support: IE <=9\n\n// Panic based approach to setting things on disconnected nodes\n\n\n\n Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = {\n\n set: function( tween ) {\n\n if ( tween.elem.nodeType && tween.elem.parentNode ) {\n\n tween.elem[ tween.prop ] = tween.now;\n\n }\n\n }\n\n };\n\n\n\n jQuery.easing = {\n\n linear: function( p ) {\n\n return p;\n\n },\n\n swing: function( p ) {\n\n return 0.5 - Math.cos( p * Math.PI ) / 2;\n\n }\n\n };\n\n\n\n jQuery.fx = Tween.prototype.init;\n\n\n\n// Back Compat <1.8 extension point\n\n jQuery.fx.step = {};\n\n\n\n\n\n\n\n\n\n var\n\n fxNow, timerId,\n\n rfxtypes = /^(?:toggle|show|hide)$/,\n\n rfxnum = new RegExp( \"^(?:([+-])=|)(\" + pnum + \")([a-z%]*)$\", \"i\" ),\n\n rrun = /queueHooks$/,\n\n animationPrefilters = [ defaultPrefilter ],\n\n tweeners = {\n\n \"*\": [ function( prop, value ) {\n\n var tween = this.createTween( prop, value ),\n\n target = tween.cur(),\n\n parts = rfxnum.exec( value ),\n\n unit = parts && parts[ 3 ] || ( jQuery.cssNumber[ prop ] ? 
\"\" : \"px\" ),\n\n\n\n // Starting value computation is required for potential unit mismatches\n\n start = ( jQuery.cssNumber[ prop ] || unit !== \"px\" && +target ) &&\n\n rfxnum.exec( jQuery.css( tween.elem, prop ) ),\n\n scale = 1,\n\n maxIterations = 20;\n\n\n\n if ( start && start[ 3 ] !== unit ) {\n\n // Trust units reported by jQuery.css\n\n unit = unit || start[ 3 ];\n\n\n\n // Make sure we update the tween properties later on\n\n parts = parts || [];\n\n\n\n // Iteratively approximate from a nonzero starting point\n\n start = +target || 1;\n\n\n\n do {\n\n // If previous iteration zeroed out, double until we get *something*\n\n // Use a string for doubling factor so we don't accidentally see scale as unchanged below\n\n scale = scale || \".5\";\n\n\n\n // Adjust and apply\n\n start = start / scale;\n\n jQuery.style( tween.elem, prop, start + unit );\n\n\n\n // Update scale, tolerating zero or NaN from tween.cur()\n\n // And breaking the loop if scale is unchanged or perfect, or if we've just had enough\n\n } while ( scale !== (scale = tween.cur() / target) && scale !== 1 && --maxIterations );\n\n }\n\n\n\n // Update tween properties\n\n if ( parts ) {\n\n start = tween.start = +start || +target || 0;\n\n tween.unit = unit;\n\n // If a +=/-= token was provided, we're doing a relative animation\n\n tween.end = parts[ 1 ] ?\n\n start + ( parts[ 1 ] + 1 ) * parts[ 2 ] :\n\n +parts[ 2 ];\n\n }\n\n\n\n return tween;\n\n } ]\n\n };\n\n\n\n// Animations created synchronously will run synchronously\n\n function createFxNow() {\n\n setTimeout(function() {\n\n fxNow = undefined;\n\n });\n\n return ( fxNow = jQuery.now() );\n\n }\n\n\n\n// Generate parameters to create a standard animation\n\n function genFx( type, includeWidth ) {\n\n var which,\n\n attrs = { height: type },\n\n i = 0;\n\n\n\n // if we include width, step value is 1 to do all cssExpand values,\n\n // if we don't include width, step value is 2 to skip over Left and Right\n\n includeWidth = 
includeWidth ? 1 : 0;\n\n for ( ; i < 4 ; i += 2 - includeWidth ) {\n\n which = cssExpand[ i ];\n\n attrs[ \"margin\" + which ] = attrs[ \"padding\" + which ] = type;\n\n }\n\n\n\n if ( includeWidth ) {\n\n attrs.opacity = attrs.width = type;\n\n }\n\n\n\n return attrs;\n\n }\n\n\n\n function createTween( value, prop, animation ) {\n\n var tween,\n\n collection = ( tweeners[ prop ] || [] ).concat( tweeners[ \"*\" ] ),\n\n index = 0,\n\n length = collection.length;\n\n for ( ; index < length; index++ ) {\n\n if ( (tween = collection[ index ].call( animation, prop, value )) ) {\n\n\n\n // we're done with this property\n\n return tween;\n\n }\n\n }\n\n }\n\n\n\n function defaultPrefilter( elem, props, opts ) {\n\n /* jshint validthis: true */\n\n var prop, value, toggle, tween, hooks, oldfire, display, checkDisplay,\n\n anim = this,\n\n orig = {},\n\n style = elem.style,\n\n hidden = elem.nodeType && isHidden( elem ),\n\n dataShow = jQuery._data( elem, \"fxshow\" );\n\n\n\n // handle queue: false promises\n\n if ( !opts.queue ) {\n\n hooks = jQuery._queueHooks( elem, \"fx\" );\n\n if ( hooks.unqueued == null ) {\n\n hooks.unqueued = 0;\n\n oldfire = hooks.empty.fire;\n\n hooks.empty.fire = function() {\n\n if ( !hooks.unqueued ) {\n\n oldfire();\n\n }\n\n };\n\n }\n\n hooks.unqueued++;\n\n\n\n anim.always(function() {\n\n // doing this makes sure that the complete handler will be called\n\n // before this completes\n\n anim.always(function() {\n\n hooks.unqueued--;\n\n if ( !jQuery.queue( elem, \"fx\" ).length ) {\n\n hooks.empty.fire();\n\n }\n\n });\n\n });\n\n }\n\n\n\n // height/width overflow pass\n\n if ( elem.nodeType === 1 && ( \"height\" in props || \"width\" in props ) ) {\n\n // Make sure that nothing sneaks out\n\n // Record all 3 overflow attributes because IE does not\n\n // change the overflow attribute when overflowX and\n\n // overflowY are set to the same value\n\n opts.overflow = [ style.overflow, style.overflowX, style.overflowY ];\n\n\n\n // Set 
display property to inline-block for height/width\n\n // animations on inline elements that are having width/height animated\n\n display = jQuery.css( elem, \"display\" );\n\n\n\n // Test default display if display is currently \"none\"\n\n checkDisplay = display === \"none\" ?\n\n jQuery._data( elem, \"olddisplay\" ) || defaultDisplay( elem.nodeName ) : display;\n\n\n\n if ( checkDisplay === \"inline\" && jQuery.css( elem, \"float\" ) === \"none\" ) {\n\n\n\n // inline-level elements accept inline-block;\n\n // block-level elements need to be inline with layout\n\n if ( !support.inlineBlockNeedsLayout || defaultDisplay( elem.nodeName ) === \"inline\" ) {\n\n style.display = \"inline-block\";\n\n } else {\n\n style.zoom = 1;\n\n }\n\n }\n\n }\n\n\n\n if ( opts.overflow ) {\n\n style.overflow = \"hidden\";\n\n if ( !support.shrinkWrapBlocks() ) {\n\n anim.always(function() {\n\n style.overflow = opts.overflow[ 0 ];\n\n style.overflowX = opts.overflow[ 1 ];\n\n style.overflowY = opts.overflow[ 2 ];\n\n });\n\n }\n\n }\n\n\n\n // show/hide pass\n\n for ( prop in props ) {\n\n value = props[ prop ];\n\n if ( rfxtypes.exec( value ) ) {\n\n delete props[ prop ];\n\n toggle = toggle || value === \"toggle\";\n\n if ( value === ( hidden ? 
\"hide\" : \"show\" ) ) {\n\n\n\n // If there is dataShow left over from a stopped hide or show and we are going to proceed with show, we should pretend to be hidden\n\n if ( value === \"show\" && dataShow && dataShow[ prop ] !== undefined ) {\n\n hidden = true;\n\n } else {\n\n continue;\n\n }\n\n }\n\n orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop );\n\n\n\n // Any non-fx value stops us from restoring the original display value\n\n } else {\n\n display = undefined;\n\n }\n\n }\n\n\n\n if ( !jQuery.isEmptyObject( orig ) ) {\n\n if ( dataShow ) {\n\n if ( \"hidden\" in dataShow ) {\n\n hidden = dataShow.hidden;\n\n }\n\n } else {\n\n dataShow = jQuery._data( elem, \"fxshow\", {} );\n\n }\n\n\n\n // store state if its toggle - enables .stop().toggle() to \"reverse\"\n\n if ( toggle ) {\n\n dataShow.hidden = !hidden;\n\n }\n\n if ( hidden ) {\n\n jQuery( elem ).show();\n\n } else {\n\n anim.done(function() {\n\n jQuery( elem ).hide();\n\n });\n\n }\n\n anim.done(function() {\n\n var prop;\n\n jQuery._removeData( elem, \"fxshow\" );\n\n for ( prop in orig ) {\n\n jQuery.style( elem, prop, orig[ prop ] );\n\n }\n\n });\n\n for ( prop in orig ) {\n\n tween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim );\n\n\n\n if ( !( prop in dataShow ) ) {\n\n dataShow[ prop ] = tween.start;\n\n if ( hidden ) {\n\n tween.end = tween.start;\n\n tween.start = prop === \"width\" || prop === \"height\" ? 1 : 0;\n\n }\n\n }\n\n }\n\n\n\n // If this is a noop like .hide().hide(), restore an overwritten display value\n\n } else if ( (display === \"none\" ? 
defaultDisplay( elem.nodeName ) : display) === \"inline\" ) {\n\n style.display = display;\n\n }\n\n }\n\n\n\n function propFilter( props, specialEasing ) {\n\n var index, name, easing, value, hooks;\n\n\n\n // camelCase, specialEasing and expand cssHook pass\n\n for ( index in props ) {\n\n name = jQuery.camelCase( index );\n\n easing = specialEasing[ name ];\n\n value = props[ index ];\n\n if ( jQuery.isArray( value ) ) {\n\n easing = value[ 1 ];\n\n value = props[ index ] = value[ 0 ];\n\n }\n\n\n\n if ( index !== name ) {\n\n props[ name ] = value;\n\n delete props[ index ];\n\n }\n\n\n\n hooks = jQuery.cssHooks[ name ];\n\n if ( hooks && \"expand\" in hooks ) {\n\n value = hooks.expand( value );\n\n delete props[ name ];\n\n\n\n // not quite $.extend, this wont overwrite keys already present.\n\n // also - reusing 'index' from above because we have the correct \"name\"\n\n for ( index in value ) {\n\n if ( !( index in props ) ) {\n\n props[ index ] = value[ index ];\n\n specialEasing[ index ] = easing;\n\n }\n\n }\n\n } else {\n\n specialEasing[ name ] = easing;\n\n }\n\n }\n\n }\n\n\n\n function Animation( elem, properties, options ) {\n\n var result,\n\n stopped,\n\n index = 0,\n\n length = animationPrefilters.length,\n\n deferred = jQuery.Deferred().always( function() {\n\n // don't match elem in the :animated selector\n\n delete tick.elem;\n\n }),\n\n tick = function() {\n\n if ( stopped ) {\n\n return false;\n\n }\n\n var currentTime = fxNow || createFxNow(),\n\n remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ),\n\n // archaic crash bug won't allow us to use 1 - ( 0.5 || 0 ) (#12497)\n\n temp = remaining / animation.duration || 0,\n\n percent = 1 - temp,\n\n index = 0,\n\n length = animation.tweens.length;\n\n\n\n for ( ; index < length ; index++ ) {\n\n animation.tweens[ index ].run( percent );\n\n }\n\n\n\n deferred.notifyWith( elem, [ animation, percent, remaining ]);\n\n\n\n if ( percent < 1 && length ) {\n\n return 
remaining;\n\n } else {\n\n deferred.resolveWith( elem, [ animation ] );\n\n return false;\n\n }\n\n },\n\n animation = deferred.promise({\n\n elem: elem,\n\n props: jQuery.extend( {}, properties ),\n\n opts: jQuery.extend( true, { specialEasing: {} }, options ),\n\n originalProperties: properties,\n\n originalOptions: options,\n\n startTime: fxNow || createFxNow(),\n\n duration: options.duration,\n\n tweens: [],\n\n createTween: function( prop, end ) {\n\n var tween = jQuery.Tween( elem, animation.opts, prop, end,\n\n animation.opts.specialEasing[ prop ] || animation.opts.easing );\n\n animation.tweens.push( tween );\n\n return tween;\n\n },\n\n stop: function( gotoEnd ) {\n\n var index = 0,\n\n // if we are going to the end, we want to run all the tweens\n\n // otherwise we skip this part\n\n length = gotoEnd ? animation.tweens.length : 0;\n\n if ( stopped ) {\n\n return this;\n\n }\n\n stopped = true;\n\n for ( ; index < length ; index++ ) {\n\n animation.tweens[ index ].run( 1 );\n\n }\n\n\n\n // resolve when we played the last frame\n\n // otherwise, reject\n\n if ( gotoEnd ) {\n\n deferred.resolveWith( elem, [ animation, gotoEnd ] );\n\n } else {\n\n deferred.rejectWith( elem, [ animation, gotoEnd ] );\n\n }\n\n return this;\n\n }\n\n }),\n\n props = animation.props;\n\n\n\n propFilter( props, animation.opts.specialEasing );\n\n\n\n for ( ; index < length ; index++ ) {\n\n result = animationPrefilters[ index ].call( animation, elem, props, animation.opts );\n\n if ( result ) {\n\n return result;\n\n }\n\n }\n\n\n\n jQuery.map( props, createTween, animation );\n\n\n\n if ( jQuery.isFunction( animation.opts.start ) ) {\n\n animation.opts.start.call( elem, animation );\n\n }\n\n\n\n jQuery.fx.timer(\n\n jQuery.extend( tick, {\n\n elem: elem,\n\n anim: animation,\n\n queue: animation.opts.queue\n\n })\n\n );\n\n\n\n // attach callbacks from options\n\n return animation.progress( animation.opts.progress )\n\n .done( animation.opts.done, animation.opts.complete 
)\n\n .fail( animation.opts.fail )\n\n .always( animation.opts.always );\n\n }\n\n\n\n jQuery.Animation = jQuery.extend( Animation, {\n\n tweener: function( props, callback ) {\n\n if ( jQuery.isFunction( props ) ) {\n\n callback = props;\n\n props = [ \"*\" ];\n\n } else {\n\n props = props.split(\" \");\n\n }\n\n\n\n var prop,\n\n index = 0,\n\n length = props.length;\n\n\n\n for ( ; index < length ; index++ ) {\n\n prop = props[ index ];\n\n tweeners[ prop ] = tweeners[ prop ] || [];\n\n tweeners[ prop ].unshift( callback );\n\n }\n\n },\n\n\n\n prefilter: function( callback, prepend ) {\n\n if ( prepend ) {\n\n animationPrefilters.unshift( callback );\n\n } else {\n\n animationPrefilters.push( callback );\n\n }\n\n }\n\n });\n\n\n\n jQuery.speed = function( speed, easing, fn ) {\n\n var opt = speed && typeof speed === \"object\" ? jQuery.extend( {}, speed ) : {\n\n complete: fn || !fn && easing ||\n\n jQuery.isFunction( speed ) && speed,\n\n duration: speed,\n\n easing: fn && easing || easing && !jQuery.isFunction( easing ) && easing\n\n };\n\n\n\n opt.duration = jQuery.fx.off ? 0 : typeof opt.duration === \"number\" ? opt.duration :\n\n opt.duration in jQuery.fx.speeds ? 
jQuery.fx.speeds[ opt.duration ] : jQuery.fx.speeds._default;\n\n\n\n // normalize opt.queue - true/undefined/null -> \"fx\"\n\n if ( opt.queue == null || opt.queue === true ) {\n\n opt.queue = \"fx\";\n\n }\n\n\n\n // Queueing\n\n opt.old = opt.complete;\n\n\n\n opt.complete = function() {\n\n if ( jQuery.isFunction( opt.old ) ) {\n\n opt.old.call( this );\n\n }\n\n\n\n if ( opt.queue ) {\n\n jQuery.dequeue( this, opt.queue );\n\n }\n\n };\n\n\n\n return opt;\n\n };\n\n\n\n jQuery.fn.extend({\n\n fadeTo: function( speed, to, easing, callback ) {\n\n\n\n // show any hidden elements after setting opacity to 0\n\n return this.filter( isHidden ).css( \"opacity\", 0 ).show()\n\n\n\n // animate to the value specified\n\n .end().animate({ opacity: to }, speed, easing, callback );\n\n },\n\n animate: function( prop, speed, easing, callback ) {\n\n var empty = jQuery.isEmptyObject( prop ),\n\n optall = jQuery.speed( speed, easing, callback ),\n\n doAnimation = function() {\n\n // Operate on a copy of prop so per-property easing won't be lost\n\n var anim = Animation( this, jQuery.extend( {}, prop ), optall );\n\n\n\n // Empty animations, or finishing resolves immediately\n\n if ( empty || jQuery._data( this, \"finish\" ) ) {\n\n anim.stop( true );\n\n }\n\n };\n\n doAnimation.finish = doAnimation;\n\n\n\n return empty || optall.queue === false ?\n\n this.each( doAnimation ) :\n\n this.queue( optall.queue, doAnimation );\n\n },\n\n stop: function( type, clearQueue, gotoEnd ) {\n\n var stopQueue = function( hooks ) {\n\n var stop = hooks.stop;\n\n delete hooks.stop;\n\n stop( gotoEnd );\n\n };\n\n\n\n if ( typeof type !== \"string\" ) {\n\n gotoEnd = clearQueue;\n\n clearQueue = type;\n\n type = undefined;\n\n }\n\n if ( clearQueue && type !== false ) {\n\n this.queue( type || \"fx\", [] );\n\n }\n\n\n\n return this.each(function() {\n\n var dequeue = true,\n\n index = type != null && type + \"queueHooks\",\n\n timers = jQuery.timers,\n\n data = jQuery._data( this );\n\n\n\n 
if ( index ) {\n\n if ( data[ index ] && data[ index ].stop ) {\n\n stopQueue( data[ index ] );\n\n }\n\n } else {\n\n for ( index in data ) {\n\n if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) {\n\n stopQueue( data[ index ] );\n\n }\n\n }\n\n }\n\n\n\n for ( index = timers.length; index--; ) {\n\n if ( timers[ index ].elem === this && (type == null || timers[ index ].queue === type) ) {\n\n timers[ index ].anim.stop( gotoEnd );\n\n dequeue = false;\n\n timers.splice( index, 1 );\n\n }\n\n }\n\n\n\n // start the next in the queue if the last step wasn't forced\n\n // timers currently will call their complete callbacks, which will dequeue\n\n // but only if they were gotoEnd\n\n if ( dequeue || !gotoEnd ) {\n\n jQuery.dequeue( this, type );\n\n }\n\n });\n\n },\n\n finish: function( type ) {\n\n if ( type !== false ) {\n\n type = type || \"fx\";\n\n }\n\n return this.each(function() {\n\n var index,\n\n data = jQuery._data( this ),\n\n queue = data[ type + \"queue\" ],\n\n hooks = data[ type + \"queueHooks\" ],\n\n timers = jQuery.timers,\n\n length = queue ? 
queue.length : 0;\n\n\n\n // enable finishing flag on private data\n\n data.finish = true;\n\n\n\n // empty the queue first\n\n jQuery.queue( this, type, [] );\n\n\n\n if ( hooks && hooks.stop ) {\n\n hooks.stop.call( this, true );\n\n }\n\n\n\n // look for any active animations, and finish them\n\n for ( index = timers.length; index--; ) {\n\n if ( timers[ index ].elem === this && timers[ index ].queue === type ) {\n\n timers[ index ].anim.stop( true );\n\n timers.splice( index, 1 );\n\n }\n\n }\n\n\n\n // look for any animations in the old queue and finish them\n\n for ( index = 0; index < length; index++ ) {\n\n if ( queue[ index ] && queue[ index ].finish ) {\n\n queue[ index ].finish.call( this );\n\n }\n\n }\n\n\n\n // turn off finishing flag\n\n delete data.finish;\n\n });\n\n }\n\n });\n\n\n\n jQuery.each([ \"toggle\", \"show\", \"hide\" ], function( i, name ) {\n\n var cssFn = jQuery.fn[ name ];\n\n jQuery.fn[ name ] = function( speed, easing, callback ) {\n\n return speed == null || typeof speed === \"boolean\" ?\n\n cssFn.apply( this, arguments ) :\n\n this.animate( genFx( name, true ), speed, easing, callback );\n\n };\n\n });\n\n\n\n// Generate shortcuts for custom animations\n\n jQuery.each({\n\n slideDown: genFx(\"show\"),\n\n slideUp: genFx(\"hide\"),\n\n slideToggle: genFx(\"toggle\"),\n\n fadeIn: { opacity: \"show\" },\n\n fadeOut: { opacity: \"hide\" },\n\n fadeToggle: { opacity: \"toggle\" }\n\n }, function( name, props ) {\n\n jQuery.fn[ name ] = function( speed, easing, callback ) {\n\n return this.animate( props, speed, easing, callback );\n\n };\n\n });\n\n\n\n jQuery.timers = [];\n\n jQuery.fx.tick = function() {\n\n var timer,\n\n timers = jQuery.timers,\n\n i = 0;\n\n\n\n fxNow = jQuery.now();\n\n\n\n for ( ; i < timers.length; i++ ) {\n\n timer = timers[ i ];\n\n // Checks the timer has not already been removed\n\n if ( !timer() && timers[ i ] === timer ) {\n\n timers.splice( i--, 1 );\n\n }\n\n }\n\n\n\n if ( !timers.length ) {\n\n 
jQuery.fx.stop();\n\n }\n\n fxNow = undefined;\n\n };\n\n\n\n jQuery.fx.timer = function( timer ) {\n\n jQuery.timers.push( timer );\n\n if ( timer() ) {\n\n jQuery.fx.start();\n\n } else {\n\n jQuery.timers.pop();\n\n }\n\n };\n\n\n\n jQuery.fx.interval = 13;\n\n\n\n jQuery.fx.start = function() {\n\n if ( !timerId ) {\n\n timerId = setInterval( jQuery.fx.tick, jQuery.fx.interval );\n\n }\n\n };\n\n\n\n jQuery.fx.stop = function() {\n\n clearInterval( timerId );\n\n timerId = null;\n\n };\n\n\n\n jQuery.fx.speeds = {\n\n slow: 600,\n\n fast: 200,\n\n // Default speed\n\n _default: 400\n\n };\n\n\n\n\n\n// Based off of the plugin by Clint Helfers, with permission.\n\n// http://blindsignals.com/index.php/2009/07/jquery-delay/\n\n jQuery.fn.delay = function( time, type ) {\n\n time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time;\n\n type = type || \"fx\";\n\n\n\n return this.queue( type, function( next, hooks ) {\n\n var timeout = setTimeout( next, time );\n\n hooks.stop = function() {\n\n clearTimeout( timeout );\n\n };\n\n });\n\n };\n\n\n\n\n\n (function() {\n\n // Minified: var a,b,c,d,e\n\n var input, div, select, a, opt;\n\n\n\n // Setup\n\n div = document.createElement( \"div\" );\n\n div.setAttribute( \"className\", \"t\" );\n\n div.innerHTML = \" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>\";\n\n a = div.getElementsByTagName(\"a\")[ 0 ];\n\n\n\n // First batch of tests.\n\n select = document.createElement(\"select\");\n\n opt = select.appendChild( document.createElement(\"option\") );\n\n input = div.getElementsByTagName(\"input\")[ 0 ];\n\n\n\n a.style.cssText = \"top:1px\";\n\n\n\n // Test setAttribute on camelCase class. 
If it works, we need attrFixes when doing get/setAttribute (ie6/7)\n\n support.getSetAttribute = div.className !== \"t\";\n\n\n\n // Get the style information from getAttribute\n\n // (IE uses .cssText instead)\n\n support.style = /top/.test( a.getAttribute(\"style\") );\n\n\n\n // Make sure that URLs aren't manipulated\n\n // (IE normalizes it by default)\n\n support.hrefNormalized = a.getAttribute(\"href\") === \"/a\";\n\n\n\n // Check the default checkbox/radio value (\"\" on WebKit; \"on\" elsewhere)\n\n support.checkOn = !!input.value;\n\n\n\n // Make sure that a selected-by-default option has a working selected property.\n\n // (WebKit defaults to false instead of true, IE too, if it's in an optgroup)\n\n support.optSelected = opt.selected;\n\n\n\n // Tests for enctype support on a form (#6743)\n\n support.enctype = !!document.createElement(\"form\").enctype;\n\n\n\n // Make sure that the options inside disabled selects aren't marked as disabled\n\n // (WebKit marks them as disabled)\n\n select.disabled = true;\n\n support.optDisabled = !opt.disabled;\n\n\n\n // Support: IE8 only\n\n // Check if we can trust getAttribute(\"value\")\n\n input = document.createElement( \"input\" );\n\n input.setAttribute( \"value\", \"\" );\n\n support.input = input.getAttribute( \"value\" ) === \"\";\n\n\n\n // Check if an input maintains its value after becoming a radio\n\n input.value = \"t\";\n\n input.setAttribute( \"type\", \"radio\" );\n\n support.radioValue = input.value === \"t\";\n\n })();\n\n\n\n\n\n var rreturn = /\\r/g;\n\n\n\n jQuery.fn.extend({\n\n val: function( value ) {\n\n var hooks, ret, isFunction,\n\n elem = this[0];\n\n\n\n if ( !arguments.length ) {\n\n if ( elem ) {\n\n hooks = jQuery.valHooks[ elem.type ] || jQuery.valHooks[ elem.nodeName.toLowerCase() ];\n\n\n\n if ( hooks && \"get\" in hooks && (ret = hooks.get( elem, \"value\" )) !== undefined ) {\n\n return ret;\n\n }\n\n\n\n ret = elem.value;\n\n\n\n return typeof ret === \"string\" ?\n\n // 
handle most common string cases\n\n ret.replace(rreturn, \"\") :\n\n // handle cases where value is null/undef or number\n\n ret == null ? \"\" : ret;\n\n }\n\n\n\n return;\n\n }\n\n\n\n isFunction = jQuery.isFunction( value );\n\n\n\n return this.each(function( i ) {\n\n var val;\n\n\n\n if ( this.nodeType !== 1 ) {\n\n return;\n\n }\n\n\n\n if ( isFunction ) {\n\n val = value.call( this, i, jQuery( this ).val() );\n\n } else {\n\n val = value;\n\n }\n\n\n\n // Treat null/undefined as \"\"; convert numbers to string\n\n if ( val == null ) {\n\n val = \"\";\n\n } else if ( typeof val === \"number\" ) {\n\n val += \"\";\n\n } else if ( jQuery.isArray( val ) ) {\n\n val = jQuery.map( val, function( value ) {\n\n return value == null ? \"\" : value + \"\";\n\n });\n\n }\n\n\n\n hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ];\n\n\n\n // If set returns undefined, fall back to normal setting\n\n if ( !hooks || !(\"set\" in hooks) || hooks.set( this, val, \"value\" ) === undefined ) {\n\n this.value = val;\n\n }\n\n });\n\n }\n\n });\n\n\n\n jQuery.extend({\n\n valHooks: {\n\n option: {\n\n get: function( elem ) {\n\n var val = jQuery.find.attr( elem, \"value\" );\n\n return val != null ?\n\n val :\n\n // Support: IE10-11+\n\n // option.text throws exceptions (#14686, #14858)\n\n jQuery.trim( jQuery.text( elem ) );\n\n }\n\n },\n\n select: {\n\n get: function( elem ) {\n\n var value, option,\n\n options = elem.options,\n\n index = elem.selectedIndex,\n\n one = elem.type === \"select-one\" || index < 0,\n\n values = one ? null : [],\n\n max = one ? index + 1 : options.length,\n\n i = index < 0 ?\n\n max :\n\n one ? 
index : 0;\n\n\n\n // Loop through all the selected options\n\n for ( ; i < max; i++ ) {\n\n option = options[ i ];\n\n\n\n // oldIE doesn't update selected after form reset (#2551)\n\n if ( ( option.selected || i === index ) &&\n\n // Don't return options that are disabled or in a disabled optgroup\n\n ( support.optDisabled ? !option.disabled : option.getAttribute(\"disabled\") === null ) &&\n\n ( !option.parentNode.disabled || !jQuery.nodeName( option.parentNode, \"optgroup\" ) ) ) {\n\n\n\n // Get the specific value for the option\n\n value = jQuery( option ).val();\n\n\n\n // We don't need an array for one selects\n\n if ( one ) {\n\n return value;\n\n }\n\n\n\n // Multi-Selects return an array\n\n values.push( value );\n\n }\n\n }\n\n\n\n return values;\n\n },\n\n\n\n set: function( elem, value ) {\n\n var optionSet, option,\n\n options = elem.options,\n\n values = jQuery.makeArray( value ),\n\n i = options.length;\n\n\n\n while ( i-- ) {\n\n option = options[ i ];\n\n\n\n if ( jQuery.inArray( jQuery.valHooks.option.get( option ), values ) >= 0 ) {\n\n\n\n // Support: IE6\n\n // When new option element is added to select box we need to\n\n // force reflow of newly added node in order to workaround delay\n\n // of initialization properties\n\n try {\n\n option.selected = optionSet = true;\n\n\n\n } catch ( _ ) {\n\n\n\n // Will be executed only in IE6\n\n option.scrollHeight;\n\n }\n\n\n\n } else {\n\n option.selected = false;\n\n }\n\n }\n\n\n\n // Force browsers to behave consistently when non-matching value is set\n\n if ( !optionSet ) {\n\n elem.selectedIndex = -1;\n\n }\n\n\n\n return options;\n\n }\n\n }\n\n }\n\n });\n\n\n\n// Radios and checkboxes getter/setter\n\n jQuery.each([ \"radio\", \"checkbox\" ], function() {\n\n jQuery.valHooks[ this ] = {\n\n set: function( elem, value ) {\n\n if ( jQuery.isArray( value ) ) {\n\n return ( elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0 );\n\n }\n\n }\n\n };\n\n if ( !support.checkOn ) {\n\n 
jQuery.valHooks[ this ].get = function( elem ) {\n\n // Support: Webkit\n\n // \"\" is returned instead of \"on\" if a value isn't specified\n\n return elem.getAttribute(\"value\") === null ? \"on\" : elem.value;\n\n };\n\n }\n\n });\n\n\n\n\n\n\n\n\n\n var nodeHook, boolHook,\n\n attrHandle = jQuery.expr.attrHandle,\n\n ruseDefault = /^(?:checked|selected)$/i,\n\n getSetAttribute = support.getSetAttribute,\n\n getSetInput = support.input;\n\n\n\n jQuery.fn.extend({\n\n attr: function( name, value ) {\n\n return access( this, jQuery.attr, name, value, arguments.length > 1 );\n\n },\n\n\n\n removeAttr: function( name ) {\n\n return this.each(function() {\n\n jQuery.removeAttr( this, name );\n\n });\n\n }\n\n });\n\n\n\n jQuery.extend({\n\n attr: function( elem, name, value ) {\n\n var hooks, ret,\n\n nType = elem.nodeType;\n\n\n\n // don't get/set attributes on text, comment and attribute nodes\n\n if ( !elem || nType === 3 || nType === 8 || nType === 2 ) {\n\n return;\n\n }\n\n\n\n // Fallback to prop when attributes are not supported\n\n if ( typeof elem.getAttribute === strundefined ) {\n\n return jQuery.prop( elem, name, value );\n\n }\n\n\n\n // All attributes are lowercase\n\n // Grab necessary hook if one is defined\n\n if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) {\n\n name = name.toLowerCase();\n\n hooks = jQuery.attrHooks[ name ] ||\n\n ( jQuery.expr.match.bool.test( name ) ? 
boolHook : nodeHook );\n\n }\n\n\n\n if ( value !== undefined ) {\n\n\n\n if ( value === null ) {\n\n jQuery.removeAttr( elem, name );\n\n\n\n } else if ( hooks && \"set\" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) {\n\n return ret;\n\n\n\n } else {\n\n elem.setAttribute( name, value + \"\" );\n\n return value;\n\n }\n\n\n\n } else if ( hooks && \"get\" in hooks && (ret = hooks.get( elem, name )) !== null ) {\n\n return ret;\n\n\n\n } else {\n\n ret = jQuery.find.attr( elem, name );\n\n\n\n // Non-existent attributes return null, we normalize to undefined\n\n return ret == null ?\n\n undefined :\n\n ret;\n\n }\n\n },\n\n\n\n removeAttr: function( elem, value ) {\n\n var name, propName,\n\n i = 0,\n\n attrNames = value && value.match( rnotwhite );\n\n\n\n if ( attrNames && elem.nodeType === 1 ) {\n\n while ( (name = attrNames[i++]) ) {\n\n propName = jQuery.propFix[ name ] || name;\n\n\n\n // Boolean attributes get special treatment (#10870)\n\n if ( jQuery.expr.match.bool.test( name ) ) {\n\n // Set corresponding property to false\n\n if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) {\n\n elem[ propName ] = false;\n\n // Support: IE<9\n\n // Also clear defaultChecked/defaultSelected (if appropriate)\n\n } else {\n\n elem[ jQuery.camelCase( \"default-\" + name ) ] =\n\n elem[ propName ] = false;\n\n }\n\n\n\n // See #9699 for explanation of this approach (setting first, then removal)\n\n } else {\n\n jQuery.attr( elem, name, \"\" );\n\n }\n\n\n\n elem.removeAttribute( getSetAttribute ? 
name : propName );\n\n }\n\n }\n\n },\n\n\n\n attrHooks: {\n\n type: {\n\n set: function( elem, value ) {\n\n if ( !support.radioValue && value === \"radio\" && jQuery.nodeName(elem, \"input\") ) {\n\n // Setting the type on a radio button after the value resets the value in IE6-9\n\n // Reset value to default in case type is set after value during creation\n\n var val = elem.value;\n\n elem.setAttribute( \"type\", value );\n\n if ( val ) {\n\n elem.value = val;\n\n }\n\n return value;\n\n }\n\n }\n\n }\n\n }\n\n });\n\n\n\n// Hook for boolean attributes\n\n boolHook = {\n\n set: function( elem, value, name ) {\n\n if ( value === false ) {\n\n // Remove boolean attributes when set to false\n\n jQuery.removeAttr( elem, name );\n\n } else if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) {\n\n // IE<8 needs the *property* name\n\n elem.setAttribute( !getSetAttribute && jQuery.propFix[ name ] || name, name );\n\n\n\n // Use defaultChecked and defaultSelected for oldIE\n\n } else {\n\n elem[ jQuery.camelCase( \"default-\" + name ) ] = elem[ name ] = true;\n\n }\n\n\n\n return name;\n\n }\n\n };\n\n\n\n// Retrieve booleans specially\n\n jQuery.each( jQuery.expr.match.bool.source.match( /\\w+/g ), function( i, name ) {\n\n\n\n var getter = attrHandle[ name ] || jQuery.find.attr;\n\n\n\n attrHandle[ name ] = getSetInput && getSetAttribute || !ruseDefault.test( name ) ?\n\n function( elem, name, isXML ) {\n\n var ret, handle;\n\n if ( !isXML ) {\n\n // Avoid an infinite loop by temporarily removing this function from the getter\n\n handle = attrHandle[ name ];\n\n attrHandle[ name ] = ret;\n\n ret = getter( elem, name, isXML ) != null ?\n\n name.toLowerCase() :\n\n null;\n\n attrHandle[ name ] = handle;\n\n }\n\n return ret;\n\n } :\n\n function( elem, name, isXML ) {\n\n if ( !isXML ) {\n\n return elem[ jQuery.camelCase( \"default-\" + name ) ] ?\n\n name.toLowerCase() :\n\n null;\n\n }\n\n };\n\n });\n\n\n\n// fix oldIE attroperties\n\n if ( !getSetInput 
|| !getSetAttribute ) {\n\n jQuery.attrHooks.value = {\n\n set: function( elem, value, name ) {\n\n if ( jQuery.nodeName( elem, \"input\" ) ) {\n\n // Does not return so that setAttribute is also used\n\n elem.defaultValue = value;\n\n } else {\n\n // Use nodeHook if defined (#1954); otherwise setAttribute is fine\n\n return nodeHook && nodeHook.set( elem, value, name );\n\n }\n\n }\n\n };\n\n }\n\n\n\n// IE6/7 do not support getting/setting some attributes with get/setAttribute\n\n if ( !getSetAttribute ) {\n\n\n\n // Use this for any attribute in IE6/7\n\n // This fixes almost every IE6/7 issue\n\n nodeHook = {\n\n set: function( elem, value, name ) {\n\n // Set the existing or create a new attribute node\n\n var ret = elem.getAttributeNode( name );\n\n if ( !ret ) {\n\n elem.setAttributeNode(\n\n (ret = elem.ownerDocument.createAttribute( name ))\n\n );\n\n }\n\n\n\n ret.value = value += \"\";\n\n\n\n // Break association with cloned elements by also using setAttribute (#9646)\n\n if ( name === \"value\" || value === elem.getAttribute( name ) ) {\n\n return value;\n\n }\n\n }\n\n };\n\n\n\n // Some attributes are constructed with empty-string values when not defined\n\n attrHandle.id = attrHandle.name = attrHandle.coords =\n\n function( elem, name, isXML ) {\n\n var ret;\n\n if ( !isXML ) {\n\n return (ret = elem.getAttributeNode( name )) && ret.value !== \"\" ?\n\n ret.value :\n\n null;\n\n }\n\n };\n\n\n\n // Fixing value retrieval on a button requires this module\n\n jQuery.valHooks.button = {\n\n get: function( elem, name ) {\n\n var ret = elem.getAttributeNode( name );\n\n if ( ret && ret.specified ) {\n\n return ret.value;\n\n }\n\n },\n\n set: nodeHook.set\n\n };\n\n\n\n // Set contenteditable to false on removals(#10429)\n\n // Setting to empty string throws an error as an invalid value\n\n jQuery.attrHooks.contenteditable = {\n\n set: function( elem, value, name ) {\n\n nodeHook.set( elem, value === \"\" ? 
false : value, name );\n\n }\n\n };\n\n\n\n // Set width and height to auto instead of 0 on empty string( Bug #8150 )\n\n // This is for removals\n\n jQuery.each([ \"width\", \"height\" ], function( i, name ) {\n\n jQuery.attrHooks[ name ] = {\n\n set: function( elem, value ) {\n\n if ( value === \"\" ) {\n\n elem.setAttribute( name, \"auto\" );\n\n return value;\n\n }\n\n }\n\n };\n\n });\n\n }\n\n\n\n if ( !support.style ) {\n\n jQuery.attrHooks.style = {\n\n get: function( elem ) {\n\n // Return undefined in the case of empty string\n\n // Note: IE uppercases css property names, but if we were to .toLowerCase()\n\n // .cssText, that would destroy case senstitivity in URL's, like in \"background\"\n\n return elem.style.cssText || undefined;\n\n },\n\n set: function( elem, value ) {\n\n return ( elem.style.cssText = value + \"\" );\n\n }\n\n };\n\n }\n\n\n\n\n\n\n\n\n\n var rfocusable = /^(?:input|select|textarea|button|object)$/i,\n\n rclickable = /^(?:a|area)$/i;\n\n\n\n jQuery.fn.extend({\n\n prop: function( name, value ) {\n\n return access( this, jQuery.prop, name, value, arguments.length > 1 );\n\n },\n\n\n\n removeProp: function( name ) {\n\n name = jQuery.propFix[ name ] || name;\n\n return this.each(function() {\n\n // try/catch handles cases where IE balks (such as removing a property on window)\n\n try {\n\n this[ name ] = undefined;\n\n delete this[ name ];\n\n } catch( e ) {}\n\n });\n\n }\n\n });\n\n\n\n jQuery.extend({\n\n propFix: {\n\n \"for\": \"htmlFor\",\n\n \"class\": \"className\"\n\n },\n\n\n\n prop: function( elem, name, value ) {\n\n var ret, hooks, notxml,\n\n nType = elem.nodeType;\n\n\n\n // don't get/set properties on text, comment and attribute nodes\n\n if ( !elem || nType === 3 || nType === 8 || nType === 2 ) {\n\n return;\n\n }\n\n\n\n notxml = nType !== 1 || !jQuery.isXMLDoc( elem );\n\n\n\n if ( notxml ) {\n\n // Fix name and attach hooks\n\n name = jQuery.propFix[ name ] || name;\n\n hooks = jQuery.propHooks[ name ];\n\n 
}\n\n\n\n if ( value !== undefined ) {\n\n return hooks && \"set\" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ?\n\n ret :\n\n ( elem[ name ] = value );\n\n\n\n } else {\n\n return hooks && \"get\" in hooks && (ret = hooks.get( elem, name )) !== null ?\n\n ret :\n\n elem[ name ];\n\n }\n\n },\n\n\n\n propHooks: {\n\n tabIndex: {\n\n get: function( elem ) {\n\n // elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set\n\n // http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/\n\n // Use proper attribute retrieval(#12072)\n\n var tabindex = jQuery.find.attr( elem, \"tabindex\" );\n\n\n\n return tabindex ?\n\n parseInt( tabindex, 10 ) :\n\n rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ?\n\n 0 :\n\n -1;\n\n }\n\n }\n\n }\n\n });\n\n\n\n// Some attributes require a special call on IE\n\n// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx\n\n if ( !support.hrefNormalized ) {\n\n // href/src property should get the full normalized URL (#10299/#12915)\n\n jQuery.each([ \"href\", \"src\" ], function( i, name ) {\n\n jQuery.propHooks[ name ] = {\n\n get: function( elem ) {\n\n return elem.getAttribute( name, 4 );\n\n }\n\n };\n\n });\n\n }\n\n\n\n// Support: Safari, IE9+\n\n// mis-reports the default selected property of an option\n\n// Accessing the parent's selectedIndex property fixes it\n\n if ( !support.optSelected ) {\n\n jQuery.propHooks.selected = {\n\n get: function( elem ) {\n\n var parent = elem.parentNode;\n\n\n\n if ( parent ) {\n\n parent.selectedIndex;\n\n\n\n // Make sure that it also works with optgroups, see #5701\n\n if ( parent.parentNode ) {\n\n parent.parentNode.selectedIndex;\n\n }\n\n }\n\n return null;\n\n }\n\n };\n\n }\n\n\n\n jQuery.each([\n\n \"tabIndex\",\n\n \"readOnly\",\n\n \"maxLength\",\n\n \"cellSpacing\",\n\n \"cellPadding\",\n\n \"rowSpan\",\n\n \"colSpan\",\n\n \"useMap\",\n\n 
\"frameBorder\",\n\n \"contentEditable\"\n\n ], function() {\n\n jQuery.propFix[ this.toLowerCase() ] = this;\n\n });\n\n\n\n// IE6/7 call enctype encoding\n\n if ( !support.enctype ) {\n\n jQuery.propFix.enctype = \"encoding\";\n\n }\n\n\n\n\n\n\n\n\n\n var rclass = /[\\t\\r\\n\\f]/g;\n\n\n\n jQuery.fn.extend({\n\n addClass: function( value ) {\n\n var classes, elem, cur, clazz, j, finalValue,\n\n i = 0,\n\n len = this.length,\n\n proceed = typeof value === \"string\" && value;\n\n\n\n if ( jQuery.isFunction( value ) ) {\n\n return this.each(function( j ) {\n\n jQuery( this ).addClass( value.call( this, j, this.className ) );\n\n });\n\n }\n\n\n\n if ( proceed ) {\n\n // The disjunction here is for better compressibility (see removeClass)\n\n classes = ( value || \"\" ).match( rnotwhite ) || [];\n\n\n\n for ( ; i < len; i++ ) {\n\n elem = this[ i ];\n\n cur = elem.nodeType === 1 && ( elem.className ?\n\n ( \" \" + elem.className + \" \" ).replace( rclass, \" \" ) :\n\n \" \"\n\n );\n\n\n\n if ( cur ) {\n\n j = 0;\n\n while ( (clazz = classes[j++]) ) {\n\n if ( cur.indexOf( \" \" + clazz + \" \" ) < 0 ) {\n\n cur += clazz + \" \";\n\n }\n\n }\n\n\n\n // only assign if different to avoid unneeded rendering.\n\n finalValue = jQuery.trim( cur );\n\n if ( elem.className !== finalValue ) {\n\n elem.className = finalValue;\n\n }\n\n }\n\n }\n\n }\n\n\n\n return this;\n\n },\n\n\n\n removeClass: function( value ) {\n\n var classes, elem, cur, clazz, j, finalValue,\n\n i = 0,\n\n len = this.length,\n\n proceed = arguments.length === 0 || typeof value === \"string\" && value;\n\n\n\n if ( jQuery.isFunction( value ) ) {\n\n return this.each(function( j ) {\n\n jQuery( this ).removeClass( value.call( this, j, this.className ) );\n\n });\n\n }\n\n if ( proceed ) {\n\n classes = ( value || \"\" ).match( rnotwhite ) || [];\n\n\n\n for ( ; i < len; i++ ) {\n\n elem = this[ i ];\n\n // This expression is here for better compressibility (see addClass)\n\n cur = elem.nodeType === 1 
&& ( elem.className ?\n\n ( \" \" + elem.className + \" \" ).replace( rclass, \" \" ) :\n\n \"\"\n\n );\n\n\n\n if ( cur ) {\n\n j = 0;\n\n while ( (clazz = classes[j++]) ) {\n\n // Remove *all* instances\n\n while ( cur.indexOf( \" \" + clazz + \" \" ) >= 0 ) {\n\n cur = cur.replace( \" \" + clazz + \" \", \" \" );\n\n }\n\n }\n\n\n\n // only assign if different to avoid unneeded rendering.\n\n finalValue = value ? jQuery.trim( cur ) : \"\";\n\n if ( elem.className !== finalValue ) {\n\n elem.className = finalValue;\n\n }\n\n }\n\n }\n\n }\n\n\n\n return this;\n\n },\n\n\n\n toggleClass: function( value, stateVal ) {\n\n var type = typeof value;\n\n\n\n if ( typeof stateVal === \"boolean\" && type === \"string\" ) {\n\n return stateVal ? this.addClass( value ) : this.removeClass( value );\n\n }\n\n\n\n if ( jQuery.isFunction( value ) ) {\n\n return this.each(function( i ) {\n\n jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal );\n\n });\n\n }\n\n\n\n return this.each(function() {\n\n if ( type === \"string\" ) {\n\n // toggle individual class names\n\n var className,\n\n i = 0,\n\n self = jQuery( this ),\n\n classNames = value.match( rnotwhite ) || [];\n\n\n\n while ( (className = classNames[ i++ ]) ) {\n\n // check each className given, space separated list\n\n if ( self.hasClass( className ) ) {\n\n self.removeClass( className );\n\n } else {\n\n self.addClass( className );\n\n }\n\n }\n\n\n\n // Toggle whole class name\n\n } else if ( type === strundefined || type === \"boolean\" ) {\n\n if ( this.className ) {\n\n // store className if set\n\n jQuery._data( this, \"__className__\", this.className );\n\n }\n\n\n\n // If the element has a class name or if we're passed \"false\",\n\n // then remove the whole classname (if there was one, the above saved it).\n\n // Otherwise bring back whatever was previously saved (if anything),\n\n // falling back to the empty string if nothing was stored.\n\n this.className = this.className || 
value === false ? \"\" : jQuery._data( this, \"__className__\" ) || \"\";\n\n }\n\n });\n\n },\n\n\n\n hasClass: function( selector ) {\n\n var className = \" \" + selector + \" \",\n\n i = 0,\n\n l = this.length;\n\n for ( ; i < l; i++ ) {\n\n if ( this[i].nodeType === 1 && (\" \" + this[i].className + \" \").replace(rclass, \" \").indexOf( className ) >= 0 ) {\n\n return true;\n\n }\n\n }\n\n\n\n return false;\n\n }\n\n });\n\n\n\n\n\n\n\n\n\n// Return jQuery for attributes-only inclusion\n\n\n\n\n\n jQuery.each( (\"blur focus focusin focusout load resize scroll unload click dblclick \" +\n\n \"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave \" +\n\n \"change select submit keydown keypress keyup error contextmenu\").split(\" \"), function( i, name ) {\n\n\n\n // Handle event binding\n\n jQuery.fn[ name ] = function( data, fn ) {\n\n return arguments.length > 0 ?\n\n this.on( name, null, data, fn ) :\n\n this.trigger( name );\n\n };\n\n });\n\n\n\n jQuery.fn.extend({\n\n hover: function( fnOver, fnOut ) {\n\n return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );\n\n },\n\n\n\n bind: function( types, data, fn ) {\n\n return this.on( types, null, data, fn );\n\n },\n\n unbind: function( types, fn ) {\n\n return this.off( types, null, fn );\n\n },\n\n\n\n delegate: function( selector, types, data, fn ) {\n\n return this.on( types, selector, data, fn );\n\n },\n\n undelegate: function( selector, types, fn ) {\n\n // ( namespace ) or ( selector, types [, fn] )\n\n return arguments.length === 1 ? 
this.off( selector, \"**\" ) : this.off( types, selector || \"**\", fn );\n\n }\n\n });\n\n\n\n\n\n var nonce = jQuery.now();\n\n\n\n var rquery = (/\\?/);\n\n\n\n\n\n\n\n var rvalidtokens = /(,)|(\\[|{)|(}|])|\"(?:[^\"\\\\\\r\\n]|\\\\[\"\\\\\\/bfnrt]|\\\\u[\\da-fA-F]{4})*\"\\s*:?|true|false|null|-?(?!0\\d)\\d+(?:\\.\\d+|)(?:[eE][+-]?\\d+|)/g;\n\n\n\n jQuery.parseJSON = function( data ) {\n\n // Attempt to parse using the native JSON parser first\n\n if ( window.JSON && window.JSON.parse ) {\n\n // Support: Android 2.3\n\n // Workaround failure to string-cast null input\n\n return window.JSON.parse( data + \"\" );\n\n }\n\n\n\n var requireNonComma,\n\n depth = null,\n\n str = jQuery.trim( data + \"\" );\n\n\n\n // Guard against invalid (and possibly dangerous) input by ensuring that nothing remains\n\n // after removing valid tokens\n\n return str && !jQuery.trim( str.replace( rvalidtokens, function( token, comma, open, close ) {\n\n\n\n // Force termination if we see a misplaced comma\n\n if ( requireNonComma && comma ) {\n\n depth = 0;\n\n }\n\n\n\n // Perform no more replacements after returning to outermost depth\n\n if ( depth === 0 ) {\n\n return token;\n\n }\n\n\n\n // Commas must not follow \"[\", \"{\", or \",\"\n\n requireNonComma = open || comma;\n\n\n\n // Determine new depth\n\n // array/object open (\"[\" or \"{\"): depth += true - false (increment)\n\n // array/object close (\"]\" or \"}\"): depth += false - true (decrement)\n\n // other cases (\",\" or primitive): depth += true - true (numeric cast)\n\n depth += !close - !open;\n\n\n\n // Remove this token\n\n return \"\";\n\n }) ) ?\n\n ( Function( \"return \" + str ) )() :\n\n jQuery.error( \"Invalid JSON: \" + data );\n\n };\n\n\n\n\n\n// Cross-browser xml parsing\n\n jQuery.parseXML = function( data ) {\n\n var xml, tmp;\n\n if ( !data || typeof data !== \"string\" ) {\n\n return null;\n\n }\n\n try {\n\n if ( window.DOMParser ) { // Standard\n\n tmp = new DOMParser();\n\n xml = 
tmp.parseFromString( data, \"text/xml\" );\n\n } else { // IE\n\n xml = new ActiveXObject( \"Microsoft.XMLDOM\" );\n\n xml.async = \"false\";\n\n xml.loadXML( data );\n\n }\n\n } catch( e ) {\n\n xml = undefined;\n\n }\n\n if ( !xml || !xml.documentElement || xml.getElementsByTagName( \"parsererror\" ).length ) {\n\n jQuery.error( \"Invalid XML: \" + data );\n\n }\n\n return xml;\n\n };\n\n\n\n\n\n var\n\n // Document location\n\n ajaxLocParts,\n\n ajaxLocation,\n\n\n\n rhash = /#.*$/,\n\n rts = /([?&])_=[^&]*/,\n\n rheaders = /^(.*?):[ \\t]*([^\\r\\n]*)\\r?$/mg, // IE leaves an \\r character at EOL\n\n // #7653, #8125, #8152: local protocol detection\n\n rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/,\n\n rnoContent = /^(?:GET|HEAD)$/,\n\n rprotocol = /^\\/\\//,\n\n rurl = /^([\\w.+-]+:)(?:\\/\\/(?:[^\\/?#]*@|)([^\\/?#:]*)(?::(\\d+)|)|)/,\n\n\n\n /* Prefilters\n\n * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example)\n\n * 2) These are called:\n\n * - BEFORE asking for a transport\n\n * - AFTER param serialization (s.data is a string if s.processData is true)\n\n * 3) key is the dataType\n\n * 4) the catchall symbol \"*\" can be used\n\n * 5) execution will start with transport dataType and THEN continue down to \"*\" if needed\n\n */\n\n prefilters = {},\n\n\n\n /* Transports bindings\n\n * 1) key is the dataType\n\n * 2) the catchall symbol \"*\" can be used\n\n * 3) selection will start with transport dataType and THEN go to \"*\" if needed\n\n */\n\n transports = {},\n\n\n\n // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression\n\n allTypes = \"*/\".concat(\"*\");\n\n\n\n// #8138, IE may throw an exception when accessing\n\n// a field from window.location if document.domain has been set\n\n try {\n\n ajaxLocation = location.href;\n\n } catch( e ) {\n\n // Use the href attribute of an A element\n\n // since IE will modify it given document.location\n\n ajaxLocation = 
document.createElement( \"a\" );\n\n ajaxLocation.href = \"\";\n\n ajaxLocation = ajaxLocation.href;\n\n }\n\n\n\n// Segment location into parts\n\n ajaxLocParts = rurl.exec( ajaxLocation.toLowerCase() ) || [];\n\n\n\n// Base \"constructor\" for jQuery.ajaxPrefilter and jQuery.ajaxTransport\n\n function addToPrefiltersOrTransports( structure ) {\n\n\n\n // dataTypeExpression is optional and defaults to \"*\"\n\n return function( dataTypeExpression, func ) {\n\n\n\n if ( typeof dataTypeExpression !== \"string\" ) {\n\n func = dataTypeExpression;\n\n dataTypeExpression = \"*\";\n\n }\n\n\n\n var dataType,\n\n i = 0,\n\n dataTypes = dataTypeExpression.toLowerCase().match( rnotwhite ) || [];\n\n\n\n if ( jQuery.isFunction( func ) ) {\n\n // For each dataType in the dataTypeExpression\n\n while ( (dataType = dataTypes[i++]) ) {\n\n // Prepend if requested\n\n if ( dataType.charAt( 0 ) === \"+\" ) {\n\n dataType = dataType.slice( 1 ) || \"*\";\n\n (structure[ dataType ] = structure[ dataType ] || []).unshift( func );\n\n\n\n // Otherwise append\n\n } else {\n\n (structure[ dataType ] = structure[ dataType ] || []).push( func );\n\n }\n\n }\n\n }\n\n };\n\n }\n\n\n\n// Base inspection function for prefilters and transports\n\n function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) {\n\n\n\n var inspected = {},\n\n seekingTransport = ( structure === transports );\n\n\n\n function inspect( dataType ) {\n\n var selected;\n\n inspected[ dataType ] = true;\n\n jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) {\n\n var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR );\n\n if ( typeof dataTypeOrTransport === \"string\" && !seekingTransport && !inspected[ dataTypeOrTransport ] ) {\n\n options.dataTypes.unshift( dataTypeOrTransport );\n\n inspect( dataTypeOrTransport );\n\n return false;\n\n } else if ( seekingTransport ) {\n\n return !( selected = dataTypeOrTransport );\n\n }\n\n });\n\n return 
selected;\n\n }\n\n\n\n return inspect( options.dataTypes[ 0 ] ) || !inspected[ \"*\" ] && inspect( \"*\" );\n\n }\n\n\n\n// A special extend for ajax options\n\n// that takes \"flat\" options (not to be deep extended)\n\n// Fixes #9887\n\n function ajaxExtend( target, src ) {\n\n var deep, key,\n\n flatOptions = jQuery.ajaxSettings.flatOptions || {};\n\n\n\n for ( key in src ) {\n\n if ( src[ key ] !== undefined ) {\n\n ( flatOptions[ key ] ? target : ( deep || (deep = {}) ) )[ key ] = src[ key ];\n\n }\n\n }\n\n if ( deep ) {\n\n jQuery.extend( true, target, deep );\n\n }\n\n\n\n return target;\n\n }\n\n\n\n /* Handles responses to an ajax request:\n\n * - finds the right dataType (mediates between content-type and expected dataType)\n\n * - returns the corresponding response\n\n */\n\n function ajaxHandleResponses( s, jqXHR, responses ) {\n\n var firstDataType, ct, finalDataType, type,\n\n contents = s.contents,\n\n dataTypes = s.dataTypes;\n\n\n\n // Remove auto dataType and get content-type in the process\n\n while ( dataTypes[ 0 ] === \"*\" ) {\n\n dataTypes.shift();\n\n if ( ct === undefined ) {\n\n ct = s.mimeType || jqXHR.getResponseHeader(\"Content-Type\");\n\n }\n\n }\n\n\n\n // Check if we're dealing with a known content-type\n\n if ( ct ) {\n\n for ( type in contents ) {\n\n if ( contents[ type ] && contents[ type ].test( ct ) ) {\n\n dataTypes.unshift( type );\n\n break;\n\n }\n\n }\n\n }\n\n\n\n // Check to see if we have a response for the expected dataType\n\n if ( dataTypes[ 0 ] in responses ) {\n\n finalDataType = dataTypes[ 0 ];\n\n } else {\n\n // Try convertible dataTypes\n\n for ( type in responses ) {\n\n if ( !dataTypes[ 0 ] || s.converters[ type + \" \" + dataTypes[0] ] ) {\n\n finalDataType = type;\n\n break;\n\n }\n\n if ( !firstDataType ) {\n\n firstDataType = type;\n\n }\n\n }\n\n // Or just use first one\n\n finalDataType = finalDataType || firstDataType;\n\n }\n\n\n\n // If we found a dataType\n\n // We add the dataType to the list 
if needed\n\n // and return the corresponding response\n\n if ( finalDataType ) {\n\n if ( finalDataType !== dataTypes[ 0 ] ) {\n\n dataTypes.unshift( finalDataType );\n\n }\n\n return responses[ finalDataType ];\n\n }\n\n }\n\n\n\n /* Chain conversions given the request and the original response\n\n * Also sets the responseXXX fields on the jqXHR instance\n\n */\n\n function ajaxConvert( s, response, jqXHR, isSuccess ) {\n\n var conv2, current, conv, tmp, prev,\n\n converters = {},\n\n // Work with a copy of dataTypes in case we need to modify it for conversion\n\n dataTypes = s.dataTypes.slice();\n\n\n\n // Create converters map with lowercased keys\n\n if ( dataTypes[ 1 ] ) {\n\n for ( conv in s.converters ) {\n\n converters[ conv.toLowerCase() ] = s.converters[ conv ];\n\n }\n\n }\n\n\n\n current = dataTypes.shift();\n\n\n\n // Convert to each sequential dataType\n\n while ( current ) {\n\n\n\n if ( s.responseFields[ current ] ) {\n\n jqXHR[ s.responseFields[ current ] ] = response;\n\n }\n\n\n\n // Apply the dataFilter if provided\n\n if ( !prev && isSuccess && s.dataFilter ) {\n\n response = s.dataFilter( response, s.dataType );\n\n }\n\n\n\n prev = current;\n\n current = dataTypes.shift();\n\n\n\n if ( current ) {\n\n\n\n // There's only work to do if current dataType is non-auto\n\n if ( current === \"*\" ) {\n\n\n\n current = prev;\n\n\n\n // Convert response if prev dataType is non-auto and differs from current\n\n } else if ( prev !== \"*\" && prev !== current ) {\n\n\n\n // Seek a direct converter\n\n conv = converters[ prev + \" \" + current ] || converters[ \"* \" + current ];\n\n\n\n // If none found, seek a pair\n\n if ( !conv ) {\n\n for ( conv2 in converters ) {\n\n\n\n // If conv2 outputs current\n\n tmp = conv2.split( \" \" );\n\n if ( tmp[ 1 ] === current ) {\n\n\n\n // If prev can be converted to accepted input\n\n conv = converters[ prev + \" \" + tmp[ 0 ] ] ||\n\n converters[ \"* \" + tmp[ 0 ] ];\n\n if ( conv ) {\n\n // Condense equivalence 
converters\n\n if ( conv === true ) {\n\n conv = converters[ conv2 ];\n\n\n\n // Otherwise, insert the intermediate dataType\n\n } else if ( converters[ conv2 ] !== true ) {\n\n current = tmp[ 0 ];\n\n dataTypes.unshift( tmp[ 1 ] );\n\n }\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Apply converter (if not an equivalence)\n\n if ( conv !== true ) {\n\n\n\n // Unless errors are allowed to bubble, catch and return them\n\n if ( conv && s[ \"throws\" ] ) {\n\n response = conv( response );\n\n } else {\n\n try {\n\n response = conv( response );\n\n } catch ( e ) {\n\n return { state: \"parsererror\", error: conv ? e : \"No conversion from \" + prev + \" to \" + current };\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n return { state: \"success\", data: response };\n\n }\n\n\n\n jQuery.extend({\n\n\n\n // Counter for holding the number of active queries\n\n active: 0,\n\n\n\n // Last-Modified header cache for next request\n\n lastModified: {},\n\n etag: {},\n\n\n\n ajaxSettings: {\n\n url: ajaxLocation,\n\n type: \"GET\",\n\n isLocal: rlocalProtocol.test( ajaxLocParts[ 1 ] ),\n\n global: true,\n\n processData: true,\n\n async: true,\n\n contentType: \"application/x-www-form-urlencoded; charset=UTF-8\",\n\n /*\n\n timeout: 0,\n\n data: null,\n\n dataType: null,\n\n username: null,\n\n password: null,\n\n cache: null,\n\n throws: false,\n\n traditional: false,\n\n headers: {},\n\n */\n\n\n\n accepts: {\n\n \"*\": allTypes,\n\n text: \"text/plain\",\n\n html: \"text/html\",\n\n xml: \"application/xml, text/xml\",\n\n json: \"application/json, text/javascript\"\n\n },\n\n\n\n contents: {\n\n xml: /xml/,\n\n html: /html/,\n\n json: /json/\n\n },\n\n\n\n responseFields: {\n\n xml: \"responseXML\",\n\n text: \"responseText\",\n\n json: \"responseJSON\"\n\n },\n\n\n\n // Data converters\n\n // Keys separate source (or catchall \"*\") and destination types with a single space\n\n converters: {\n\n\n\n // Convert anything to text\n\n \"* text\": String,\n\n\n\n // Text to html 
(true = no transformation)\n\n \"text html\": true,\n\n\n\n // Evaluate text as a json expression\n\n \"text json\": jQuery.parseJSON,\n\n\n\n // Parse text as xml\n\n \"text xml\": jQuery.parseXML\n\n },\n\n\n\n // For options that shouldn't be deep extended:\n\n // you can add your own custom options here if\n\n // and when you create one that shouldn't be\n\n // deep extended (see ajaxExtend)\n\n flatOptions: {\n\n url: true,\n\n context: true\n\n }\n\n },\n\n\n\n // Creates a full fledged settings object into target\n\n // with both ajaxSettings and settings fields.\n\n // If target is omitted, writes into ajaxSettings.\n\n ajaxSetup: function( target, settings ) {\n\n return settings ?\n\n\n\n // Building a settings object\n\n ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) :\n\n\n\n // Extending ajaxSettings\n\n ajaxExtend( jQuery.ajaxSettings, target );\n\n },\n\n\n\n ajaxPrefilter: addToPrefiltersOrTransports( prefilters ),\n\n ajaxTransport: addToPrefiltersOrTransports( transports ),\n\n\n\n // Main method\n\n ajax: function( url, options ) {\n\n\n\n // If url is an object, simulate pre-1.5 signature\n\n if ( typeof url === \"object\" ) {\n\n options = url;\n\n url = undefined;\n\n }\n\n\n\n // Force options to be an object\n\n options = options || {};\n\n\n\n var // Cross-domain detection vars\n\n parts,\n\n // Loop variable\n\n i,\n\n // URL without anti-cache param\n\n cacheURL,\n\n // Response headers as string\n\n responseHeadersString,\n\n // timeout handle\n\n timeoutTimer,\n\n\n\n // To know if global events are to be dispatched\n\n fireGlobals,\n\n\n\n transport,\n\n // Response headers\n\n responseHeaders,\n\n // Create the final options object\n\n s = jQuery.ajaxSetup( {}, options ),\n\n // Callbacks context\n\n callbackContext = s.context || s,\n\n // Context for global events is callbackContext if it is a DOM node or jQuery collection\n\n globalEventContext = s.context && ( callbackContext.nodeType || callbackContext.jquery ) 
?\n\n jQuery( callbackContext ) :\n\n jQuery.event,\n\n // Deferreds\n\n deferred = jQuery.Deferred(),\n\n completeDeferred = jQuery.Callbacks(\"once memory\"),\n\n // Status-dependent callbacks\n\n statusCode = s.statusCode || {},\n\n // Headers (they are sent all at once)\n\n requestHeaders = {},\n\n requestHeadersNames = {},\n\n // The jqXHR state\n\n state = 0,\n\n // Default abort message\n\n strAbort = \"canceled\",\n\n // Fake xhr\n\n jqXHR = {\n\n readyState: 0,\n\n\n\n // Builds headers hashtable if needed\n\n getResponseHeader: function( key ) {\n\n var match;\n\n if ( state === 2 ) {\n\n if ( !responseHeaders ) {\n\n responseHeaders = {};\n\n while ( (match = rheaders.exec( responseHeadersString )) ) {\n\n responseHeaders[ match[1].toLowerCase() ] = match[ 2 ];\n\n }\n\n }\n\n match = responseHeaders[ key.toLowerCase() ];\n\n }\n\n return match == null ? null : match;\n\n },\n\n\n\n // Raw string\n\n getAllResponseHeaders: function() {\n\n return state === 2 ? responseHeadersString : null;\n\n },\n\n\n\n // Caches the header\n\n setRequestHeader: function( name, value ) {\n\n var lname = name.toLowerCase();\n\n if ( !state ) {\n\n name = requestHeadersNames[ lname ] = requestHeadersNames[ lname ] || name;\n\n requestHeaders[ name ] = value;\n\n }\n\n return this;\n\n },\n\n\n\n // Overrides response content-type header\n\n overrideMimeType: function( type ) {\n\n if ( !state ) {\n\n s.mimeType = type;\n\n }\n\n return this;\n\n },\n\n\n\n // Status-dependent callbacks\n\n statusCode: function( map ) {\n\n var code;\n\n if ( map ) {\n\n if ( state < 2 ) {\n\n for ( code in map ) {\n\n // Lazy-add the new callback in a way that preserves old ones\n\n statusCode[ code ] = [ statusCode[ code ], map[ code ] ];\n\n }\n\n } else {\n\n // Execute the appropriate callbacks\n\n jqXHR.always( map[ jqXHR.status ] );\n\n }\n\n }\n\n return this;\n\n },\n\n\n\n // Cancel the request\n\n abort: function( statusText ) {\n\n var finalText = statusText || strAbort;\n\n if 
( transport ) {\n\n transport.abort( finalText );\n\n }\n\n done( 0, finalText );\n\n return this;\n\n }\n\n };\n\n\n\n // Attach deferreds\n\n deferred.promise( jqXHR ).complete = completeDeferred.add;\n\n jqXHR.success = jqXHR.done;\n\n jqXHR.error = jqXHR.fail;\n\n\n\n // Remove hash character (#7531: and string promotion)\n\n // Add protocol if not provided (#5866: IE7 issue with protocol-less urls)\n\n // Handle falsy url in the settings object (#10093: consistency with old signature)\n\n // We also use the url parameter if available\n\n s.url = ( ( url || s.url || ajaxLocation ) + \"\" ).replace( rhash, \"\" ).replace( rprotocol, ajaxLocParts[ 1 ] + \"//\" );\n\n\n\n // Alias method option to type as per ticket #12004\n\n s.type = options.method || options.type || s.method || s.type;\n\n\n\n // Extract dataTypes list\n\n s.dataTypes = jQuery.trim( s.dataType || \"*\" ).toLowerCase().match( rnotwhite ) || [ \"\" ];\n\n\n\n // A cross-domain request is in order when we have a protocol:host:port mismatch\n\n if ( s.crossDomain == null ) {\n\n parts = rurl.exec( s.url.toLowerCase() );\n\n s.crossDomain = !!( parts &&\n\n ( parts[ 1 ] !== ajaxLocParts[ 1 ] || parts[ 2 ] !== ajaxLocParts[ 2 ] ||\n\n ( parts[ 3 ] || ( parts[ 1 ] === \"http:\" ? \"80\" : \"443\" ) ) !==\n\n ( ajaxLocParts[ 3 ] || ( ajaxLocParts[ 1 ] === \"http:\" ? 
\"80\" : \"443\" ) ) )\n\n );\n\n }\n\n\n\n // Convert data if not already a string\n\n if ( s.data && s.processData && typeof s.data !== \"string\" ) {\n\n s.data = jQuery.param( s.data, s.traditional );\n\n }\n\n\n\n // Apply prefilters\n\n inspectPrefiltersOrTransports( prefilters, s, options, jqXHR );\n\n\n\n // If request was aborted inside a prefilter, stop there\n\n if ( state === 2 ) {\n\n return jqXHR;\n\n }\n\n\n\n // We can fire global events as of now if asked to\n\n // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118)\n\n fireGlobals = jQuery.event && s.global;\n\n\n\n // Watch for a new set of requests\n\n if ( fireGlobals && jQuery.active++ === 0 ) {\n\n jQuery.event.trigger(\"ajaxStart\");\n\n }\n\n\n\n // Uppercase the type\n\n s.type = s.type.toUpperCase();\n\n\n\n // Determine if request has content\n\n s.hasContent = !rnoContent.test( s.type );\n\n\n\n // Save the URL in case we're toying with the If-Modified-Since\n\n // and/or If-None-Match header later on\n\n cacheURL = s.url;\n\n\n\n // More options handling for requests with no content\n\n if ( !s.hasContent ) {\n\n\n\n // If data is available, append data to url\n\n if ( s.data ) {\n\n cacheURL = ( s.url += ( rquery.test( cacheURL ) ? \"&\" : \"?\" ) + s.data );\n\n // #9682: remove data so that it's not used in an eventual retry\n\n delete s.data;\n\n }\n\n\n\n // Add anti-cache in url if needed\n\n if ( s.cache === false ) {\n\n s.url = rts.test( cacheURL ) ?\n\n\n\n // If there is already a '_' parameter, set its value\n\n cacheURL.replace( rts, \"$1_=\" + nonce++ ) :\n\n\n\n // Otherwise add one to the end\n\n cacheURL + ( rquery.test( cacheURL ) ? 
\"&\" : \"?\" ) + \"_=\" + nonce++;\n\n }\n\n }\n\n\n\n // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.\n\n if ( s.ifModified ) {\n\n if ( jQuery.lastModified[ cacheURL ] ) {\n\n jqXHR.setRequestHeader( \"If-Modified-Since\", jQuery.lastModified[ cacheURL ] );\n\n }\n\n if ( jQuery.etag[ cacheURL ] ) {\n\n jqXHR.setRequestHeader( \"If-None-Match\", jQuery.etag[ cacheURL ] );\n\n }\n\n }\n\n\n\n // Set the correct header, if data is being sent\n\n if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) {\n\n jqXHR.setRequestHeader( \"Content-Type\", s.contentType );\n\n }\n\n\n\n // Set the Accepts header for the server, depending on the dataType\n\n jqXHR.setRequestHeader(\n\n \"Accept\",\n\n s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[0] ] ?\n\n s.accepts[ s.dataTypes[0] ] + ( s.dataTypes[ 0 ] !== \"*\" ? \", \" + allTypes + \"; q=0.01\" : \"\" ) :\n\n s.accepts[ \"*\" ]\n\n );\n\n\n\n // Check for headers option\n\n for ( i in s.headers ) {\n\n jqXHR.setRequestHeader( i, s.headers[ i ] );\n\n }\n\n\n\n // Allow custom headers/mimetypes and early abort\n\n if ( s.beforeSend && ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || state === 2 ) ) {\n\n // Abort if not done already and return\n\n return jqXHR.abort();\n\n }\n\n\n\n // aborting is no longer a cancellation\n\n strAbort = \"abort\";\n\n\n\n // Install callbacks on deferreds\n\n for ( i in { success: 1, error: 1, complete: 1 } ) {\n\n jqXHR[ i ]( s[ i ] );\n\n }\n\n\n\n // Get transport\n\n transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR );\n\n\n\n // If no transport, we auto-abort\n\n if ( !transport ) {\n\n done( -1, \"No Transport\" );\n\n } else {\n\n jqXHR.readyState = 1;\n\n\n\n // Send global event\n\n if ( fireGlobals ) {\n\n globalEventContext.trigger( \"ajaxSend\", [ jqXHR, s ] );\n\n }\n\n // Timeout\n\n if ( s.async && s.timeout > 0 ) {\n\n timeoutTimer = setTimeout(function() {\n\n 
jqXHR.abort(\"timeout\");\n\n }, s.timeout );\n\n }\n\n\n\n try {\n\n state = 1;\n\n transport.send( requestHeaders, done );\n\n } catch ( e ) {\n\n // Propagate exception as error if not done\n\n if ( state < 2 ) {\n\n done( -1, e );\n\n // Simply rethrow otherwise\n\n } else {\n\n throw e;\n\n }\n\n }\n\n }\n\n\n\n // Callback for when everything is done\n\n function done( status, nativeStatusText, responses, headers ) {\n\n var isSuccess, success, error, response, modified,\n\n statusText = nativeStatusText;\n\n\n\n // Called once\n\n if ( state === 2 ) {\n\n return;\n\n }\n\n\n\n // State is \"done\" now\n\n state = 2;\n\n\n\n // Clear timeout if it exists\n\n if ( timeoutTimer ) {\n\n clearTimeout( timeoutTimer );\n\n }\n\n\n\n // Dereference transport for early garbage collection\n\n // (no matter how long the jqXHR object will be used)\n\n transport = undefined;\n\n\n\n // Cache response headers\n\n responseHeadersString = headers || \"\";\n\n\n\n // Set readyState\n\n jqXHR.readyState = status > 0 ? 
4 : 0;\n\n\n\n // Determine if successful\n\n isSuccess = status >= 200 && status < 300 || status === 304;\n\n\n\n // Get response data\n\n if ( responses ) {\n\n response = ajaxHandleResponses( s, jqXHR, responses );\n\n }\n\n\n\n // Convert no matter what (that way responseXXX fields are always set)\n\n response = ajaxConvert( s, response, jqXHR, isSuccess );\n\n\n\n // If successful, handle type chaining\n\n if ( isSuccess ) {\n\n\n\n // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.\n\n if ( s.ifModified ) {\n\n modified = jqXHR.getResponseHeader(\"Last-Modified\");\n\n if ( modified ) {\n\n jQuery.lastModified[ cacheURL ] = modified;\n\n }\n\n modified = jqXHR.getResponseHeader(\"etag\");\n\n if ( modified ) {\n\n jQuery.etag[ cacheURL ] = modified;\n\n }\n\n }\n\n\n\n // if no content\n\n if ( status === 204 || s.type === \"HEAD\" ) {\n\n statusText = \"nocontent\";\n\n\n\n // if not modified\n\n } else if ( status === 304 ) {\n\n statusText = \"notmodified\";\n\n\n\n // If we have data, let's convert it\n\n } else {\n\n statusText = response.state;\n\n success = response.data;\n\n error = response.error;\n\n isSuccess = !error;\n\n }\n\n } else {\n\n // We extract error from statusText\n\n // then normalize statusText and status for non-aborts\n\n error = statusText;\n\n if ( status || !statusText ) {\n\n statusText = \"error\";\n\n if ( status < 0 ) {\n\n status = 0;\n\n }\n\n }\n\n }\n\n\n\n // Set data for the fake xhr object\n\n jqXHR.status = status;\n\n jqXHR.statusText = ( nativeStatusText || statusText ) + \"\";\n\n\n\n // Success/Error\n\n if ( isSuccess ) {\n\n deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] );\n\n } else {\n\n deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] );\n\n }\n\n\n\n // Status-dependent callbacks\n\n jqXHR.statusCode( statusCode );\n\n statusCode = undefined;\n\n\n\n if ( fireGlobals ) {\n\n globalEventContext.trigger( isSuccess ? 
\"ajaxSuccess\" : \"ajaxError\",\n\n [ jqXHR, s, isSuccess ? success : error ] );\n\n }\n\n\n\n // Complete\n\n completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] );\n\n\n\n if ( fireGlobals ) {\n\n globalEventContext.trigger( \"ajaxComplete\", [ jqXHR, s ] );\n\n // Handle the global AJAX counter\n\n if ( !( --jQuery.active ) ) {\n\n jQuery.event.trigger(\"ajaxStop\");\n\n }\n\n }\n\n }\n\n\n\n return jqXHR;\n\n },\n\n\n\n getJSON: function( url, data, callback ) {\n\n return jQuery.get( url, data, callback, \"json\" );\n\n },\n\n\n\n getScript: function( url, callback ) {\n\n return jQuery.get( url, undefined, callback, \"script\" );\n\n }\n\n });\n\n\n\n jQuery.each( [ \"get\", \"post\" ], function( i, method ) {\n\n jQuery[ method ] = function( url, data, callback, type ) {\n\n // shift arguments if data argument was omitted\n\n if ( jQuery.isFunction( data ) ) {\n\n type = type || callback;\n\n callback = data;\n\n data = undefined;\n\n }\n\n\n\n return jQuery.ajax({\n\n url: url,\n\n type: method,\n\n dataType: type,\n\n data: data,\n\n success: callback\n\n });\n\n };\n\n });\n\n\n\n\n\n jQuery._evalUrl = function( url ) {\n\n return jQuery.ajax({\n\n url: url,\n\n type: \"GET\",\n\n dataType: \"script\",\n\n async: false,\n\n global: false,\n\n \"throws\": true\n\n });\n\n };\n\n\n\n\n\n jQuery.fn.extend({\n\n wrapAll: function( html ) {\n\n if ( jQuery.isFunction( html ) ) {\n\n return this.each(function(i) {\n\n jQuery(this).wrapAll( html.call(this, i) );\n\n });\n\n }\n\n\n\n if ( this[0] ) {\n\n // The elements to wrap the target around\n\n var wrap = jQuery( html, this[0].ownerDocument ).eq(0).clone(true);\n\n\n\n if ( this[0].parentNode ) {\n\n wrap.insertBefore( this[0] );\n\n }\n\n\n\n wrap.map(function() {\n\n var elem = this;\n\n\n\n while ( elem.firstChild && elem.firstChild.nodeType === 1 ) {\n\n elem = elem.firstChild;\n\n }\n\n\n\n return elem;\n\n }).append( this );\n\n }\n\n\n\n return this;\n\n },\n\n\n\n wrapInner: function( 
html ) {\n\n if ( jQuery.isFunction( html ) ) {\n\n return this.each(function(i) {\n\n jQuery(this).wrapInner( html.call(this, i) );\n\n });\n\n }\n\n\n\n return this.each(function() {\n\n var self = jQuery( this ),\n\n contents = self.contents();\n\n\n\n if ( contents.length ) {\n\n contents.wrapAll( html );\n\n\n\n } else {\n\n self.append( html );\n\n }\n\n });\n\n },\n\n\n\n wrap: function( html ) {\n\n var isFunction = jQuery.isFunction( html );\n\n\n\n return this.each(function(i) {\n\n jQuery( this ).wrapAll( isFunction ? html.call(this, i) : html );\n\n });\n\n },\n\n\n\n unwrap: function() {\n\n return this.parent().each(function() {\n\n if ( !jQuery.nodeName( this, \"body\" ) ) {\n\n jQuery( this ).replaceWith( this.childNodes );\n\n }\n\n }).end();\n\n }\n\n });\n\n\n\n\n\n jQuery.expr.filters.hidden = function( elem ) {\n\n // Support: Opera <= 12.12\n\n // Opera reports offsetWidths and offsetHeights less than zero on some elements\n\n return elem.offsetWidth <= 0 && elem.offsetHeight <= 0 ||\n\n (!support.reliableHiddenOffsets() &&\n\n ((elem.style && elem.style.display) || jQuery.css( elem, \"display\" )) === \"none\");\n\n };\n\n\n\n jQuery.expr.filters.visible = function( elem ) {\n\n return !jQuery.expr.filters.hidden( elem );\n\n };\n\n\n\n\n\n\n\n\n\n var r20 = /%20/g,\n\n rbracket = /\\[\\]$/,\n\n rCRLF = /\\r?\\n/g,\n\n rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i,\n\n rsubmittable = /^(?:input|select|textarea|keygen)/i;\n\n\n\n function buildParams( prefix, obj, traditional, add ) {\n\n var name;\n\n\n\n if ( jQuery.isArray( obj ) ) {\n\n // Serialize array item.\n\n jQuery.each( obj, function( i, v ) {\n\n if ( traditional || rbracket.test( prefix ) ) {\n\n // Treat each array item as a scalar.\n\n add( prefix, v );\n\n\n\n } else {\n\n // Item is non-scalar (array or object), encode its numeric index.\n\n buildParams( prefix + \"[\" + ( typeof v === \"object\" ? 
i : \"\" ) + \"]\", v, traditional, add );\n\n }\n\n });\n\n\n\n } else if ( !traditional && jQuery.type( obj ) === \"object\" ) {\n\n // Serialize object item.\n\n for ( name in obj ) {\n\n buildParams( prefix + \"[\" + name + \"]\", obj[ name ], traditional, add );\n\n }\n\n\n\n } else {\n\n // Serialize scalar item.\n\n add( prefix, obj );\n\n }\n\n }\n\n\n\n// Serialize an array of form elements or a set of\n\n// key/values into a query string\n\n jQuery.param = function( a, traditional ) {\n\n var prefix,\n\n s = [],\n\n add = function( key, value ) {\n\n // If value is a function, invoke it and return its value\n\n value = jQuery.isFunction( value ) ? value() : ( value == null ? \"\" : value );\n\n s[ s.length ] = encodeURIComponent( key ) + \"=\" + encodeURIComponent( value );\n\n };\n\n\n\n // Set traditional to true for jQuery <= 1.3.2 behavior.\n\n if ( traditional === undefined ) {\n\n traditional = jQuery.ajaxSettings && jQuery.ajaxSettings.traditional;\n\n }\n\n\n\n // If an array was passed in, assume that it is an array of form elements.\n\n if ( jQuery.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) {\n\n // Serialize the form elements\n\n jQuery.each( a, function() {\n\n add( this.name, this.value );\n\n });\n\n\n\n } else {\n\n // If traditional, encode the \"old\" way (the way 1.3.2 or older\n\n // did it), otherwise encode params recursively.\n\n for ( prefix in a ) {\n\n buildParams( prefix, a[ prefix ], traditional, add );\n\n }\n\n }\n\n\n\n // Return the resulting serialization\n\n return s.join( \"&\" ).replace( r20, \"+\" );\n\n };\n\n\n\n jQuery.fn.extend({\n\n serialize: function() {\n\n return jQuery.param( this.serializeArray() );\n\n },\n\n serializeArray: function() {\n\n return this.map(function() {\n\n // Can add propHook for \"elements\" to filter or add form elements\n\n var elements = jQuery.prop( this, \"elements\" );\n\n return elements ? 
jQuery.makeArray( elements ) : this;\n\n })\n\n .filter(function() {\n\n var type = this.type;\n\n // Use .is(\":disabled\") so that fieldset[disabled] works\n\n return this.name && !jQuery( this ).is( \":disabled\" ) &&\n\n rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) &&\n\n ( this.checked || !rcheckableType.test( type ) );\n\n })\n\n .map(function( i, elem ) {\n\n var val = jQuery( this ).val();\n\n\n\n return val == null ?\n\n null :\n\n jQuery.isArray( val ) ?\n\n jQuery.map( val, function( val ) {\n\n return { name: elem.name, value: val.replace( rCRLF, \"\\r\\n\" ) };\n\n }) :\n\n { name: elem.name, value: val.replace( rCRLF, \"\\r\\n\" ) };\n\n }).get();\n\n }\n\n });\n\n\n\n\n\n// Create the request object\n\n// (This is still attached to ajaxSettings for backward compatibility)\n\n jQuery.ajaxSettings.xhr = window.ActiveXObject !== undefined ?\n\n // Support: IE6+\n\n function() {\n\n\n\n // XHR cannot access local files, always use ActiveX for that case\n\n return !this.isLocal &&\n\n\n\n // Support: IE7-8\n\n // oldIE XHR does not support non-RFC2616 methods (#13240)\n\n // See http://msdn.microsoft.com/en-us/library/ie/ms536648(v=vs.85).aspx\n\n // and http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9\n\n // Although this check for six methods instead of eight\n\n // since IE also does not support \"trace\" and \"connect\"\n\n /^(get|post|head|put|delete|options)$/i.test( this.type ) &&\n\n\n\n createStandardXHR() || createActiveXHR();\n\n } :\n\n // For all other browsers, use the standard XMLHttpRequest object\n\n createStandardXHR;\n\n\n\n var xhrId = 0,\n\n xhrCallbacks = {},\n\n xhrSupported = jQuery.ajaxSettings.xhr();\n\n\n\n// Support: IE<10\n\n// Open requests must be manually aborted on unload (#5280)\n\n// See https://support.microsoft.com/kb/2856746 for more info\n\n if ( window.attachEvent ) {\n\n window.attachEvent( \"onunload\", function() {\n\n for ( var key in xhrCallbacks ) {\n\n xhrCallbacks[ key ]( 
undefined, true );\n\n }\n\n });\n\n }\n\n\n\n// Determine support properties\n\n support.cors = !!xhrSupported && ( \"withCredentials\" in xhrSupported );\n\n xhrSupported = support.ajax = !!xhrSupported;\n\n\n\n// Create transport if the browser can provide an xhr\n\n if ( xhrSupported ) {\n\n\n\n jQuery.ajaxTransport(function( options ) {\n\n // Cross domain only allowed if supported through XMLHttpRequest\n\n if ( !options.crossDomain || support.cors ) {\n\n\n\n var callback;\n\n\n\n return {\n\n send: function( headers, complete ) {\n\n var i,\n\n xhr = options.xhr(),\n\n id = ++xhrId;\n\n\n\n // Open the socket\n\n xhr.open( options.type, options.url, options.async, options.username, options.password );\n\n\n\n // Apply custom fields if provided\n\n if ( options.xhrFields ) {\n\n for ( i in options.xhrFields ) {\n\n xhr[ i ] = options.xhrFields[ i ];\n\n }\n\n }\n\n\n\n // Override mime type if needed\n\n if ( options.mimeType && xhr.overrideMimeType ) {\n\n xhr.overrideMimeType( options.mimeType );\n\n }\n\n\n\n // X-Requested-With header\n\n // For cross-domain requests, seeing as conditions for a preflight are\n\n // akin to a jigsaw puzzle, we simply never set it to be sure.\n\n // (it can always be set on a per-request basis or even using ajaxSetup)\n\n // For same-domain requests, won't change header if already provided.\n\n if ( !options.crossDomain && !headers[\"X-Requested-With\"] ) {\n\n headers[\"X-Requested-With\"] = \"XMLHttpRequest\";\n\n }\n\n\n\n // Set headers\n\n for ( i in headers ) {\n\n // Support: IE<9\n\n // IE's ActiveXObject throws a 'Type Mismatch' exception when setting\n\n // request header to a null-value.\n\n //\n\n // To keep consistent with other XHR implementations, cast the value\n\n // to string and ignore `undefined`.\n\n if ( headers[ i ] !== undefined ) {\n\n xhr.setRequestHeader( i, headers[ i ] + \"\" );\n\n }\n\n }\n\n\n\n // Do send the request\n\n // This may raise an exception which is actually\n\n // handled in 
jQuery.ajax (so no try/catch here)\n\n xhr.send( ( options.hasContent && options.data ) || null );\n\n\n\n // Listener\n\n callback = function( _, isAbort ) {\n\n var status, statusText, responses;\n\n\n\n // Was never called and is aborted or complete\n\n if ( callback && ( isAbort || xhr.readyState === 4 ) ) {\n\n // Clean up\n\n delete xhrCallbacks[ id ];\n\n callback = undefined;\n\n xhr.onreadystatechange = jQuery.noop;\n\n\n\n // Abort manually if needed\n\n if ( isAbort ) {\n\n if ( xhr.readyState !== 4 ) {\n\n xhr.abort();\n\n }\n\n } else {\n\n responses = {};\n\n status = xhr.status;\n\n\n\n // Support: IE<10\n\n // Accessing binary-data responseText throws an exception\n\n // (#11426)\n\n if ( typeof xhr.responseText === \"string\" ) {\n\n responses.text = xhr.responseText;\n\n }\n\n\n\n // Firefox throws an exception when accessing\n\n // statusText for faulty cross-domain requests\n\n try {\n\n statusText = xhr.statusText;\n\n } catch( e ) {\n\n // We normalize with Webkit giving an empty statusText\n\n statusText = \"\";\n\n }\n\n\n\n // Filter status for non standard behaviors\n\n\n\n // If the request is local and we have data: assume a success\n\n // (success with no data won't get notified, that's the best we\n\n // can do given current implementations)\n\n if ( !status && options.isLocal && !options.crossDomain ) {\n\n status = responses.text ? 
200 : 404;\n\n // IE - #1450: sometimes returns 1223 when it should be 204\n\n } else if ( status === 1223 ) {\n\n status = 204;\n\n }\n\n }\n\n }\n\n\n\n // Call complete if needed\n\n if ( responses ) {\n\n complete( status, statusText, responses, xhr.getAllResponseHeaders() );\n\n }\n\n };\n\n\n\n if ( !options.async ) {\n\n // if we're in sync mode we fire the callback\n\n callback();\n\n } else if ( xhr.readyState === 4 ) {\n\n // (IE6 & IE7) if it's in cache and has been\n\n // retrieved directly we need to fire the callback\n\n setTimeout( callback );\n\n } else {\n\n // Add to the list of active xhr callbacks\n\n xhr.onreadystatechange = xhrCallbacks[ id ] = callback;\n\n }\n\n },\n\n\n\n abort: function() {\n\n if ( callback ) {\n\n callback( undefined, true );\n\n }\n\n }\n\n };\n\n }\n\n });\n\n }\n\n\n\n// Functions to create xhrs\n\n function createStandardXHR() {\n\n try {\n\n return new window.XMLHttpRequest();\n\n } catch( e ) {}\n\n }\n\n\n\n function createActiveXHR() {\n\n try {\n\n return new window.ActiveXObject( \"Microsoft.XMLHTTP\" );\n\n } catch( e ) {}\n\n }\n\n\n\n\n\n\n\n\n\n// Install script dataType\n\n jQuery.ajaxSetup({\n\n accepts: {\n\n script: \"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript\"\n\n },\n\n contents: {\n\n script: /(?:java|ecma)script/\n\n },\n\n converters: {\n\n \"text script\": function( text ) {\n\n jQuery.globalEval( text );\n\n return text;\n\n }\n\n }\n\n });\n\n\n\n// Handle cache's special case and global\n\n jQuery.ajaxPrefilter( \"script\", function( s ) {\n\n if ( s.cache === undefined ) {\n\n s.cache = false;\n\n }\n\n if ( s.crossDomain ) {\n\n s.type = \"GET\";\n\n s.global = false;\n\n }\n\n });\n\n\n\n// Bind script tag hack transport\n\n jQuery.ajaxTransport( \"script\", function(s) {\n\n\n\n // This transport only deals with cross domain requests\n\n if ( s.crossDomain ) {\n\n\n\n var script,\n\n head = document.head || jQuery(\"head\")[0] || 
document.documentElement;\n\n\n\n return {\n\n\n\n send: function( _, callback ) {\n\n\n\n script = document.createElement(\"script\");\n\n\n\n script.async = true;\n\n\n\n if ( s.scriptCharset ) {\n\n script.charset = s.scriptCharset;\n\n }\n\n\n\n script.src = s.url;\n\n\n\n // Attach handlers for all browsers\n\n script.onload = script.onreadystatechange = function( _, isAbort ) {\n\n\n\n if ( isAbort || !script.readyState || /loaded|complete/.test( script.readyState ) ) {\n\n\n\n // Handle memory leak in IE\n\n script.onload = script.onreadystatechange = null;\n\n\n\n // Remove the script\n\n if ( script.parentNode ) {\n\n script.parentNode.removeChild( script );\n\n }\n\n\n\n // Dereference the script\n\n script = null;\n\n\n\n // Callback if not abort\n\n if ( !isAbort ) {\n\n callback( 200, \"success\" );\n\n }\n\n }\n\n };\n\n\n\n // Circumvent IE6 bugs with base elements (#2709 and #4378) by prepending\n\n // Use native DOM manipulation to avoid our domManip AJAX trickery\n\n head.insertBefore( script, head.firstChild );\n\n },\n\n\n\n abort: function() {\n\n if ( script ) {\n\n script.onload( undefined, true );\n\n }\n\n }\n\n };\n\n }\n\n });\n\n\n\n\n\n\n\n\n\n var oldCallbacks = [],\n\n rjsonp = /(=)\\?(?=&|$)|\\?\\?/;\n\n\n\n// Default jsonp settings\n\n jQuery.ajaxSetup({\n\n jsonp: \"callback\",\n\n jsonpCallback: function() {\n\n var callback = oldCallbacks.pop() || ( jQuery.expando + \"_\" + ( nonce++ ) );\n\n this[ callback ] = true;\n\n return callback;\n\n }\n\n });\n\n\n\n// Detect, normalize options and install callbacks for jsonp requests\n\n jQuery.ajaxPrefilter( \"json jsonp\", function( s, originalSettings, jqXHR ) {\n\n\n\n var callbackName, overwritten, responseContainer,\n\n jsonProp = s.jsonp !== false && ( rjsonp.test( s.url ) ?\n\n \"url\" :\n\n typeof s.data === \"string\" && !( s.contentType || \"\" ).indexOf(\"application/x-www-form-urlencoded\") && rjsonp.test( s.data ) && \"data\"\n\n );\n\n\n\n // Handle iff the expected data 
type is \"jsonp\" or we have a parameter to set\n\n if ( jsonProp || s.dataTypes[ 0 ] === \"jsonp\" ) {\n\n\n\n // Get callback name, remembering preexisting value associated with it\n\n callbackName = s.jsonpCallback = jQuery.isFunction( s.jsonpCallback ) ?\n\n s.jsonpCallback() :\n\n s.jsonpCallback;\n\n\n\n // Insert callback into url or form data\n\n if ( jsonProp ) {\n\n s[ jsonProp ] = s[ jsonProp ].replace( rjsonp, \"$1\" + callbackName );\n\n } else if ( s.jsonp !== false ) {\n\n s.url += ( rquery.test( s.url ) ? \"&\" : \"?\" ) + s.jsonp + \"=\" + callbackName;\n\n }\n\n\n\n // Use data converter to retrieve json after script execution\n\n s.converters[\"script json\"] = function() {\n\n if ( !responseContainer ) {\n\n jQuery.error( callbackName + \" was not called\" );\n\n }\n\n return responseContainer[ 0 ];\n\n };\n\n\n\n // force json dataType\n\n s.dataTypes[ 0 ] = \"json\";\n\n\n\n // Install callback\n\n overwritten = window[ callbackName ];\n\n window[ callbackName ] = function() {\n\n responseContainer = arguments;\n\n };\n\n\n\n // Clean-up function (fires after converters)\n\n jqXHR.always(function() {\n\n // Restore preexisting value\n\n window[ callbackName ] = overwritten;\n\n\n\n // Save back as free\n\n if ( s[ callbackName ] ) {\n\n // make sure that re-using the options doesn't screw things around\n\n s.jsonpCallback = originalSettings.jsonpCallback;\n\n\n\n // save the callback name for future use\n\n oldCallbacks.push( callbackName );\n\n }\n\n\n\n // Call if it was a function and we have a response\n\n if ( responseContainer && jQuery.isFunction( overwritten ) ) {\n\n overwritten( responseContainer[ 0 ] );\n\n }\n\n\n\n responseContainer = overwritten = undefined;\n\n });\n\n\n\n // Delegate to script\n\n return \"script\";\n\n }\n\n });\n\n\n\n\n\n\n\n\n\n// data: string of html\n\n// context (optional): If specified, the fragment will be created in this context, defaults to document\n\n// keepScripts (optional): If true, will include 
scripts passed in the html string\n\n jQuery.parseHTML = function( data, context, keepScripts ) {\n\n if ( !data || typeof data !== \"string\" ) {\n\n return null;\n\n }\n\n if ( typeof context === \"boolean\" ) {\n\n keepScripts = context;\n\n context = false;\n\n }\n\n context = context || document;\n\n\n\n var parsed = rsingleTag.exec( data ),\n\n scripts = !keepScripts && [];\n\n\n\n // Single tag\n\n if ( parsed ) {\n\n return [ context.createElement( parsed[1] ) ];\n\n }\n\n\n\n parsed = jQuery.buildFragment( [ data ], context, scripts );\n\n\n\n if ( scripts && scripts.length ) {\n\n jQuery( scripts ).remove();\n\n }\n\n\n\n return jQuery.merge( [], parsed.childNodes );\n\n };\n\n\n\n\n\n// Keep a copy of the old load method\n\n var _load = jQuery.fn.load;\n\n\n\n /**\n\n * Load a url into a page\n\n */\n\n jQuery.fn.load = function( url, params, callback ) {\n\n if ( typeof url !== \"string\" && _load ) {\n\n return _load.apply( this, arguments );\n\n }\n\n\n\n var selector, response, type,\n\n self = this,\n\n off = url.indexOf(\" \");\n\n\n\n if ( off >= 0 ) {\n\n selector = jQuery.trim( url.slice( off, url.length ) );\n\n url = url.slice( 0, off );\n\n }\n\n\n\n // If it's a function\n\n if ( jQuery.isFunction( params ) ) {\n\n\n\n // We assume that it's the callback\n\n callback = params;\n\n params = undefined;\n\n\n\n // Otherwise, build a param string\n\n } else if ( params && typeof params === \"object\" ) {\n\n type = \"POST\";\n\n }\n\n\n\n // If we have elements to modify, make the request\n\n if ( self.length > 0 ) {\n\n jQuery.ajax({\n\n url: url,\n\n\n\n // if \"type\" variable is undefined, then \"GET\" method will be used\n\n type: type,\n\n dataType: \"html\",\n\n data: params\n\n }).done(function( responseText ) {\n\n\n\n // Save response for use in complete callback\n\n response = arguments;\n\n\n\n self.html( selector ?\n\n\n\n // If a selector was specified, locate the right elements in a dummy div\n\n // Exclude scripts to avoid IE 
'Permission Denied' errors\n\n jQuery(\"<div>\").append( jQuery.parseHTML( responseText ) ).find( selector ) :\n\n\n\n // Otherwise use the full result\n\n responseText );\n\n\n\n }).complete( callback && function( jqXHR, status ) {\n\n self.each( callback, response || [ jqXHR.responseText, status, jqXHR ] );\n\n });\n\n }\n\n\n\n return this;\n\n };\n\n\n\n\n\n\n\n\n\n// Attach a bunch of functions for handling common AJAX events\n\n jQuery.each( [ \"ajaxStart\", \"ajaxStop\", \"ajaxComplete\", \"ajaxError\", \"ajaxSuccess\", \"ajaxSend\" ], function( i, type ) {\n\n jQuery.fn[ type ] = function( fn ) {\n\n return this.on( type, fn );\n\n };\n\n });\n\n\n\n\n\n\n\n\n\n jQuery.expr.filters.animated = function( elem ) {\n\n return jQuery.grep(jQuery.timers, function( fn ) {\n\n return elem === fn.elem;\n\n }).length;\n\n };\n\n\n\n\n\n\n\n\n\n\n\n var docElem = window.document.documentElement;\n\n\n\n /**\n\n * Gets a window from an element\n\n */\n\n function getWindow( elem ) {\n\n return jQuery.isWindow( elem ) ?\n\n elem :\n\n elem.nodeType === 9 ?\n\n elem.defaultView || elem.parentWindow :\n\n false;\n\n }\n\n\n\n jQuery.offset = {\n\n setOffset: function( elem, options, i ) {\n\n var curPosition, curLeft, curCSSTop, curTop, curOffset, curCSSLeft, calculatePosition,\n\n position = jQuery.css( elem, \"position\" ),\n\n curElem = jQuery( elem ),\n\n props = {};\n\n\n\n // set position first, in-case top/left are set even on static elem\n\n if ( position === \"static\" ) {\n\n elem.style.position = \"relative\";\n\n }\n\n\n\n curOffset = curElem.offset();\n\n curCSSTop = jQuery.css( elem, \"top\" );\n\n curCSSLeft = jQuery.css( elem, \"left\" );\n\n calculatePosition = ( position === \"absolute\" || position === \"fixed\" ) &&\n\n jQuery.inArray(\"auto\", [ curCSSTop, curCSSLeft ] ) > -1;\n\n\n\n // need to be able to calculate position if either top or left is auto and position is either absolute or fixed\n\n if ( calculatePosition ) {\n\n curPosition = 
curElem.position();\n\n curTop = curPosition.top;\n\n curLeft = curPosition.left;\n\n } else {\n\n curTop = parseFloat( curCSSTop ) || 0;\n\n curLeft = parseFloat( curCSSLeft ) || 0;\n\n }\n\n\n\n if ( jQuery.isFunction( options ) ) {\n\n options = options.call( elem, i, curOffset );\n\n }\n\n\n\n if ( options.top != null ) {\n\n props.top = ( options.top - curOffset.top ) + curTop;\n\n }\n\n if ( options.left != null ) {\n\n props.left = ( options.left - curOffset.left ) + curLeft;\n\n }\n\n\n\n if ( \"using\" in options ) {\n\n options.using.call( elem, props );\n\n } else {\n\n curElem.css( props );\n\n }\n\n }\n\n };\n\n\n\n jQuery.fn.extend({\n\n offset: function( options ) {\n\n if ( arguments.length ) {\n\n return options === undefined ?\n\n this :\n\n this.each(function( i ) {\n\n jQuery.offset.setOffset( this, options, i );\n\n });\n\n }\n\n\n\n var docElem, win,\n\n box = { top: 0, left: 0 },\n\n elem = this[ 0 ],\n\n doc = elem && elem.ownerDocument;\n\n\n\n if ( !doc ) {\n\n return;\n\n }\n\n\n\n docElem = doc.documentElement;\n\n\n\n // Make sure it's not a disconnected DOM node\n\n if ( !jQuery.contains( docElem, elem ) ) {\n\n return box;\n\n }\n\n\n\n // If we don't have gBCR, just use 0,0 rather than error\n\n // BlackBerry 5, iOS 3 (original iPhone)\n\n if ( typeof elem.getBoundingClientRect !== strundefined ) {\n\n box = elem.getBoundingClientRect();\n\n }\n\n win = getWindow( doc );\n\n return {\n\n top: box.top + ( win.pageYOffset || docElem.scrollTop ) - ( docElem.clientTop || 0 ),\n\n left: box.left + ( win.pageXOffset || docElem.scrollLeft ) - ( docElem.clientLeft || 0 )\n\n };\n\n },\n\n\n\n position: function() {\n\n if ( !this[ 0 ] ) {\n\n return;\n\n }\n\n\n\n var offsetParent, offset,\n\n parentOffset = { top: 0, left: 0 },\n\n elem = this[ 0 ];\n\n\n\n // fixed elements are offset from window (parentOffset = {top:0, left: 0}, because it is its only offset parent\n\n if ( jQuery.css( elem, \"position\" ) === \"fixed\" ) {\n\n // we 
assume that getBoundingClientRect is available when computed position is fixed\n\n offset = elem.getBoundingClientRect();\n\n } else {\n\n // Get *real* offsetParent\n\n offsetParent = this.offsetParent();\n\n\n\n // Get correct offsets\n\n offset = this.offset();\n\n if ( !jQuery.nodeName( offsetParent[ 0 ], \"html\" ) ) {\n\n parentOffset = offsetParent.offset();\n\n }\n\n\n\n // Add offsetParent borders\n\n parentOffset.top += jQuery.css( offsetParent[ 0 ], \"borderTopWidth\", true );\n\n parentOffset.left += jQuery.css( offsetParent[ 0 ], \"borderLeftWidth\", true );\n\n }\n\n\n\n // Subtract parent offsets and element margins\n\n // note: when an element has margin: auto the offsetLeft and marginLeft\n\n // are the same in Safari causing offset.left to incorrectly be 0\n\n return {\n\n top: offset.top - parentOffset.top - jQuery.css( elem, \"marginTop\", true ),\n\n left: offset.left - parentOffset.left - jQuery.css( elem, \"marginLeft\", true)\n\n };\n\n },\n\n\n\n offsetParent: function() {\n\n return this.map(function() {\n\n var offsetParent = this.offsetParent || docElem;\n\n\n\n while ( offsetParent && ( !jQuery.nodeName( offsetParent, \"html\" ) && jQuery.css( offsetParent, \"position\" ) === \"static\" ) ) {\n\n offsetParent = offsetParent.offsetParent;\n\n }\n\n return offsetParent || docElem;\n\n });\n\n }\n\n });\n\n\n\n// Create scrollLeft and scrollTop methods\n\n jQuery.each( { scrollLeft: \"pageXOffset\", scrollTop: \"pageYOffset\" }, function( method, prop ) {\n\n var top = /Y/.test( prop );\n\n\n\n jQuery.fn[ method ] = function( val ) {\n\n return access( this, function( elem, method, val ) {\n\n var win = getWindow( elem );\n\n\n\n if ( val === undefined ) {\n\n return win ? (prop in win) ? win[ prop ] :\n\n win.document.documentElement[ method ] :\n\n elem[ method ];\n\n }\n\n\n\n if ( win ) {\n\n win.scrollTo(\n\n !top ? val : jQuery( win ).scrollLeft(),\n\n top ? 
val : jQuery( win ).scrollTop()\n\n );\n\n\n\n } else {\n\n elem[ method ] = val;\n\n }\n\n }, method, val, arguments.length, null );\n\n };\n\n });\n\n\n\n// Add the top/left cssHooks using jQuery.fn.position\n\n// Webkit bug: https://bugs.webkit.org/show_bug.cgi?id=29084\n\n// getComputedStyle returns percent when specified for top/left/bottom/right\n\n// rather than make the css module depend on the offset module, we just check for it here\n\n jQuery.each( [ \"top\", \"left\" ], function( i, prop ) {\n\n jQuery.cssHooks[ prop ] = addGetHookIf( support.pixelPosition,\n\n function( elem, computed ) {\n\n if ( computed ) {\n\n computed = curCSS( elem, prop );\n\n // if curCSS returns percentage, fallback to offset\n\n return rnumnonpx.test( computed ) ?\n\n jQuery( elem ).position()[ prop ] + \"px\" :\n\n computed;\n\n }\n\n }\n\n );\n\n });\n\n\n\n\n\n// Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods\n\n jQuery.each( { Height: \"height\", Width: \"width\" }, function( name, type ) {\n\n jQuery.each( { padding: \"inner\" + name, content: type, \"\": \"outer\" + name }, function( defaultExtra, funcName ) {\n\n // margin is only for outerHeight, outerWidth\n\n jQuery.fn[ funcName ] = function( margin, value ) {\n\n var chainable = arguments.length && ( defaultExtra || typeof margin !== \"boolean\" ),\n\n extra = defaultExtra || ( margin === true || value === true ? \"margin\" : \"border\" );\n\n\n\n return access( this, function( elem, type, value ) {\n\n var doc;\n\n\n\n if ( jQuery.isWindow( elem ) ) {\n\n // As of 5/8/2012 this will yield incorrect results for Mobile Safari, but there\n\n // isn't a whole lot we can do. 
See pull request at this URL for discussion:\n\n // https://github.com/jquery/jquery/pull/764\n\n return elem.document.documentElement[ \"client\" + name ];\n\n }\n\n\n\n // Get document width or height\n\n if ( elem.nodeType === 9 ) {\n\n doc = elem.documentElement;\n\n\n\n // Either scroll[Width/Height] or offset[Width/Height] or client[Width/Height], whichever is greatest\n\n // unfortunately, this causes bug #3838 in IE6/8 only, but there is currently no good, small way to fix it.\n\n return Math.max(\n\n elem.body[ \"scroll\" + name ], doc[ \"scroll\" + name ],\n\n elem.body[ \"offset\" + name ], doc[ \"offset\" + name ],\n\n doc[ \"client\" + name ]\n\n );\n\n }\n\n\n\n return value === undefined ?\n\n // Get width or height on the element, requesting but not forcing parseFloat\n\n jQuery.css( elem, type, extra ) :\n\n\n\n // Set width or height on the element\n\n jQuery.style( elem, type, value, extra );\n\n }, type, chainable ? margin : undefined, chainable, null );\n\n };\n\n });\n\n });\n\n\n\n\n\n// The number of elements contained in the matched element set\n\n jQuery.fn.size = function() {\n\n return this.length;\n\n };\n\n\n\n jQuery.fn.andSelf = jQuery.fn.addBack;\n\n\n\n\n\n\n\n\n\n// Register as a named AMD module, since jQuery can be concatenated with other\n\n// files that may use define, but not via a proper concatenation script that\n\n// understands anonymous AMD modules. A named AMD is safest and most robust\n\n// way to register. Lowercase jquery is used because AMD module names are\n\n// derived from file names, and jQuery is normally delivered in a lowercase\n\n// file name. Do this after creating the global so that if an AMD module wants\n\n// to call noConflict to hide this version of jQuery, it will work.\n\n\n\n// Note that for maximum portability, libraries that are not jQuery should\n\n// declare themselves as anonymous modules, and avoid setting a global if an\n\n// AMD loader is present. jQuery is a special case. 
For more information, see\n\n// https://github.com/jrburke/requirejs/wiki/Updating-existing-libraries#wiki-anon\n\n\n\n if ( typeof define === \"function\" && define.amd ) {\n\n define( \"jquery\", [], function() {\n\n return jQuery;\n\n });\n\n }\n\n\n\n\n\n\n\n\n\n var\n\n // Map over jQuery in case of overwrite\n\n _jQuery = window.jQuery,\n\n\n\n // Map over the $ in case of overwrite\n\n _$ = window.$;\n\n\n\n jQuery.noConflict = function( deep ) {\n\n if ( window.$ === jQuery ) {\n\n window.$ = _$;\n\n }\n\n\n\n if ( deep && window.jQuery === jQuery ) {\n\n window.jQuery = _jQuery;\n\n }\n\n\n\n return jQuery;\n\n };\n\n\n\n// Expose jQuery and $ identifiers, even in\n\n// AMD (#7102#comment:10, https://github.com/jquery/jquery/pull/557)\n\n// and CommonJS for browser emulators (#13566)\n\n if ( typeof noGlobal === strundefined ) {\n\n window.jQuery = window.$ = jQuery;\n\n }\n\n\n\n\n\n\n\n\n\n return jQuery;\n\n\n\n}));\n", "file_path": "tests/js/jquery.js", "rank": 55, "score": 13279.832052728194 }, { "content": "use error::JsResult;\n\nuse lexer::enums::{TokenType};\n\nuse scope::parser::{Parser, Item};\n\n\n\nmacro_rules! wait {\n\n ($expr:expr) => (match $expr {\n\n Item::Item => return Ok(Item::Item),\n\n Item::None => (),\n\n })\n\n}\n\n\n\n\n\nmacro_rules! none {\n\n ($expr:expr) => (match $expr {\n\n Item::None => return Ok(Item::None),\n\n Item::Item => (),\n\n })\n\n}\n\n\n\nimpl Parser {\n", "file_path": "src/scope/object.rs", "rank": 56, "score": 44.321786055619086 }, { "content": "use error::JsResult;\n\n//use error::error::{Error, ErrorType, SyntaxErrorType};\n\nuse lexer::enums::{TokenType};\n\nuse scope::parser::{Parser, Item};\n\n\n\nmacro_rules! wait {\n\n ($expr:expr) => (match $expr {\n\n Item::None => (),\n\n Item::Item => return Ok(Item::Item),\n\n })\n\n}\n\n\n\nmacro_rules! 
none {\n\n ($expr:expr) => (match $expr {\n\n Item::None => return Ok(Item::None),\n\n Item::Item => (),\n\n })\n\n}\n\n\n\nimpl Parser {\n", "file_path": "src/scope/operators.rs", "rank": 58, "score": 44.014311296395434 }, { "content": "use error::JsResult;\n\n//use error::error::{Error, ErrorType, SyntaxErrorType};\n\nuse lexer::enums::{TokenType};\n\nuse scope::parser::{Parser, Item};\n\n\n\nmacro_rules! wait {\n\n ($expr:expr) => (match $expr {\n\n Item::Item => return Ok(Item::Item),\n\n Item::None => (),\n\n })\n\n}\n\n\n\n\n\nmacro_rules! none {\n\n ($expr:expr) => (match $expr {\n\n Item::None => return Ok(Item::None),\n\n Item::Item => (),\n\n })\n\n}\n\n\n", "file_path": "src/scope/function.rs", "rank": 59, "score": 39.30436976346611 }, { "content": "use error::JsResult;\n\nuse error::error::{Error, ErrorType, SyntaxErrorType};\n\nuse lexer::enums::{TokenType, LiteralType};\n\nuse scope::parser::{Parser, Item};\n\n\n\nmacro_rules! wait {\n\n ($expr:expr) => (match $expr {\n\n Item::Item => return Ok(Item::Item),\n\n Item::None => (),\n\n })\n\n}\n\n\n\n\n\nmacro_rules! 
none {\n\n ($expr:expr) => (match $expr {\n\n Item::None => return Ok(Item::None),\n\n Item::Item => (),\n\n })\n\n}\n\n\n", "file_path": "src/scope/block.rs", "rank": 60, "score": 39.21208896087012 }, { "content": "use error::JsResult;\n\nuse error::error::SyntaxErrorType;\n\nuse lexer::enums::{TokenType};\n\nuse scope::parser::{Parser, Item};\n\n\n\nimpl Parser {\n\n pub fn parse_catch_parameter(&mut self) -> JsResult<Item> {\n\n match self.peek() {\n\n Some(TokenType::Identifier(_)) => {\n\n try!(self.bump());\n\n }\n\n Some(TokenType::LeftBrace) => return Ok(Item::Item),\n\n Some(TokenType::LeftBracket) => return Ok(Item::Item),\n\n Some(t) => {\n\n try!(self.fatal(SyntaxErrorType::Unexpected(t)));\n\n }\n\n None => {\n\n try!(self.fatal(SyntaxErrorType::UnexpectedEOF));\n\n }\n\n }\n", "file_path": "src/scope/try.rs", "rank": 61, "score": 34.699567875757445 }, { "content": "use error::JsResult;\n\nuse error::error::SyntaxErrorType;\n\nuse lexer::enums::{TokenType};\n\nuse scope::parser::{Parser, Item};\n\n\n\nimpl Parser {\n\n pub fn parse_case_clause(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.parse_expr());\n\n try!(self.expect(TokenType::Colon));\n\n try!(self.parse_stmt_list());\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_default_clause(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.expect(TokenType::Colon));\n\n try!(self.parse_stmt_list());\n\n Ok(Item::Item)\n\n }\n", "file_path": "src/scope/switch.rs", "rank": 62, "score": 31.43807252929782 }, { "content": "use error::JsResult;\n\nuse lexer::enums::{LexerMode, CommentType, TokenType};\n\nuse lexer::state::{LexerState};\n\n\n\nimpl LexerState {\n\n fn comment(&mut self) -> JsResult<()> {\n\n let tmp = self.tmp();\n\n try!(self.push(TokenType::CommentLiteral(tmp)));\n\n self.update(LexerMode::None);\n\n Ok(())\n\n }\n\n\n\n pub fn parse_comment(&mut self) -> JsResult<bool> {\n\n loop {\n\n let c = self.current_char();\n\n let t = match self.mode() {\n\n 
LexerMode::Comment(t) => t,\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line())\n\n }\n", "file_path": "src/lexer/mode/comment.rs", "rank": 66, "score": 26.940196295858385 }, { "content": "use error::JsResult;\n\nuse lexer::enums::{LexerMode, NumberType, TokenType, LiteralType};\n\nuse lexer::state::{LexerState};\n\nuse std::str::FromStr;\n\n\n\nimpl LexerState {\n\n fn number(&mut self, t: LiteralType) -> JsResult<()> {\n\n try!(self.push(TokenType::Literal(t)));\n\n self.update(LexerMode::None);\n\n Ok(())\n\n }\n\n\n\n pub fn parse_number(&mut self) -> JsResult<bool> {\n\n let mut handled: bool;\n\n loop {\n\n let c = self.current_char();\n\n let t = match self.mode() {\n\n LexerMode::Number(t) => t,\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line())\n", "file_path": "src/lexer/mode/number.rs", "rank": 67, "score": 26.749286934372847 }, { "content": "use error::JsResult;\n\nuse lexer::enums::{LexerMode, RegexState, TokenType, LiteralType, RegexIdentifier};\n\nuse lexer::state::{LexerState};\n\n\n\nimpl LexerState {\n\n fn regex(&mut self, t: RegexIdentifier) -> JsResult<()> {\n\n let tmp = self.tmp();\n\n try!(self.push(TokenType::Literal(LiteralType::Regex(tmp, t))));\n\n self.update(LexerMode::None);\n\n Ok(())\n\n }\n\n\n\n pub fn parse_regex(&mut self) -> JsResult<bool> {\n\n let mut handled: bool;\n\n loop {\n\n let c = self.current_char();\n\n let t = match self.mode() {\n\n LexerMode::Regex(t) => t,\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line())\n", "file_path": "src/lexer/mode/regex.rs", "rank": 68, "score": 26.273695248142488 }, { "content": " pub fn dump_and_parse_logical_expr(&mut self) -> JsResult<Item> {\n\n 
println!(\"dump_and_parse_logical_expr {:?}\", self.peek());\n\n try!(self.bump());\n\n self.parse_logical_expr(Item::None)\n\n }\n\n\n\n pub fn parse_logical_expr(&mut self, first: Item) -> JsResult<Item> {\n\n println!(\"parse_logical_expr {:?} {:?}\", self.peek(), first);\n\n none!(try!(self.parse_unary_expr(first)));\n\n println!(\" parse_logical_expr {:?}\", self.peek());\n\n loop {\n\n match self.peek() {\n\n Some(TokenType::Plus) => self.dump_and_parse_logical_expr(),\n\n Some(TokenType::Minus) => self.dump_and_parse_logical_expr(),\n\n Some(TokenType::Multiple) => self.dump_and_parse_logical_expr(),\n\n Some(TokenType::Divide) => self.dump_and_parse_logical_expr(),\n\n Some(TokenType::Mod) => self.dump_and_parse_logical_expr(),\n\n Some(TokenType::Exp) => self.dump_and_parse_logical_expr(),\n\n Some(TokenType::LeftShift) => self.dump_and_parse_logical_expr(),\n\n Some(TokenType::RightShift) => self.dump_and_parse_logical_expr(),\n", "file_path": "src/scope/operators.rs", "rank": 69, "score": 25.390790948320586 }, { "content": "impl Parser {\n\n pub fn parse_function(&mut self) -> JsResult<Item> {\n\n println!(\"parse_function {:?}\", self.peek());\n\n if !try!(self.consume(TokenType::Function)) {\n\n return Ok(Item::None)\n\n }\n\n try!(self.consume(TokenType::Multiple));\n\n try!(self.consume_identifier());\n\n try!(self.parse_formal_parameters());\n\n self.parse_block()\n\n }\n\n\n\n pub fn parse_formals_list(&mut self) -> JsResult<Item> {\n\n try!(self.consume_identifier());\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_formal_parameters(&mut self) -> JsResult<Item> {\n\n try!(self.expect(TokenType::LeftParen));\n\n try!(self.parse_formals_list());\n", "file_path": "src/scope/function.rs", "rank": 70, "score": 24.589190531426553 }, { "content": " Some(t) => {\n\n self.last_token = Some(t.clone());\n\n let token = Token::new(t, self.col, self.line);\n\n self.tokens.push(token)\n\n }\n\n None => ()\n\n };\n\n Ok(())\n\n }\n\n\n\n pub fn tokens(&self) -> 
Vec<Token> {\n\n self.tokens.clone()\n\n }\n\n}\n\n\n\nimpl Iterator for LexerState {\n\n type Item = Token;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n //let c = self.next_char();\n", "file_path": "src/lexer/state.rs", "rank": 71, "score": 24.04949041221348 }, { "content": "impl Parser {\n\n\n\n pub fn consume_all_lineterminates(&mut self) -> JsResult<()> {\n\n println!(\"consume_all_lineterminates {:?}\", self.peek());\n\n while try!(self.consume(TokenType::LineTerminate)) {}\n\n Ok(())\n\n }\n\n\n\n pub fn parse_stmt_list(&mut self) -> JsResult<()> {\n\n loop {\n\n println!(\"parse_stmt_list {:?}\", self.peek());\n\n match self.peek() {\n\n None |\n\n Some(TokenType::RightBrace) |\n\n Some(TokenType::Case) |\n\n Some(TokenType::Default) => return Ok(()),\n\n _ => {}\n\n }\n\n\n\n match try!(self.parse_stmt()) {\n", "file_path": "src/scope/block.rs", "rank": 72, "score": 23.94556113675427 }, { "content": "use error::JsResult;\n\nuse error::error::{ErrorType, SyntaxErrorType};\n\nuse lexer::enums::{LexerMode, TokenType, LiteralType};\n\nuse lexer::enums::StringType::*;\n\nuse lexer::state::{LexerState};\n\n\n\nimpl LexerState {\n\n pub fn parse_string(&mut self) -> JsResult<bool> {\n\n loop {\n\n let escaped = self.is_escaped();\n\n let c = self.current_char();\n\n let t = match self.mode() {\n\n LexerMode::String(t) => t,\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line())\n\n }\n\n };\n\n match (c, escaped, t) {\n\n (Some('\"'), true, DoubleQuote) => {\n\n self.escaped(false);\n", "file_path": "src/lexer/mode/string.rs", "rank": 73, "score": 23.815793426497052 }, { "content": "use error::JsResult;\n\nuse lexer::enums::{LexerMode, TokenType, CommentType, RegexState};\n\nuse lexer::state::{LexerState};\n\n\n\nimpl LexerState {\n\n fn punctuator(&mut self, t: TokenType) -> JsResult<()> {\n\n self.update(LexerMode::None);\n\n self.push(t)\n\n }\n\n\n\n 
fn mode_punctuator(&mut self, t: TokenType, i: i32) {\n\n self.update(LexerMode::Punctuator(t, i));\n\n }\n\n\n\n pub fn parse_punctuator(&mut self, c: Option<char>, t: TokenType, i: i32) -> JsResult<bool> {\n\n let handled = match (c, t.clone()) {\n\n (Some('<'), TokenType::SmallThan) => {\n\n self.mode_punctuator(TokenType::LeftShift, 0);\n\n true\n\n }\n", "file_path": "src/lexer/mode/punctuator.rs", "rank": 75, "score": 22.40853987417915 }, { "content": " Item::Item => (),\n\n Item::None => {\n\n try!(self.parse_declaration());\n\n },\n\n }\n\n }\n\n }\n\n\n\n pub fn parse_labelled(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.expect(TokenType::Colon));\n\n match self.peek() {\n\n Some(TokenType::Function) => self.parse_function(),\n\n Some(_) => self.parse_stmt(),\n\n None => {\n\n return Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::UnexpectedEOF), 0, 0, None))\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/scope/block.rs", "rank": 78, "score": 20.680589189815738 }, { "content": " while try!(self.consume(TokenType::Comma)) {\n\n try!(self.parse_formals_list());\n\n }\n\n try!(self.expect(TokenType::RightParen));\n\n Ok(Item::Item)\n\n }\n\n\n\n\n\n pub fn parse_function_expr(&mut self) -> JsResult<Item> {\n\n println!(\"parse_function_expr {:?}\", self.peek());\n\n if !try!(self.consume(TokenType::Function)) {\n\n return Ok(Item::None)\n\n }\n\n try!(self.consume(TokenType::Multiple));\n\n try!(self.consume_identifier());\n\n try!(self.parse_formal_parameters());\n\n self.parse_block()\n\n }\n\n}", "file_path": "src/scope/function.rs", "rank": 79, "score": 20.144198826009067 }, { "content": " Some(TokenType::Const) => self.parse_const(),\n\n Some(TokenType::LineTerminate) => {\n\n try!(self.bump());\n\n println!(\"Warning for LineTerminate\");\n\n Ok(Item::None)\n\n }\n\n Some(t) => Err(Error::new(ErrorType::SyntaxError(SyntaxErrorType::Unexpected(t)), 0, 0, None)),\n\n None => Ok(Item::None)\n\n }\n\n }\n\n\n\n pub fn 
parse_stmt(&mut self) -> JsResult<Item> {\n\n println!(\"parse_stmt {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::LeftBrace) => self.parse_block(),\n\n Some(TokenType::Var) => self.parse_variable(),\n\n Some(TokenType::Semicolon) => self.parse_empty(),\n\n Some(TokenType::If) => self.parse_if(),\n\n\n\n Some(TokenType::While) => self.parse_while(),// Breakable\n", "file_path": "src/scope/block.rs", "rank": 80, "score": 19.698545045232173 }, { "content": " }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n let len = self.s.len() - self.pos;\n\n ((len + 3) / 4, Some(len)) // see the Chars impl for detail\n\n }\n\n}\n\n\n\nimpl JsContext {\n\n pub fn new() -> JsContext {\n\n JsContext {}\n\n }\n\n\n\n pub fn parse(&mut self, str: String) -> Result<(), ErrorType> {\n\n let chars = OwningChars::new(str);\n\n let state = &mut LexerState::new(Box::new(chars.into_iter()));\n\n match state.parse() {\n\n Ok(_)=> (),\n\n Err(err) => return Err(err.error_type),\n\n }\n\n let tokens = state.tokens();\n\n match Parser::from_tokens(tokens) {\n\n Ok(_)=> Ok(()),\n\n Err(err) => Err(err.error_type)\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 81, "score": 19.609228471099478 }, { "content": " pub fn parse_object_literal(&mut self) -> JsResult<Item> {\n\n println!(\"parse_object_literal {:?}\", self.peek());\n\n try!(self.expect(TokenType::LeftBrace));\n\n loop {\n\n try!(self.consume_all_lineterminates());\n\n match try!(self.parse_property_definition()) {\n\n Item::None => {\n\n println!(\"empty\");\n\n break\n\n },\n\n Item::Item => ()\n\n }\n\n if !try!(self.consume(TokenType::Comma)) {\n\n break\n\n }\n\n }\n\n try!(self.consume_all_lineterminates());\n\n try!(self.expect(TokenType::RightBrace));\n\n Ok(Item::Item)\n\n }\n", "file_path": "src/scope/object.rs", "rank": 82, "score": 19.43689149485256 }, { "content": " None => {\n\n try!(self.fatal(SyntaxErrorType::UnexpectedEOF));\n\n }\n\n }\n\n }\n\n\n\n 
try!(self.expect(TokenType::RightBrace));\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_switch(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.expect(TokenType::LeftParen));\n\n try!(self.parse_expr());\n\n try!(self.expect(TokenType::RightParen));\n\n self.parse_case_block()\n\n }\n\n}", "file_path": "src/scope/switch.rs", "rank": 83, "score": 19.41470640491359 }, { "content": "\n\n pub fn parse_property_definition(&mut self) -> JsResult<Item> {\n\n println!(\"parse_property_definition {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::Get) => {\n\n try!(self.bump());\n\n if try!(self.consume(TokenType::Colon)) {\n\n self.parse_assign_expr()\n\n } else {\n\n try!(self.expect_identifier());\n\n try!(self.expect(TokenType::LeftParen));\n\n try!(self.expect(TokenType::RightParen));\n\n self.parse_block()\n\n }\n\n }\n\n Some(TokenType::Set) => {\n\n try!(self.bump());\n\n if try!(self.consume(TokenType::Colon)) {\n\n self.parse_assign_expr()\n\n } else {\n", "file_path": "src/scope/object.rs", "rank": 84, "score": 18.482353932839374 }, { "content": "use error::JsResult;\n\nuse lexer::enums::{LexerMode, TokenType, LiteralType};\n\nuse lexer::state::{LexerState};\n\n\n\nimpl LexerState {\n\n fn raw(&mut self) -> JsResult<()> {\n\n self.update(LexerMode::None);\n\n let tmp = self.tmp();\n\n let tmp = tmp.as_str();\n\n let token = match tmp {\n\n \"var\" => TokenType::Var,\n\n \"if\" => TokenType::If,\n\n \"else\" => TokenType::Else,\n\n \"do\" => TokenType::Do,\n\n \"typeof\" => TokenType::Typeof,\n\n \"switch\" => TokenType::Switch,\n\n \"catch\" => TokenType::Catch,\n\n \"try\" => TokenType::Try,\n\n \"instanceof\" => TokenType::Instanceof,\n\n \"export\" => TokenType::Export,\n", "file_path": "src/lexer/mode/raw.rs", "rank": 85, "score": 18.29590133072623 }, { "content": "//pub mod error;\n\npub mod lexer;\n\npub mod scope;\n\npub mod error;\n\n\n\nuse lexer::enums::TokenType;\n\nuse error::error::{ErrorType};\n\nuse 
lexer::state::{LexerState};\n\nuse scope::parser::Parser;\n\n\n\npub struct JsContext {}\n\n\n", "file_path": "src/lib.rs", "rank": 87, "score": 17.567801190172165 }, { "content": " Some(TokenType::Plus) => self.parse_expr_stmt(),\n\n Some(TokenType::Literal(LiteralType::String(_))) => self.parse_expr_stmt(),\n\n Some(TokenType::Literal(LiteralType::Integer(_))) => self.parse_expr_stmt(),\n\n\n\n Some(TokenType::Identifier(_)) => {\n\n if Some(TokenType::Colon) == self.peek_at(1) {\n\n self.parse_labelled()\n\n } else {\n\n self.parse_expr_stmt()\n\n }\n\n }\n\n Some(TokenType::Yield) => self.parse_yield_expr(),\n\n Some(_) => Ok(Item::None),\n\n None => Ok(Item::None)\n\n }\n\n }\n\n}", "file_path": "src/scope/block.rs", "rank": 88, "score": 17.015831211640062 }, { "content": " }\n\n\n\n pub fn mode(&self) -> LexerMode {\n\n self.mode.clone()\n\n }\n\n\n\n pub fn update(&mut self, t: LexerMode) {\n\n self.mode = t\n\n }\n\n\n\n pub fn last_token(&self) -> Option<TokenType> {\n\n self.last_token.clone()\n\n }\n\n\n\n pub fn push(&mut self, t: TokenType) -> JsResult<()>{\n\n let t = match t {\n\n TokenType::CommentLiteral(_) => None,\n\n TokenType::LineTerminate => {\n\n match self.last_token {\n\n None => None,\n", "file_path": "src/lexer/state.rs", "rank": 89, "score": 16.96749799166365 }, { "content": " try!(self.expect(TokenType::RightParen));\n\n self.parse_block()\n\n } else {\n\n if try!(self.consume(TokenType::Colon)) {\n\n return self.parse_assign_expr()\n\n } else {\n\n Ok(Item::None)\n\n }\n\n }\n\n },\n\n Some(_) => Ok(Item::None),\n\n None => Ok(Item::None),\n\n }\n\n }\n\n}", "file_path": "src/scope/object.rs", "rank": 90, "score": 16.6226772645061 }, { "content": "use lexer::enums::{TokenType};\n\nuse error::error::{CodePos};\n\n\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Clone)]\n\npub struct Token {\n\n pub token: TokenType,\n\n pub col: u32,\n\n pub line: u64\n\n}\n\n\n\nimpl Token {\n\n pub fn new(token: TokenType, col: u32, line: u64) -> Token 
{\n\n Token {\n\n token: token,\n\n col: col,\n\n line: line,\n\n }\n\n }\n\n}\n\n\n\nimpl CodePos for Token {\n\n fn location(&self) -> (u64, u32) {\n\n (self.line, self.col)\n\n }\n\n}\n", "file_path": "src/lexer/token.rs", "rank": 91, "score": 16.533306784974176 }, { "content": " pub fn parse_block(&mut self) -> JsResult<Item> {\n\n println!(\"parse_block {:?}\", self.peek());\n\n try!(self.expect(TokenType::LeftBrace));\n\n try!(self.parse_stmt_list());\n\n try!(self.expect(TokenType::RightBrace));\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_empty(&mut self) -> JsResult<Item> {\n\n println!(\"parse_empty {:?}\", self.peek());\n\n try!(self.bump());\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_declaration(&mut self) -> JsResult<Item> {\n\n println!(\"parse_declaration {:?}\", self.peek());\n\n match self.peek() {\n\n Some(TokenType::Function) => self.parse_function(),\n\n Some(TokenType::Class) => self.parse_class(),\n\n Some(TokenType::Let) => self.parse_let(),\n", "file_path": "src/scope/block.rs", "rank": 93, "score": 15.834187180018562 }, { "content": "}\n\n\n\nimpl LexerState {\n\n pub fn new(input: LexerStateIterator) -> LexerState {\n\n LexerState {\n\n input: input,\n\n tokens: Vec::new(),\n\n mode: LexerMode::None,\n\n tmp: String::new(),\n\n escaped: false,\n\n last_char: None,\n\n last_char_is_unicode: false,\n\n last_token: None,\n\n current_char: None,\n\n col: 1,\n\n line: 1\n\n }\n\n }\n\n\n\n pub fn parse(&mut self) -> JsResult<()> {\n", "file_path": "src/lexer/state.rs", "rank": 94, "score": 15.665248946540569 }, { "content": " Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_try(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.parse_block());\n\n try!(self.consume_all_lineterminates());\n\n if try!(self.consume(TokenType::Catch)) {\n\n try!(self.expect(TokenType::LeftParen));\n\n try!(self.parse_catch_parameter());\n\n try!(self.expect(TokenType::RightParen));\n\n try!(self.parse_block());\n\n 
try!(self.consume_all_lineterminates());\n\n }\n\n\n\n if try!(self.consume(TokenType::Finally)) {\n\n try!(self.parse_block());\n\n try!(self.consume_all_lineterminates());\n\n }\n\n Ok(Item::Item)\n\n }\n\n\n\n pub fn parse_throw(&mut self) -> JsResult<Item> {\n\n try!(self.bump());\n\n try!(self.parse_assign_expr());\n\n try!(self.consume(TokenType::Semicolon));\n\n Ok(Item::Item)\n\n }\n\n}", "file_path": "src/scope/try.rs", "rank": 95, "score": 15.598789144051304 }, { "content": "extern crate js_parser_rs;\n\n\n\nuse js_parser_rs::JsContext;\n\nuse js_parser_rs::error::error::ErrorType;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\n\n\nmacro_rules! test {\n\n ($name:ident, $file:expr) => {\n\n #[test] #[allow(non_snake_case)] fn $name() {\n\n parse(concat!(\"tests/tc39/test/\", $file));\n\n }\n\n }\n\n}\n\n\n\ntest!(language_arguments_object_10_5_1_s, \"language/arguments-object/10.5-1-s.js\");\n\ntest!(language_arguments_object_10_5_1gs, \"language/arguments-object/10.5-1gs.js\");\n\ntest!(language_arguments_object_10_5_7_b_1_s, \"language/arguments-object/10.5-7-b-1-s.js\");\n\ntest!(language_arguments_object_10_5_7_b_2_s, \"language/arguments-object/10.5-7-b-2-s.js\");\n", "file_path": "tests/tc39.rs", "rank": 96, "score": 15.526420658973382 }, { "content": "use lexer::enums::{TokenType, LexerMode};\n\nuse lexer::token::Token;\n\nuse error::JsResult;\n\nuse error::error::{Error, ErrorType, SyntaxErrorType};\n\nuse std::char;\n\n\n\npub type LexerStateIterator = Box<Iterator<Item = char>>;\n\n\n\npub struct LexerState {\n\n input: LexerStateIterator,\n\n tokens: Vec<Token>,\n\n last_token: Option<TokenType>,\n\n mode: LexerMode,\n\n tmp: String,\n\n escaped: bool,\n\n last_char: Option<char>,\n\n current_char: Option<char>,\n\n last_char_is_unicode: bool,\n\n col: u32,\n\n line: u64\n", "file_path": "src/lexer/state.rs", "rank": 97, "score": 15.220087054826706 }, { "content": "//pub mod state;\n\npub mod parser;\n\nmod 
expr;\n\nmod try;\n\nmod switch;\n\nmod block;\n\nmod lefthandsideexpr;\n\nmod function;\n\nmod object;\n\nmod operators;", "file_path": "src/scope/mod.rs", "rank": 98, "score": 15.185828100906228 }, { "content": " Some('\\\\') => {\n\n let unicode = self.read_unicode();\n\n match unicode {\n\n Some(c) => {\n\n println!(\"{:?}\", c);\n\n self.overwrite_current_char_with_unicode(c);\n\n handled = false\n\n }\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line());\n\n }\n\n }\n\n }\n\n _ => {\n\n panic!(\"Unhandled Parser State Reached: {:?}, {:?}, {:?}, col {:?}, line {:?}\", c, self.mode(), self.is_escaped(), self.col(), self.line());\n\n //self.update(LexerMode::EOF);\n\n }\n\n }*/\n\n None\n\n }\n\n}", "file_path": "src/lexer/state.rs", "rank": 99, "score": 14.491256358361301 } ]
Rust
src/lib/runner.rs
jokeyrhyme/tuning
06b8efa15bebb1ddfefddd3e2322a81cce101edc
use std::{ collections::HashMap, sync::{Arc, Mutex}, thread, }; use thiserror::Error as ThisError; use crate::jobs::{self, is_result_done, is_result_settled, Execute, Status}; const MAX_THREADS: usize = 2; #[derive(Debug, ThisError)] pub enum Error { #[error(transparent)] Job { #[from] source: jobs::Error, }, } pub fn run(jobs: Vec<(impl Execute + Send + 'static)>) { let mut results = HashMap::<String, jobs::Result>::new(); jobs.iter().for_each(|job| { if job.needs().is_empty() { results.insert(job.name(), Ok(Status::Pending)); } else { results.insert(job.name(), Ok(Status::Blocked)); } }); let jobs_arc = Arc::new(Mutex::new(jobs)); let results_arc = Arc::new(Mutex::new(results)); let mut handles = Vec::<thread::JoinHandle<_>>::with_capacity(MAX_THREADS); for _ in 0..MAX_THREADS { let my_jobs_arc = jobs_arc.clone(); let my_results_arc = results_arc.clone(); let handle = thread::spawn(move || { loop { let current_job; { let mut my_jobs = my_jobs_arc.lock().unwrap(); let mut my_results = my_results_arc.lock().unwrap(); for job in my_jobs.iter() { let name = job.name(); if !job.when() { my_results.insert(name.clone(), Ok(Status::Skipped)); } } for job in my_jobs.iter() { let name = job.name(); if is_equal_status(my_results.get(&name).unwrap(), &Status::Blocked) && job .needs() .iter() .all(|n| is_result_done(my_results.get(n).unwrap())) { my_results.insert(name, Ok(Status::Pending)); } } if is_all_settled(&my_results) { return; } let index = match my_jobs.iter().enumerate().find(|(_, job)| { let name = job.name(); is_equal_status(my_results.get(&name).unwrap(), &Status::Pending) }) { Some((i, _)) => i, None => { return; } }; current_job = my_jobs.remove(index); let name = current_job.name(); my_results.insert(name.clone(), Ok(Status::InProgress)); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } let name = current_job.name(); let result = current_job.execute(); { let mut my_results = my_results_arc.lock().unwrap(); 
my_results.insert(name.clone(), result); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } } }); handles.push(handle); } for handle in handles { handle.join().expect("worker thread failed"); } } fn is_all_settled(results: &HashMap<String, jobs::Result>) -> bool { results.iter().all(|(_, result)| is_result_settled(result)) } fn is_equal_status(result: &jobs::Result, status: &Status) -> bool { match result { Ok(s) => s == status, Err(_) => false, } } #[cfg(test)] mod tests { use std::time::{Duration, Instant}; use super::*; struct FakeJob { name: String, needs: Vec<String>, result: jobs::Result, sleep: Duration, spy_arc: Arc<Mutex<FakeJobSpy>>, when: bool, } impl Default for FakeJob { fn default() -> Self { Self { name: String::new(), needs: Vec::<String>::new(), result: Ok(jobs::Status::Done), sleep: Duration::from_millis(0), spy_arc: Arc::new(Mutex::new(FakeJobSpy { calls: 0, time: None, })), when: true, } } } impl FakeJob { fn new<S>(name: S, result: jobs::Result) -> (Self, Arc<Mutex<FakeJobSpy>>) where S: AsRef<str>, { let job = FakeJob { name: String::from(name.as_ref()), result, ..Default::default() }; let spy_arc = job.spy_arc.clone(); (job, spy_arc) } } impl Execute for FakeJob { fn execute(&self) -> jobs::Result { thread::sleep(self.sleep); let mut my_spy = self.spy_arc.lock().unwrap(); my_spy.calls += 1; my_spy.time = Some(Instant::now()); result_clone(&self.result) } fn name(&self) -> String { self.name.clone() } fn needs(&self) -> Vec<String> { self.needs.clone() } fn when(&self) -> bool { self.when } } struct FakeJobSpy { calls: usize, time: Option<Instant>, } impl FakeJobSpy { fn assert_called_once(&self) { assert_eq!(self.calls, 1); assert!(self.time.is_some()); } fn assert_never_called(&self) { assert_eq!(self.calls, 0); assert!(self.time.is_none()); } } #[test] fn run_does_not_execute_job_with_false_when_or_needs_job_with_false_when() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); a.when = 
false; let (mut b, b_spy) = FakeJob::new("b", Ok(jobs::Status::Done)); b.needs.push(String::from("a")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); my_a_spy.assert_never_called(); let my_b_spy = b_spy.lock().unwrap(); my_b_spy.assert_never_called(); } #[test] fn run_executes_unordered_jobs() { const MAX_COUNT: usize = 10; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for spy_arc in spy_arcs { let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); } } #[test] fn run_executes_unordered_jobs_concurrently() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", Ok(jobs::Status::Done)); a.sleep = Duration::from_millis(500); b.sleep = Duration::from_millis(500); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a").elapsed() < Duration::from_millis(100)); assert!(my_b_spy.time.expect("b").elapsed() < Duration::from_millis(100)); } #[test] fn run_executes_jobs_with_complex_needs() { const MAX_COUNT: usize = 100; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (mut job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); match i % 10 { 2 => { job.needs = vec![format!("{}", i + 2)]; } 3 => { job.needs = vec![format!("{}", i - 3)]; } 4 => { job.needs = vec![format!("{}", i + 3)]; } 7 => { job.needs = vec![String::from("99")]; } _ => 
{ /* noop */ } } jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for i in 0..MAX_COUNT { let spy_arc = &spy_arcs[i]; let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); match i % 10 { 2 => { let spyx4_arc = &spy_arcs[i + 2]; let spyx4 = spyx4_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx4.time.expect("x7")); } 3 => { let spyx0_arc = &spy_arcs[i - 3]; let spyx0 = spyx0_arc.lock().unwrap(); assert!(spy.time.expect("x3") > spyx0.time.expect("x7")); } 4 => { let spyx7_arc = &spy_arcs[i + 3]; let spyx7 = spyx7_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx7.time.expect("x7")); } 7 => { let spy99_arc = &spy_arcs[99]; let spy99 = spy99_arc.lock().unwrap(); assert!(spy.time.expect("x7") > spy99.time.expect("99")); } _ => { /* noop */ } } } } #[test] fn run_executes_ordered_jobs() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", Ok(jobs::Status::NoChange(String::from("b")))); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a") > my_b_spy.time.expect("b")); } #[test] fn run_does_not_execute_ordered_job_when_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); } #[test] fn run_does_not_execute_ordered_job_when_some_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); let (c, c_spy) = FakeJob::new("c", Ok(jobs::Status::Done)); a.needs.push(String::from("b")); a.needs.push(String::from("c")); 
b.needs.push(String::from("c")); let jobs = vec![a, b, c]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); let my_c_spy = c_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); my_c_spy.assert_called_once(); } fn result_clone(result: &jobs::Result) -> jobs::Result { match result { Ok(s) => Ok(s.clone()), Err(_) => Err(jobs::Error::SomethingBad), } } }
use std::{ collections::HashMap, sync::{Arc, Mutex}, thread, }; use thiserror::Error as ThisError; use crate::jobs::{self, is_result_done, is_result_settled, Execute, Status}; const MAX_THREADS: usize = 2; #[derive(Debug, ThisError)] pub enum Error { #[error(transparent)] Job { #[from] source: jobs::Error, }, } pub fn run(jobs: Vec<(impl Execute + Send + 'static)>) { let mut results = HashMap::<String, jobs::Result>::new(); jobs.iter().for_each(|job| { if job.needs().is_empty() { results.insert(job.name(), Ok(Status::Pending)); } else { results.insert(job.name(), Ok(Status::Blocked)); } }); let jobs_arc = Arc::new(Mutex::new(jobs)); let results_arc = Arc::new(Mutex::new(results)); let mut handles = Vec::<thread::JoinHandle<_>>::with_capacity(MAX_THREADS); for _ in 0..MAX_THREADS { let my_jobs_arc = jobs_arc.clone(); let my_results_arc = results_arc.clone(); let handle = thread::spawn(move || { loop { let current_job; { let mut my_jobs = my_jobs_arc.lock().unwrap(); let mut my_results = my_results_arc.lock().unwrap(); for job in my_jobs.iter() { let name = job.name(); if !job.when() { my_results.insert(name.clone(), Ok(Status::Skipped)); } } for job in my_jobs.iter() { let name = job.name(); if is_equal_status(my_results.get(&name).unwrap(), &Status::Blocked) && job .needs() .iter() .all(|n| is_result_done(my_results.get(n).unwrap())) { my_results.insert(name, Ok(Status::Pending)); } } if is_all_settled(&my_results) { return; } let index = match my_jobs.iter().enumerate().find(|(_, job)| { let name = job.name(); is_equal_status(my_results.get(&name).unwrap(), &Status::Pending) }) { Some((i, _)) => i, None => { return; } }; current_job = my_jobs.remove(index); let name = current_job.name(); my_results.insert(name.clone(), Ok(Status::InProgress)); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } let name = current_job.name(); let result = current_job.execute(); { let mut my_results = my_results_arc.lock().unwrap(); 
my_results.insert(name.clone(), result); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } } }); handles.push(handle); } for handle in handles { handle.join().expect("worker thread failed"); } } fn is_all_settled(results: &HashMap<String, jobs::Result>) -> bool { results.iter().all(|(_, result)| is_result_settled(result)) } fn is_equal_status(result: &jobs::Result, status: &Status) -> bool { match result { Ok(s) => s == status, Err(_) => false, } } #[cfg(test)] mod tests { use std::time::{Duration, Instant}; use super::*; struct FakeJob { name: String, needs: Vec<String>, result: jobs::Result, sleep: Duration, spy_arc: Arc<Mutex<FakeJobSpy>>, when: bool, } impl Default for FakeJob { fn default() -> Self { Self { name: String::new(), needs: Vec::<String>::new(), result: Ok(jobs::Status::Done), slee
} impl FakeJob { fn new<S>(name: S, result: jobs::Result) -> (Self, Arc<Mutex<FakeJobSpy>>) where S: AsRef<str>, { let job = FakeJob { name: String::from(name.as_ref()), result, ..Default::default() }; let spy_arc = job.spy_arc.clone(); (job, spy_arc) } } impl Execute for FakeJob { fn execute(&self) -> jobs::Result { thread::sleep(self.sleep); let mut my_spy = self.spy_arc.lock().unwrap(); my_spy.calls += 1; my_spy.time = Some(Instant::now()); result_clone(&self.result) } fn name(&self) -> String { self.name.clone() } fn needs(&self) -> Vec<String> { self.needs.clone() } fn when(&self) -> bool { self.when } } struct FakeJobSpy { calls: usize, time: Option<Instant>, } impl FakeJobSpy { fn assert_called_once(&self) { assert_eq!(self.calls, 1); assert!(self.time.is_some()); } fn assert_never_called(&self) { assert_eq!(self.calls, 0); assert!(self.time.is_none()); } } #[test] fn run_does_not_execute_job_with_false_when_or_needs_job_with_false_when() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); a.when = false; let (mut b, b_spy) = FakeJob::new("b", Ok(jobs::Status::Done)); b.needs.push(String::from("a")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); my_a_spy.assert_never_called(); let my_b_spy = b_spy.lock().unwrap(); my_b_spy.assert_never_called(); } #[test] fn run_executes_unordered_jobs() { const MAX_COUNT: usize = 10; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for spy_arc in spy_arcs { let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); } } #[test] fn run_executes_unordered_jobs_concurrently() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", 
Ok(jobs::Status::Done)); a.sleep = Duration::from_millis(500); b.sleep = Duration::from_millis(500); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a").elapsed() < Duration::from_millis(100)); assert!(my_b_spy.time.expect("b").elapsed() < Duration::from_millis(100)); } #[test] fn run_executes_jobs_with_complex_needs() { const MAX_COUNT: usize = 100; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (mut job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); match i % 10 { 2 => { job.needs = vec![format!("{}", i + 2)]; } 3 => { job.needs = vec![format!("{}", i - 3)]; } 4 => { job.needs = vec![format!("{}", i + 3)]; } 7 => { job.needs = vec![String::from("99")]; } _ => { /* noop */ } } jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for i in 0..MAX_COUNT { let spy_arc = &spy_arcs[i]; let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); match i % 10 { 2 => { let spyx4_arc = &spy_arcs[i + 2]; let spyx4 = spyx4_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx4.time.expect("x7")); } 3 => { let spyx0_arc = &spy_arcs[i - 3]; let spyx0 = spyx0_arc.lock().unwrap(); assert!(spy.time.expect("x3") > spyx0.time.expect("x7")); } 4 => { let spyx7_arc = &spy_arcs[i + 3]; let spyx7 = spyx7_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx7.time.expect("x7")); } 7 => { let spy99_arc = &spy_arcs[99]; let spy99 = spy99_arc.lock().unwrap(); assert!(spy.time.expect("x7") > spy99.time.expect("99")); } _ => { /* noop */ } } } } #[test] fn run_executes_ordered_jobs() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", 
Ok(jobs::Status::NoChange(String::from("b")))); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a") > my_b_spy.time.expect("b")); } #[test] fn run_does_not_execute_ordered_job_when_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); } #[test] fn run_does_not_execute_ordered_job_when_some_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); let (c, c_spy) = FakeJob::new("c", Ok(jobs::Status::Done)); a.needs.push(String::from("b")); a.needs.push(String::from("c")); b.needs.push(String::from("c")); let jobs = vec![a, b, c]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); let my_c_spy = c_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); my_c_spy.assert_called_once(); } fn result_clone(result: &jobs::Result) -> jobs::Result { match result { Ok(s) => Ok(s.clone()), Err(_) => Err(jobs::Error::SomethingBad), } } }
p: Duration::from_millis(0), spy_arc: Arc::new(Mutex::new(FakeJobSpy { calls: 0, time: None, })), when: true, } }
function_block-function_prefixed
[ { "content": "pub fn result_display(result: &Result) -> String {\n\n match result {\n\n Ok(s) => format!(\"{}\", s),\n\n Err(e) => format!(\"{:#?}\", e).red().to_string(),\n\n }\n\n}\n", "file_path": "src/lib/jobs/mod.rs", "rank": 0, "score": 172012.30064669653 }, { "content": "pub fn is_result_done(result: &Result) -> bool {\n\n match result {\n\n Ok(s) => s.is_done(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Status {\n\n Blocked, // when \"needs\" are not yet Done\n\n Changed(String, String), // more specific kind of Done\n\n Done,\n\n InProgress,\n\n NoChange(String), // more specific kind of Done\n\n Pending, // when no \"needs\"; or \"needs\" are all Done\n\n Skipped, // when \"when\" is false\n\n}\n\nimpl fmt::Display for Status {\n\n // TODO: should Display include terminal output concerns?\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/lib/jobs/mod.rs", "rank": 1, "score": 171749.16894922988 }, { "content": "pub fn is_result_settled(result: &Result) -> bool {\n\n match result {\n\n Ok(s) => match s {\n\n Status::Blocked | Status::Skipped => true,\n\n _ => s.is_done(),\n\n },\n\n Err(_) => true,\n\n }\n\n}\n", "file_path": "src/lib/jobs/mod.rs", "rank": 2, "score": 171749.16894922993 }, { "content": "fn default_when_value() -> bool {\n\n true\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::PathBuf;\n\n\n\n use file::FileState;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn command_toml() -> std::result::Result<(), Error> {\n\n let input = r#\"\n\n [[jobs]]\n\n name = \"run something\"\n\n type = \"command\"\n\n command = \"something\"\n\n argv = [ \"foo\" ]\n", "file_path": "src/lib/jobs/mod.rs", "rank": 4, "score": 135848.40926904464 }, { "content": "fn fs_create_dir_all<P>(p: P) -> std::result::Result<(), Error>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n fs::create_dir_all(&p).map_err(|e| Error::CreatePath {\n\n path: p.as_ref().to_path_buf(),\n\n source: e,\n\n 
})\n\n}\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 6, "score": 128379.57185622421 }, { "content": "fn fs_write<P, C>(p: P, c: C) -> std::result::Result<(), Error>\n\nwhere\n\n P: AsRef<Path>,\n\n C: AsRef<[u8]>,\n\n{\n\n fs::write(&p, c).map_err(|e| Error::WritePath {\n\n path: p.as_ref().to_path_buf(),\n\n source: e,\n\n })\n\n}\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 7, "score": 118515.76085643652 }, { "content": "fn execute_directory<P>(path: P, force: bool) -> Result\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let p = path.as_ref();\n\n let previously;\n\n if p.is_dir() {\n\n return Ok(Status::NoChange(format!(\"directory: {}\", p.display())));\n\n } else if p.exists() {\n\n if !force {\n\n return Err(Error::PathExists {\n\n path: p.to_path_buf(),\n\n });\n\n }\n\n previously = String::from(\"not directory\");\n\n execute_absent(&p)?;\n\n } else {\n\n previously = String::from(\"absent\");\n\n }\n\n\n\n fs_create_dir_all(&p)?;\n\n Ok(Status::Changed(\n\n previously,\n\n format!(\"directory: {}\", p.display()),\n\n ))\n\n}\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 8, "score": 111702.92816934269 }, { "content": "type Result<T> = std::result::Result<T, Error>;\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 107985.68537092791 }, { "content": "pub trait Execute {\n\n fn execute(&self) -> Result;\n\n fn name(&self) -> String;\n\n fn needs(&self) -> Vec<String>;\n\n fn when(&self) -> bool;\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename_all = \"lowercase\", tag = \"type\")]\n\npub struct Job {\n\n #[serde(flatten)]\n\n metadata: Metadata,\n\n\n\n #[serde(flatten)]\n\n spec: Spec,\n\n}\n\nimpl Execute for Job {\n\n fn execute(&self) -> Result {\n\n match &self.spec {\n\n Spec::Command(j) => j.execute().map_err(|e| Error::CommandJob { source: e }),\n", "file_path": "src/lib/jobs/mod.rs", "rank": 11, "score": 102811.3501810845 }, { "content": "fn execute_link<P>(src: P, dest: P, force: bool) -> 
Result\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let s = src.as_ref();\n\n if std::fs::symlink_metadata(&s).is_err() && !force {\n\n return Err(Error::SrcNotFound {\n\n src: s.to_path_buf(),\n\n });\n\n }\n\n\n\n let d = dest.as_ref();\n\n let mut previously = String::from(\"absent\");\n\n\n\n if let Ok(target) = std::fs::read_link(&d) {\n\n previously = format!(\"{} -> {}\", target.display(), d.display());\n\n if s == target {\n\n return Ok(Status::NoChange(previously));\n\n }\n\n if !force {\n", "file_path": "src/lib/jobs/file.rs", "rank": 12, "score": 102110.72501119178 }, { "content": "pub fn render<S>(input: S, facts: &Facts) -> Result<String>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let context = Context::from_serialize(facts)?;\n\n\n\n let mut t = Tera::new(\"template/**/*\").expect(\"unable to prepare template system\");\n\n t.add_raw_template(\n\n \"main.toml\",\n\n &DIR_EXPRESSION_RE.replace_all(input.as_ref(), \"_dir | addslashes }}\"),\n\n )?;\n\n t.register_function(\"has_executable\", template_function_has_executable);\n\n\n\n let output = t.render(\"main.toml\", &context)?;\n\n\n\n Main::try_from(output.as_str())?; // check that we have valid TOML first\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "src/lib/template.rs", "rank": 13, "score": 101004.13767435885 }, { "content": "fn execute_absent<P>(path: P) -> Result\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let p = path.as_ref();\n\n if !p.exists() {\n\n return Ok(Status::NoChange(format!(\"{}\", p.display())));\n\n }\n\n\n\n (if p.is_dir() {\n\n fs::remove_dir_all(&p)\n\n } else {\n\n fs::remove_file(&p)\n\n })\n\n .map_err(|e| Error::RemovePath {\n\n path: p.to_path_buf(),\n\n source: e,\n\n })?;\n\n Ok(Status::Changed(\n\n format!(\"{}\", p.display()),\n\n String::from(\"absent\"),\n\n ))\n\n}\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 14, "score": 93541.43214078248 }, { "content": "fn execute_touch<P>(path: P) -> Result\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let p = path.as_ref();\n\n if p.exists() 
{\n\n // TODO: consider bumping access/modify time like real `touch`\n\n return Ok(Status::NoChange(format!(\"{}\", p.display())));\n\n }\n\n if let Some(parent) = p.parent() {\n\n execute_directory(&parent, false)?;\n\n }\n\n fs_write(p, \"\")?;\n\n Ok(Status::Changed(\n\n String::from(\"absent\"),\n\n format!(\"{}\", p.display()),\n\n ))\n\n}\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 15, "score": 93541.43214078248 }, { "content": "#[derive(Debug, ThisError)]\n\nenum Error {\n\n #[error(\"valid config file not found\")]\n\n ConfigNotFound,\n\n #[error(transparent)]\n\n Facts {\n\n #[from]\n\n source: facts::Error,\n\n },\n\n #[error(transparent)]\n\n Io {\n\n #[from]\n\n source: io::Error,\n\n },\n\n #[error(transparent)]\n\n Job {\n\n #[from]\n\n source: jobs::Error,\n\n },\n\n #[error(transparent)]\n\n Template {\n\n #[from]\n\n source: template::Error,\n\n },\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 92612.40905492133 }, { "content": "fn template_function_has_executable(args: &HashMap<String, Value>) -> tera::Result<Value> {\n\n match args.get(\"exe\") {\n\n Some(val) => match from_value::<String>(val.clone()) {\n\n Ok(v) => Ok(to_value(which(v).is_ok()).unwrap()),\n\n Err(_) => Err(tera::Error::from(r#\"\"exe\" must be a string\"#)),\n\n },\n\n None => Err(tera::Error::from(r#\"missing \"exe\" argument\"#)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::PathBuf;\n\n\n\n use super::super::facts::Facts;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn render_errs_if_not_toml() {\n", "file_path": "src/lib/template.rs", "rank": 17, "score": 90376.032784214 }, { "content": "fn main() -> Result<()> {\n\n let facts = Facts::gather()?;\n\n let m = read_config(&facts)?;\n\n runner::run(m.jobs);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 79491.67769293697 }, { "content": "#[cfg(windows)]\n\nfn symbolic_link<P>(src: P, dest: P) -> io::Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let 
src_attr = std::fs::symlink_metadata(&src)?;\n\n if src_attr.is_dir() {\n\n return std::os::windows::fs::symlink_dir(&src, dest);\n\n }\n\n\n\n std::os::windows::fs::symlink_file(&src, dest)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use mktemp::Temp;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn absent_deletes_existing_file() -> std::result::Result<(), Error> {\n", "file_path": "src/lib/jobs/file.rs", "rank": 19, "score": 63710.21227968542 }, { "content": "fn read_config(facts: &Facts) -> Result<Main> {\n\n let config_paths = [\n\n facts\n\n .config_dir\n\n .join(env!(\"CARGO_PKG_NAME\"))\n\n .join(MAIN_TOML_FILE),\n\n facts\n\n .home_dir\n\n .join(\".dotfiles\")\n\n .join(env!(\"CARGO_PKG_NAME\"))\n\n .join(MAIN_TOML_FILE),\n\n ];\n\n for config_path in config_paths.iter() {\n\n println!(\"reading: {}\", &config_path.display());\n\n let text = match fs::read_to_string(&config_path) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n println!(\"{:?}\", e);\n\n continue;\n\n }\n", "file_path": "src/main.rs", "rank": 20, "score": 61499.29901671809 }, { "content": " Spec::File(j) => j.execute().map_err(|e| Error::FileJob { source: e }),\n\n }\n\n }\n\n fn name(&self) -> String {\n\n match &self.spec {\n\n Spec::Command(j) => self.metadata.name.clone().unwrap_or_else(|| j.name()),\n\n Spec::File(j) => self.metadata.name.clone().unwrap_or_else(|| j.name()),\n\n }\n\n }\n\n fn needs(&self) -> Vec<String> {\n\n self.metadata.needs.clone().unwrap_or_default()\n\n }\n\n fn when(&self) -> bool {\n\n self.metadata.when\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\npub struct Metadata {\n\n name: Option<String>,\n", "file_path": "src/lib/jobs/mod.rs", "rank": 21, "score": 44971.65963913682 }, { "content": "\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\npub struct Main {\n\n pub jobs: Vec<Job>,\n\n}\n\nimpl TryFrom<&str> for Main {\n\n type Error = Error;\n\n fn try_from(s: &str) -> std::result::Result<Self, Self::Error> {\n\n 
toml::from_str(s).map_err(|e| Error::ParseToml { source: e })\n\n }\n\n}\n\n\n\npub type Result = std::result::Result<Status, Error>;\n", "file_path": "src/lib/jobs/mod.rs", "rank": 22, "score": 44970.474411945856 }, { "content": " needs: Option<Vec<String>>,\n\n #[serde(default = \"default_when_value\")]\n\n when: bool,\n\n}\n\nimpl Default for Metadata {\n\n fn default() -> Self {\n\n Self {\n\n name: None,\n\n needs: None,\n\n when: true,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename_all = \"lowercase\", tag = \"type\")]\n\npub enum Spec {\n\n Command(Command),\n\n File(File),\n\n}\n", "file_path": "src/lib/jobs/mod.rs", "rank": 23, "score": 44969.692113314064 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn file_toml() -> std::result::Result<(), Error> {\n\n let input = r#\"\n\n [[jobs]]\n\n name = \"mkdir /tmp\"\n\n type = \"file\"\n\n path = \"/tmp\"\n\n state = \"directory\"\n\n \"#;\n\n\n\n let got = Main::try_from(input)?;\n\n\n\n let want = Main {\n\n jobs: vec![Job {\n\n metadata: Metadata {\n\n name: Some(String::from(\"mkdir /tmp\")),\n", "file_path": "src/lib/jobs/mod.rs", "rank": 24, "score": 44967.52503148212 }, { "content": "mod command;\n\nmod file;\n\n\n\nuse std::{convert::TryFrom, fmt};\n\n\n\nuse colored::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse thiserror::Error as ThisError;\n\n\n\nuse command::Command;\n\nuse file::File;\n\n\n\n#[derive(Debug, ThisError)]\n\npub enum Error {\n\n #[error(transparent)]\n\n CommandJob {\n\n #[from]\n\n source: command::Error,\n\n },\n\n #[error(transparent)]\n", "file_path": "src/lib/jobs/mod.rs", "rank": 25, "score": 44966.653050938796 }, { "content": " ..Default::default()\n\n },\n\n spec: Spec::File(File {\n\n force: None,\n\n src: None,\n\n path: PathBuf::from(\"/tmp\"),\n\n state: FileState::Directory,\n\n }),\n\n }],\n\n };\n\n\n\n assert_eq!(got.jobs.len(), 1);\n\n assert_eq!(got, want);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn 
absent_when_defaults_to_true() -> std::result::Result<(), Error> {\n\n let input = r#\"\n", "file_path": "src/lib/jobs/mod.rs", "rank": 26, "score": 44966.53982214948 }, { "content": " [[jobs]]\n\n name = \"run something\"\n\n type = \"command\"\n\n command = \"something\"\n\n \"#;\n\n\n\n let got = Main::try_from(input)?;\n\n\n\n let want = Main {\n\n jobs: vec![Job {\n\n metadata: Metadata {\n\n name: Some(String::from(\"run something\")),\n\n when: true,\n\n ..Default::default()\n\n },\n\n spec: Spec::Command(Command {\n\n command: String::from(\"something\"),\n\n ..Default::default()\n\n }),\n\n }],\n\n };\n\n\n\n assert_eq!(got.jobs.len(), 1);\n\n assert_eq!(got, want);\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/lib/jobs/mod.rs", "rank": 27, "score": 44961.31937464076 }, { "content": " FileJob {\n\n #[from]\n\n source: file::Error,\n\n },\n\n #[error(transparent)]\n\n ParseToml {\n\n #[from]\n\n source: toml::de::Error,\n\n },\n\n #[allow(dead_code)] // TODO: fake test-only errors should not be here\n\n #[error(\"fake test-only error\")]\n\n SomethingBad,\n\n}\n\n\n", "file_path": "src/lib/jobs/mod.rs", "rank": 28, "score": 44960.29231206559 }, { "content": " match self {\n\n Self::Blocked => write!(f, \"{}\", \"blocked\".red().dimmed()),\n\n Self::Changed(from, to) => write!(\n\n f,\n\n \"{}: {} => {}\",\n\n \"changed\".yellow(),\n\n from.yellow().dimmed(),\n\n to.yellow()\n\n ),\n\n Self::Done => write!(f, \"{}\", \"done\".blue()),\n\n Self::InProgress => write!(f, \"{}\", \"inprogress\".cyan()),\n\n Self::NoChange(s) => write!(f, \"{}: {}\", \"nochange\".green(), s.green()),\n\n Self::Pending => write!(f, \"{}\", \"pending\".white()),\n\n Self::Skipped => write!(f, \"{}\", \"skipped\".blue()),\n\n }\n\n }\n\n}\n\nimpl Status {\n\n pub fn is_done(&self) -> bool {\n\n match &self {\n\n Self::Changed(_, _) | Self::Done | Self::NoChange(_) => true,\n\n Self::Blocked | Self::InProgress | Self::Pending | Self::Skipped => false,\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "src/lib/jobs/mod.rs", "rank": 29, "score": 44960.046354575396 }, { "content": " \"#;\n\n\n\n let got = Main::try_from(input)?;\n\n\n\n let want = Main {\n\n jobs: vec![Job {\n\n metadata: Metadata {\n\n name: Some(String::from(\"run something\")),\n\n ..Default::default()\n\n },\n\n spec: Spec::Command(Command {\n\n argv: Some(vec![String::from(\"foo\")]),\n\n command: String::from(\"something\"),\n\n ..Default::default()\n\n }),\n\n }],\n\n };\n\n\n\n assert_eq!(got.jobs.len(), 1);\n\n assert_eq!(got, want);\n", "file_path": "src/lib/jobs/mod.rs", "rank": 30, "score": 44959.29340509086 }, { "content": "pub mod facts;\n\npub mod jobs;\n\npub mod runner;\n\npub mod template;\n", "file_path": "src/lib/mod.rs", "rank": 31, "score": 25687.517751250314 }, { "content": " pub removes: Option<PathBuf>,\n\n}\n\nimpl Default for Command {\n\n fn default() -> Self {\n\n Command {\n\n argv: None,\n\n chdir: None,\n\n command: String::new(),\n\n creates: None,\n\n removes: None,\n\n }\n\n }\n\n}\n\nimpl Command {\n\n pub fn execute(&self) -> Result {\n\n match &self.creates {\n\n Some(p) => {\n\n if p.exists() {\n\n return Ok(Status::NoChange(format!(\"{:?} already created\", p)));\n\n }\n", "file_path": "src/lib/jobs/command.rs", "rank": 32, "score": 20555.801889282255 }, { "content": "use std::{env, io, path::PathBuf, sync::Mutex, thread};\n\n\n\nuse lazy_static::lazy_static;\n\nuse serde::{Deserialize, Serialize};\n\nuse subprocess::{Exec, PopenError, Redirection};\n\nuse thiserror::Error as ThisError;\n\n\n\nuse super::Status;\n\n\n\nlazy_static! 
{\n\n static ref MUTEX: Mutex<()> = Mutex::new(());\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename_all = \"lowercase\", tag = \"type\")]\n\npub struct Command {\n\n pub argv: Option<Vec<String>>,\n\n pub chdir: Option<PathBuf>,\n\n pub command: String,\n\n pub creates: Option<PathBuf>,\n", "file_path": "src/lib/jobs/command.rs", "rank": 33, "score": 20554.85747534095 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn done_after_running_command() {\n\n let cmd = Command {\n\n argv: Some(vec![String::from(\"--version\")]),\n\n command: String::from(\"cargo\"),\n\n ..Default::default()\n\n };\n\n match cmd.execute() {\n\n Ok(s) => assert_eq!(s, Status::Done),\n\n Err(_) => unreachable!(), // fail\n\n }\n\n // TODO: should also test stdout/stderr\n\n }\n\n\n\n #[test]\n", "file_path": "src/lib/jobs/command.rs", "rank": 34, "score": 20554.772639219107 }, { "content": " pub path: PathBuf,\n\n pub src: Option<PathBuf>,\n\n pub state: FileState,\n\n}\n\nimpl Default for File {\n\n fn default() -> Self {\n\n Self {\n\n force: None,\n\n path: PathBuf::new(),\n\n src: None,\n\n state: FileState::Touch,\n\n }\n\n }\n\n}\n\nimpl File {\n\n pub fn execute(&self) -> Result {\n\n match self.state {\n\n FileState::Absent => execute_absent(&self.path),\n\n FileState::Directory => execute_directory(&self.path, self.force.unwrap_or(false)),\n\n FileState::Link => match &self.src {\n", "file_path": "src/lib/jobs/file.rs", "rank": 35, "score": 20551.513168053854 }, { "content": " fn error_after_running_failed_command() {\n\n let cmd = Command {\n\n argv: Some(vec![String::from(\"--flag-does-not-exist\")]),\n\n command: String::from(\"cargo\"),\n\n ..Default::default()\n\n };\n\n if cmd.execute().is_ok() {\n\n unreachable!(); // fail\n\n }\n\n }\n\n\n\n #[test]\n\n fn skips_when_creates_file_already_exists() {\n\n let cmd = Command {\n\n command: String::from(\"./throw_if_attempt_to_execute\"),\n\n creates: 
Some(PathBuf::from(\"Cargo.toml\")),\n\n ..Default::default()\n\n };\n\n match cmd.execute() {\n\n Ok(s) => assert_eq!(\n", "file_path": "src/lib/jobs/command.rs", "rank": 36, "score": 20551.21067223113 }, { "content": " Some(s) => execute_link(s, &self.path, self.force.unwrap_or(false)),\n\n None => Err(Error::StateRequiresSrc { state: self.state }),\n\n },\n\n FileState::Touch => execute_touch(&self.path),\n\n _ => Err(Error::StateNotImplemented { state: self.state }),\n\n }\n\n }\n\n\n\n pub fn name(&self) -> String {\n\n let force = self.force.unwrap_or(false);\n\n let pd = self.path.display();\n\n match self.state {\n\n FileState::Absent => format!(\"rm -r{} {}\", if force { \"f\" } else { \"\" }, pd),\n\n FileState::Directory => format!(\"mkdir -p {}\", pd),\n\n FileState::Link => format!(\n\n \"ln -s{} {} {}\",\n\n if force { \"f\" } else { \"\" },\n\n self.src.clone().unwrap_or_default().display(),\n\n pd\n\n ),\n\n FileState::Touch => format!(\"touch {}\", pd),\n\n _ => format!(\"{:#?}\", self),\n\n }\n\n }\n\n}\n\n\n\npub type Result = std::result::Result<Status, Error>;\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 37, "score": 20550.18372943581 }, { "content": " ..Default::default()\n\n };\n\n\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(String::from(\"absent\"), format!(\"{}\", file.path.display()))\n\n );\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn touch_makes_nochange_for_existing_path() -> std::result::Result<(), Error> {\n\n let file = File {\n\n path: temp_file()?.to_path_buf(),\n\n state: FileState::Touch,\n\n ..Default::default()\n\n };\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 38, "score": 20549.049919236255 }, { "content": " }\n\n parts.push(self.command.clone());\n\n if let Some(a) = &self.argv {\n\n parts.extend(a.clone());\n\n }\n\n parts.join(\" \")\n\n }\n\n}\n\n\n\n#[derive(Debug, ThisError)]\n\npub enum Error {\n\n #[error(\"`{}` could not begin: {}\", cmd, source)]\n\n CommandBegin { 
cmd: String, source: PopenError },\n\n #[error(\"`{}` could not continue: {}\", cmd, source)]\n\n CommandWait { cmd: String, source: PopenError },\n\n #[error(\"`{}` exited with non-zero status code\", cmd)]\n\n NonZeroExitStatus { cmd: String },\n\n}\n\n\n\npub type Result = std::result::Result<Status, Error>;\n", "file_path": "src/lib/jobs/command.rs", "rank": 39, "score": 20548.62521833016 }, { "content": " fs_create_dir_all(file.path.parent().unwrap())?;\n\n fs_write(&file.path, \"\")?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(got, Status::NoChange(format!(\"{}\", file.path.display())));\n\n Ok(())\n\n }\n\n\n\n fn fs_read<P>(p: P) -> std::result::Result<String, Error>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n let pb = p.as_ref().to_path_buf();\n\n fs::read_to_string(&pb).map_err(|e| Error::ReadPath {\n\n path: pb,\n\n source: e,\n\n })\n\n }\n\n fn temp_dir() -> std::result::Result<mktemp::Temp, Error> {\n\n Temp::new_dir().map_err(|e| Error::TempPath { source: e })\n\n }\n\n fn temp_file() -> std::result::Result<mktemp::Temp, Error> {\n\n Temp::new_file().map_err(|e| Error::TempPath { source: e })\n\n }\n\n}\n", "file_path": "src/lib/jobs/file.rs", "rank": 40, "score": 20548.020287947933 }, { "content": " s,\n\n Status::NoChange(String::from(r#\"\"Cargo.toml\" already created\"#))\n\n ),\n\n Err(_) => unreachable!(), // fail\n\n }\n\n }\n\n\n\n #[test]\n\n fn skips_when_removes_file_already_gone() {\n\n let cmd = Command {\n\n command: String::from(\"./throw_if_attempt_to_execute\"),\n\n removes: Some(PathBuf::from(\"does_not_exist.toml\")),\n\n ..Default::default()\n\n };\n\n match cmd.execute() {\n\n Ok(s) => assert_eq!(\n\n s,\n\n Status::NoChange(String::from(r#\"\"does_not_exist.toml\" already removed\"#))\n\n ),\n\n Err(_) => unreachable!(), // fail\n", "file_path": "src/lib/jobs/command.rs", "rank": 41, "score": 20547.739957489703 }, { "content": "use std::{\n\n fs, io,\n\n path::{Path, PathBuf},\n\n};\n\n\n\nuse serde::{Deserialize, 
Serialize};\n\nuse thiserror::Error as ThisError;\n\n\n\nuse super::Status;\n\n\n\n#[derive(Debug, ThisError)]\n\npub enum Error {\n\n #[error(\"unable to link {}->{}: {}\", src.display(), path.display(), source)]\n\n CreateLink {\n\n path: PathBuf,\n\n src: PathBuf,\n\n source: io::Error,\n\n },\n\n #[error(\"unable to create {}: {}\", path.display(), source)]\n\n CreatePath { path: PathBuf, source: io::Error },\n", "file_path": "src/lib/jobs/file.rs", "rank": 42, "score": 20547.469002815 }, { "content": " }\n\n None => {}\n\n }\n\n match &self.removes {\n\n Some(p) => {\n\n if !p.exists() {\n\n return Ok(Status::NoChange(format!(\"{:?} already removed\", p)));\n\n }\n\n }\n\n None => {}\n\n }\n\n\n\n // we want exactly one \"command\" to use stdout at a time,\n\n // at least until we decide how sharing stdout should work\n\n let _lock = MUTEX.lock().unwrap();\n\n\n\n let args = match &self.argv {\n\n Some(a) => a.clone(),\n\n None => Vec::<String>::new(),\n\n };\n", "file_path": "src/lib/jobs/command.rs", "rank": 43, "score": 20546.967464353722 }, { "content": " let file = File {\n\n path: temp_dir()?.to_path_buf(),\n\n state: FileState::Absent,\n\n ..Default::default()\n\n };\n\n\n\n fs_create_dir_all(&file.path)?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(format!(\"{}\", file.path.display()), String::from(\"absent\"))\n\n );\n\n assert!(fs::symlink_metadata(&file.path).is_err());\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn absent_makes_nochange_when_already_absent() -> std::result::Result<(), Error> {\n\n let file = File {\n", "file_path": "src/lib/jobs/file.rs", "rank": 44, "score": 20546.72065893706 }, { "content": " let file = File {\n\n path: temp_file()?.to_path_buf(),\n\n state: FileState::Absent,\n\n ..Default::default()\n\n };\n\n\n\n fs_create_dir_all(&file.path.parent().unwrap())?;\n\n fs_write(&file.path, \"\")?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(format!(\"{}\", 
file.path.display()), String::from(\"absent\"))\n\n );\n\n assert!(fs::symlink_metadata(&file.path).is_err());\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn absent_deletes_existing_directory() -> std::result::Result<(), Error> {\n", "file_path": "src/lib/jobs/file.rs", "rank": 45, "score": 20546.366864271025 }, { "content": " assert_eq!(fs_read(&file.path)?, \"hello\");\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn link_without_force_requires_src_to_exist() -> std::result::Result<(), Error> {\n\n let src = temp_file()?.to_path_buf();\n\n let file = File {\n\n path: temp_dir()?.to_path_buf(),\n\n src: Some(src.clone()),\n\n state: FileState::Link,\n\n ..Default::default()\n\n };\n\n\n\n let got = file.execute();\n\n\n\n assert!(got.is_err());\n\n assert_eq!(got.err().unwrap(), Error::SrcNotFound { src },);\n\n Ok(())\n\n }\n", "file_path": "src/lib/jobs/file.rs", "rank": 46, "score": 20545.991522357668 }, { "content": " let cwd = match &self.chdir {\n\n Some(c) => c.clone(),\n\n None => env::current_dir().unwrap(),\n\n };\n\n let mut p = Exec::cmd(&self.command)\n\n .args(&args)\n\n .cwd(&cwd)\n\n .stdout(Redirection::Pipe)\n\n .stderr(Redirection::Pipe)\n\n .popen()\n\n .map_err(|e| Error::CommandBegin {\n\n cmd: self.command.clone(),\n\n source: e,\n\n })?;\n\n let (mut stderr, mut stdout) = (p.stderr.take().unwrap(), p.stdout.take().unwrap());\n\n thread::spawn(move || io::copy(&mut stderr, &mut io::stderr()));\n\n thread::spawn(move || io::copy(&mut stdout, &mut io::stdout()));\n\n let status = p.wait().map_err(|e| Error::CommandWait {\n\n cmd: self.command.clone(),\n\n source: e,\n", "file_path": "src/lib/jobs/command.rs", "rank": 47, "score": 20545.627841474823 }, { "content": " path: temp_dir()?.join(\"missing.txt\"),\n\n state: FileState::Absent,\n\n ..Default::default()\n\n };\n\n\n\n let got = file.execute()?;\n\n\n\n assert_eq!(got, Status::NoChange(format!(\"{}\", file.path.display())));\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn link_symlinks_src_to_path() -> 
std::result::Result<(), Error> {\n\n let src = temp_file()?.to_path_buf();\n\n let file = File {\n\n path: temp_file()?.to_path_buf(),\n\n src: Some(src.clone()),\n\n state: FileState::Link,\n\n ..Default::default()\n\n };\n", "file_path": "src/lib/jobs/file.rs", "rank": 48, "score": 20545.437611338897 }, { "content": " })?;\n\n if status.success() {\n\n Ok(Status::Done)\n\n } else {\n\n Err(Error::NonZeroExitStatus {\n\n cmd: self.command.clone(),\n\n })\n\n }\n\n }\n\n\n\n pub fn name(&self) -> String {\n\n let mut parts = Vec::<String>::new();\n\n if let Some(c) = &self.creates {\n\n parts.push(format!(\"[ ! -e {} ] &&\", c.display()));\n\n }\n\n if let Some(r) = &self.removes {\n\n parts.push(format!(\"[ -e {} ] &&\", r.display()));\n\n }\n\n if let Some(c) = &self.chdir {\n\n parts.push(format!(\"cd {} &&\", c.display()));\n", "file_path": "src/lib/jobs/command.rs", "rank": 49, "score": 20545.04271596942 }, { "content": "\n\n fs_write(&src, \"hello\")?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(\n\n String::from(\"absent\"),\n\n format!(\"{} -> {}\", &src.display(), file.path.display())\n\n )\n\n );\n\n assert_eq!(fs_read(&file.path)?, \"hello\");\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn link_symlinks_src_to_path_in_new_directory() -> std::result::Result<(), Error> {\n\n let src = temp_file()?.to_path_buf();\n\n let file = File {\n\n path: temp_dir()?.join(\"symlink.txt\"),\n", "file_path": "src/lib/jobs/file.rs", "rank": 50, "score": 20545.02122272401 }, { "content": "\n\n #[test]\n\n fn link_without_force_requires_path_to_not_exist() -> std::result::Result<(), Error> {\n\n let src = temp_file()?.to_path_buf();\n\n let file = File {\n\n path: temp_dir()?.to_path_buf(),\n\n src: Some(src.clone()),\n\n state: FileState::Link,\n\n ..Default::default()\n\n };\n\n\n\n fs_write(&src, \"hello\")?;\n\n fs_create_dir_all(&file.path)?;\n\n let got = file.execute();\n\n\n\n assert!(got.is_err());\n\n assert_eq!(got.err().unwrap(), 
Error::PathExists { path: file.path },);\n\n Ok(())\n\n }\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 51, "score": 20545.011220201697 }, { "content": " assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_touch() {\n\n let file = File {\n\n path: PathBuf::from(\"foo\"),\n\n state: FileState::Touch,\n\n ..Default::default()\n\n };\n\n let got = file.name();\n\n let want = \"touch foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn touch_creates_new_empty_file() -> std::result::Result<(), Error> {\n\n let file = File {\n\n path: temp_dir()?.join(\"new.txt\"),\n\n state: FileState::Touch,\n", "file_path": "src/lib/jobs/file.rs", "rank": 52, "score": 20544.977495836192 }, { "content": " }\n\n }\n\n\n\n #[test]\n\n fn name_with_command() {\n\n let cmd = Command {\n\n command: String::from(\"foo\"),\n\n ..Default::default()\n\n };\n\n let got = cmd.name();\n\n let want = \"foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_with_command_and_argv() {\n\n let cmd = Command {\n\n argv: Some(vec![String::from(\"--bar\"), String::from(\"baz\")]),\n\n command: String::from(\"foo\"),\n\n ..Default::default()\n", "file_path": "src/lib/jobs/command.rs", "rank": 53, "score": 20544.293404101438 }, { "content": " return Err(Error::PathExists {\n\n path: d.to_path_buf(),\n\n });\n\n }\n\n };\n\n // dest does not exist, or is wrong symlink, or is not a symlink\n\n\n\n match std::fs::symlink_metadata(&d) {\n\n Ok(attr) => {\n\n if !attr.file_type().is_symlink() {\n\n previously = format!(\"existing: {}\", &d.display());\n\n }\n\n if force {\n\n execute_absent(&d)?;\n\n } else {\n\n return Err(Error::PathExists {\n\n path: d.to_path_buf(),\n\n });\n\n }\n\n }\n", "file_path": "src/lib/jobs/file.rs", "rank": 54, "score": 20542.929642176583 }, { "content": " fs_write(&src, \"hello\")?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(\n\n format!(\"{} -> {}\", &src_old.display(), file.path.display()),\n\n format!(\"{} 
-> {}\", &src.display(), file.path.display())\n\n )\n\n );\n\n assert_eq!(fs_read(&file.path)?, \"hello\");\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn link_removes_existing_file_at_path() -> std::result::Result<(), Error> {\n\n let src = temp_file()?.to_path_buf();\n\n let file = File {\n\n force: Some(true),\n\n path: temp_file()?.to_path_buf(),\n", "file_path": "src/lib/jobs/file.rs", "rank": 55, "score": 20542.900960085848 }, { "content": " #[test]\n\n fn link_corrects_existing_symlink() -> std::result::Result<(), Error> {\n\n let src_old = temp_file()?.to_path_buf();\n\n let file_old = File {\n\n path: temp_dir()?.join(\"symlink.txt\"),\n\n src: Some(src_old.clone()),\n\n state: FileState::Link,\n\n ..Default::default()\n\n };\n\n fs_write(&src_old, \"hello_old\")?;\n\n file_old.execute()?;\n\n\n\n let src = temp_file()?.to_path_buf();\n\n let file = File {\n\n force: Some(true),\n\n path: file_old.path,\n\n src: Some(src.clone()),\n\n state: FileState::Link,\n\n };\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 56, "score": 20542.474832877255 }, { "content": " let cmd = Command {\n\n command: String::from(\"foo\"),\n\n creates: Some(PathBuf::from(\"bar\")),\n\n ..Default::default()\n\n };\n\n let got = cmd.name();\n\n let want = \"[ ! 
-e bar ] && foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_with_command_and_removes() {\n\n let cmd = Command {\n\n command: String::from(\"foo\"),\n\n removes: Some(PathBuf::from(\"bar\")),\n\n ..Default::default()\n\n };\n\n let got = cmd.name();\n\n let want = \"[ -e bar ] && foo\";\n\n assert_eq!(got, want);\n\n }\n\n}\n", "file_path": "src/lib/jobs/command.rs", "rank": 57, "score": 20542.32389255629 }, { "content": " };\n\n let got = cmd.name();\n\n let want = \"foo --bar baz\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_with_command_and_chdir() {\n\n let cmd = Command {\n\n chdir: Some(PathBuf::from(\"bar\")),\n\n command: String::from(\"foo\"),\n\n ..Default::default()\n\n };\n\n let got = cmd.name();\n\n let want = \"cd bar && foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_with_command_and_creates() {\n", "file_path": "src/lib/jobs/command.rs", "rank": 58, "score": 20542.183275898034 }, { "content": " #[error(\"{} already exists\", path.display())]\n\n PathExists { path: PathBuf },\n\n #[allow(dead_code)] // TODO: test-only errors should not be here\n\n #[error(\"unable to read {}: {}\", path.display(), source)]\n\n ReadPath { path: PathBuf, source: io::Error },\n\n #[error(\"unable to remove {}: {}\", path.display(), source)]\n\n RemovePath { path: PathBuf, source: io::Error },\n\n #[error(\"{} not found\", src.display())]\n\n SrcNotFound { src: PathBuf },\n\n #[error(\"state={} requires src\", format!(\"{:?}\", state).to_lowercase())]\n\n StateRequiresSrc { state: FileState },\n\n #[error(\"state={} is not yet implemented\", format!(\"{:?}\", state).to_lowercase())]\n\n StateNotImplemented { state: FileState },\n\n #[allow(dead_code)] // TODO: test-only errors should not be here\n\n #[error(transparent)]\n\n TempPath { source: io::Error },\n\n #[error(\"unable to write {}: {}\", path.display(), source)]\n\n WritePath { path: PathBuf, source: io::Error },\n\n}\n\nimpl PartialEq for Error {\n", 
"file_path": "src/lib/jobs/file.rs", "rank": 59, "score": 20542.098460467652 }, { "content": " Err(_) => {\n\n if let Some(parent) = d.parent() {\n\n execute_directory(&parent, force)?;\n\n }\n\n }\n\n }\n\n\n\n symbolic_link(&s, &d).map_err(|e| Error::CreateLink {\n\n path: d.to_path_buf(),\n\n src: s.to_path_buf(),\n\n source: e,\n\n })?;\n\n\n\n Ok(Status::Changed(\n\n previously,\n\n format!(\"{} -> {}\", s.display(), d.display(),),\n\n ))\n\n}\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 60, "score": 20541.200933678963 }, { "content": " #[test]\n\n fn name_absent() {\n\n let file = File {\n\n path: PathBuf::from(\"foo\"),\n\n state: FileState::Absent,\n\n ..Default::default()\n\n };\n\n let got = file.name();\n\n let want = \"rm -r foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_absent_force() {\n\n let file = File {\n\n force: Some(true),\n\n path: PathBuf::from(\"foo\"),\n\n state: FileState::Absent,\n\n ..Default::default()\n\n };\n", "file_path": "src/lib/jobs/file.rs", "rank": 61, "score": 20540.68572920102 }, { "content": " let got = file.name();\n\n let want = \"rm -rf foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_directory() {\n\n let file = File {\n\n path: PathBuf::from(\"foo\"),\n\n state: FileState::Directory,\n\n ..Default::default()\n\n };\n\n let got = file.name();\n\n let want = \"mkdir -p foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_link() {\n\n let file = File {\n", "file_path": "src/lib/jobs/file.rs", "rank": 62, "score": 20540.311152354396 }, { "content": " fn eq(&self, other: &Error) -> bool {\n\n format!(\"{:?}\", self) == format!(\"{:?}\", other)\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum FileState {\n\n Absent,\n\n Directory,\n\n File,\n\n Hard,\n\n Link,\n\n Touch,\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename_all = \"lowercase\", tag = 
\"type\")]\n\npub struct File {\n\n pub force: Option<bool>,\n", "file_path": "src/lib/jobs/file.rs", "rank": 63, "score": 20540.295972113283 }, { "content": " src: Some(src.clone()),\n\n state: FileState::Link,\n\n ..Default::default()\n\n };\n\n\n\n fs_create_dir_all(file.path.parent().unwrap())?;\n\n fs_write(&src, \"hello\")?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(\n\n String::from(\"absent\"),\n\n format!(\"{} -> {}\", &src.display(), file.path.display())\n\n )\n\n );\n\n assert_eq!(fs_read(&file.path)?, \"hello\");\n\n Ok(())\n\n }\n\n\n", "file_path": "src/lib/jobs/file.rs", "rank": 64, "score": 20539.822129792286 }, { "content": " fn link_removes_existing_directory_at_path() -> std::result::Result<(), Error> {\n\n let src = temp_file()?.to_path_buf();\n\n let file = File {\n\n force: Some(true),\n\n path: temp_dir()?.to_path_buf(),\n\n src: Some(src.clone()),\n\n state: FileState::Link,\n\n };\n\n\n\n fs_write(&src, \"hello\")?;\n\n fs_create_dir_all(&file.path)?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(\n\n format!(\"existing: {}\", file.path.display()),\n\n format!(\"{} -> {}\", &src.display(), file.path.display())\n\n )\n\n );\n", "file_path": "src/lib/jobs/file.rs", "rank": 65, "score": 20539.66437299358 }, { "content": " path: PathBuf::from(\"foo\"),\n\n src: Some(PathBuf::from(\"bar\")),\n\n state: FileState::Link,\n\n ..Default::default()\n\n };\n\n let got = file.name();\n\n let want = \"ln -s bar foo\";\n\n assert_eq!(got, want);\n\n }\n\n\n\n #[test]\n\n fn name_link_force() {\n\n let file = File {\n\n force: Some(true),\n\n path: PathBuf::from(\"foo\"),\n\n src: Some(PathBuf::from(\"bar\")),\n\n state: FileState::Link,\n\n };\n\n let got = file.name();\n\n let want = \"ln -sf bar foo\";\n", "file_path": "src/lib/jobs/file.rs", "rank": 66, "score": 20538.795894399474 }, { "content": " src: Some(src.clone()),\n\n state: FileState::Link,\n\n };\n\n\n\n fs_write(&src, 
\"hello\")?;\n\n fs_write(&file.path, \"existing\")?;\n\n let got = file.execute()?;\n\n\n\n assert_eq!(\n\n got,\n\n Status::Changed(\n\n format!(\"existing: {}\", file.path.display()),\n\n format!(\"{} -> {}\", &src.display(), file.path.display())\n\n )\n\n );\n\n assert_eq!(fs_read(&file.path)?, \"hello\");\n\n Ok(())\n\n }\n\n\n\n #[test]\n", "file_path": "src/lib/jobs/file.rs", "rank": 67, "score": 20537.983431692755 }, { "content": " pub is_os_linux: bool,\n\n pub is_os_macos: bool,\n\n pub is_os_windows: bool,\n\n}\n\nimpl Facts {\n\n pub fn gather() -> Result {\n\n Ok(Self {\n\n cache_dir: dirs::cache_dir().ok_or(Error::CacheDir)?,\n\n config_dir: dirs::config_dir().ok_or(Error::ConfigDir)?,\n\n home_dir: dirs::home_dir().ok_or(Error::HomeDir)?,\n\n is_os_linux: OS == \"linux\",\n\n is_os_macos: OS == \"macos\",\n\n is_os_windows: OS == \"windows\",\n\n })\n\n }\n\n}\n\nimpl Default for Facts {\n\n fn default() -> Self {\n\n Self {\n\n cache_dir: PathBuf::new(),\n", "file_path": "src/lib/facts.rs", "rank": 77, "score": 19.058586330095885 }, { "content": " Job {\n\n #[from]\n\n source: jobs::Error,\n\n },\n\n #[error(\"template error: {}\", source)]\n\n Tera {\n\n #[from]\n\n source: tera::Error,\n\n },\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n", "file_path": "src/lib/template.rs", "rank": 78, "score": 18.17304794108276 }, { "content": "use std::{collections::HashMap, convert::TryFrom};\n\n\n\nuse lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse tera::{self, from_value, to_value, Context, Tera, Value};\n\nuse thiserror::Error as ThisError;\n\nuse which::which;\n\n\n\nuse super::{\n\n facts::Facts,\n\n jobs::{self, Main},\n\n};\n\n\n\nlazy_static! 
{\n\n static ref DIR_EXPRESSION_RE: Regex = Regex::new(r\"_dir\\s*\\}\\}\").unwrap();\n\n}\n\n\n\n#[derive(Debug, ThisError)]\n\npub enum Error {\n\n #[error(transparent)]\n", "file_path": "src/lib/template.rs", "rank": 80, "score": 17.193350673607373 }, { "content": " ..Default::default()\n\n };\n\n let want = r#\"\n\n [[jobs]]\n\n name = \"c:\\\\my_cache_dir my_home_dir\"\n\n type = \"command\"\n\n command = \"my_config_dir\"\n\n when = false\n\n \"#;\n\n let result = dbg!(render(input, &facts));\n\n assert!(result.is_ok());\n\n if let Ok(got) = result {\n\n assert_eq!(got, want);\n\n }\n\n }\n\n\n\n #[test]\n\n fn render_toml_with_function_expressions() {\n\n let input = r#\"\n\n [[jobs]]\n", "file_path": "src/lib/template.rs", "rank": 85, "score": 14.210183843692633 }, { "content": "# job metadata\n\n\n\nthese are fields that are not specific to the job type,\n\nbut rather relate to running the job\n\n\n\n## name (string, optional)\n\n\n\nset a unique name / label / description / identifier for the job,\n\nwhich appears in logs when the job runs\n\n\n\ne.g.\n\n\n\n```\n\n[[jobs]]\n\nname = \"something to do\"\n\n# ...\n\n```\n\n\n\n## needs (string[], optional)\n\n\n\nset dependencies for the job,\n\nwhich **all** need to complete without errors,\n\nbefore this job can run\n\n\n\ne.g.\n\n\n\n```\n\n[[jobs]]\n\nname = \"first thing\"\n\n# ...\n\n\n\n[[jobs]]\n\nname = \"second thing\"\n\n# ...\n\nneeds = [\"first thing\"]\n\n```\n\n\n\n## when (boolean; default = true)\n\n\n\ne.g.\n\n\n\n```\n\n[[jobs]]\n\nname = \"something to do\"\n\n# ...\n\nwhen = true\n\n```\n\n\n\n- `true`: run the job\n\n- `false`: skip the job\n\n\n\nthis makes the most sense when combined with a boolean\n\n[template expression](./template.md)\n\n\n\ne.g.\n\n\n\n```\n\n[[jobs]]\n\nname = \"something to do\"\n\n# ...\n\nwhen = {{ is_os_linux or is_os_macos }}\n\n```\n", "file_path": "docs/metadata.md", "rank": 86, "score": 14.134676909082796 }, { "content": 
"#![deny(clippy::all)]\n\n\n\nmod lib;\n\n\n\nuse std::{convert::TryFrom, fs, io};\n\n\n\nuse thiserror::Error as ThisError;\n\n\n\nuse lib::{\n\n facts::{self, Facts},\n\n jobs::{self, Main},\n\n runner, template,\n\n};\n\n\n\nconst MAIN_TOML_FILE: &str = \"main.toml\";\n\n\n\n#[derive(Debug, ThisError)]\n", "file_path": "src/main.rs", "rank": 88, "score": 12.960067867148851 }, { "content": " name = \"{{ has_executable(exe=\"missing_command\") }}\"\n\n type = \"command\"\n\n command = \"foo\"\n\n \"#;\n\n let facts = Facts::default();\n\n let want = r#\"\n\n [[jobs]]\n\n name = \"false\"\n\n type = \"command\"\n\n command = \"foo\"\n\n \"#;\n\n let result = dbg!(render(input, &facts));\n\n assert!(result.is_ok());\n\n if let Ok(got) = result {\n\n assert_eq!(got, want);\n\n }\n\n }\n\n}\n", "file_path": "src/lib/template.rs", "rank": 90, "score": 12.533586417780844 }, { "content": "# Changelog\n\n\n\nWe will document all notable changes to this project in this file.\n\n\n\nWe use the [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) format,\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.1.8] - 2020-05-03\n\n\n\n### Added\n\n\n\n- `\"when\"` for conditional jobs\n\n- new [facts](./docs/template.md) for OS-detection\n\n- document available template expression values / [facts](./docs/template.md)\n\n- `{{ has_executable(foo) }}` template function to check for executables\n\n\n\n### Changed\n\n\n\n- also check for tuning/main.toml in ~/.dotfiles\n\n\n\n### Fixed\n\n\n\n- escape path expressions so Windows paths are valid TOML (#17)\n\n\n\n## [0.1.7] - 2020-04-06\n\n\n\n### Added\n\n\n\n- colorized output for job status\n\n- generate friendlier names for command jobs\n\n- generate friendlier names for file jobs\n\n- internally centralise handling of common fields like `\"name\"`\n\n\n\n## [0.1.6] - 2020-03-13\n\n\n\n### Added\n\n\n\n- links to wiki documentation in README\n\n- read 
config file from default location\n\n- `\"needs\"` for inter-job dependencies\n\n- command: job type to run commands\n\n- concurrent job runner using 2 threads\n\n- file: job type to manipulate files\n\n- support `{{ home_dir }}` and other expressions in template\n\n\n\n## [0.1.5] - 2019-08-16\n\n\n\n### Fixed\n\n\n\n- ci: cannot use `contains()` with array\n\n\n\n## [0.1.4] - 2019-08-16\n\n\n\n### Fixed\n\n\n\n- ci: tweak handling of \"release\" GitHub Action\n\n\n\n## [0.1.3] - 2019-08-16\n\n\n\n### Fixed\n\n\n\n- add missing description and license metadata\n\n\n\n## [0.1.2] - 2019-08-16\n\n\n\n### Added\n\n\n\n- ci: debug GitHub Actions workflow\n\n\n\n## [0.1.1] - 2019-08-16\n\n\n\n### Fixed\n\n\n\n- ci: fix broken GitHub Actions `if`\n\n\n\n## [0.1.0] - 2019-08-16\n\n\n\n### Added\n\n\n\n- initial (non-functional) release to crates.io\n", "file_path": "CHANGELOG.md", "rank": 91, "score": 12.119905872499677 }, { "content": " };\n\n let rendered = match template::render(text, &facts) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n println!(\"{:?}\", e);\n\n continue;\n\n }\n\n };\n\n match Main::try_from(rendered.as_str()) {\n\n Ok(m) => {\n\n return Ok(m);\n\n }\n\n Err(e) => {\n\n println!(\"{:?}\", e);\n\n }\n\n }\n\n }\n\n Err(Error::ConfigNotFound)\n\n}\n", "file_path": "src/main.rs", "rank": 92, "score": 11.771606697307684 }, { "content": " type = \"command\"\n\n command = \"{{ missing_value }}\"\n\n \"#;\n\n let facts = Facts::default();\n\n let got = render(input, &facts);\n\n assert!(got.is_err());\n\n // TODO: assert on error contents\n\n }\n\n\n\n #[test]\n\n fn render_toml_without_expressions() {\n\n let input = r#\"\n\n [[jobs]]\n\n type = \"command\"\n\n command = \"something\"\n\n \"#;\n\n let facts = Facts::default();\n\n let want = String::from(input);\n\n let result = render(input, &facts);\n\n assert!(result.is_ok());\n", "file_path": "src/lib/template.rs", "rank": 93, "score": 11.764999114553664 }, { "content": "use std::{env::consts::OS, 
path::PathBuf};\n\n\n\nuse serde::Serialize;\n\nuse thiserror::Error as ThisError;\n\n\n\n#[derive(Debug, ThisError)]\n\npub enum Error {\n\n #[error(\"unable to find cache_dir\")]\n\n CacheDir,\n\n #[error(\"unable to find config_dir\")]\n\n ConfigDir,\n\n #[error(\"unable to find home_dir\")]\n\n HomeDir,\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct Facts {\n\n pub cache_dir: PathBuf,\n\n pub config_dir: PathBuf,\n\n pub home_dir: PathBuf,\n", "file_path": "src/lib/facts.rs", "rank": 95, "score": 11.088779733423072 }, { "content": " config_dir: PathBuf::new(),\n\n home_dir: PathBuf::new(),\n\n is_os_linux: false,\n\n is_os_macos: false,\n\n is_os_windows: false,\n\n }\n\n }\n\n}\n\n\n\npub type Result = std::result::Result<Facts, Error>;\n", "file_path": "src/lib/facts.rs", "rank": 96, "score": 9.87685638503775 }, { "content": " if let Ok(got) = result {\n\n assert_eq!(got, want);\n\n }\n\n }\n\n\n\n #[test]\n\n fn render_toml_with_expressions() {\n\n let input = r#\"\n\n [[jobs]]\n\n name = \"{{ cache_dir }} {{ home_dir }}\"\n\n type = \"command\"\n\n command = \"{{ config_dir }}\"\n\n when = {{ is_os_linux or is_os_macos }}\n\n \"#;\n\n let facts = Facts {\n\n cache_dir: PathBuf::from(\"c:\\\\my_cache_dir\"), // like Windows\n\n config_dir: PathBuf::from(\"my_config_dir\"),\n\n home_dir: PathBuf::from(\"my_home_dir\"),\n\n is_os_linux: false,\n\n is_os_macos: false,\n", "file_path": "src/lib/template.rs", "rank": 97, "score": 8.24476068222015 }, { "content": "# roadmap\n\n\n\n- [x] read config from user's HOME directory\n\n- [x] `needs` to support optional sequencing of jobs\n\n- [x] support the \"command\" job\n\n- [x] support the \"file\" job\n\n- [x] resolve references to path expressions (e.g. 
~) ([#9](https://github.com/jokeyrhyme/tuning/issues/9))\n\n- [x] `when` to support conditional jobs\n\n- [x] specify that a job needs a certain OS\n\n- [x] specify that a job needs certain executables\n\n- [ ] `needs_any` for flexible sequencing of jobs\n\n- [ ] support the \"git\" job\n\n- [ ] flag to point at a different config file\n\n- [ ] `import` or `include` to help decompose large config files\n\n\n\n# see also\n\n\n\n- https://github.com/rash-sh/rash\n\n- https://www.ansible.com/\n", "file_path": "README.md", "rank": 98, "score": 7.705788616282763 }, { "content": " let input = r#\"{\"hello\": \"world\"}\"#;\n\n let facts = Facts::default();\n\n let got = render(input, &facts);\n\n assert!(got.is_err());\n\n // TODO: assert on error contents\n\n }\n\n\n\n #[test]\n\n fn render_errs_if_bad_toml() {\n\n let input = r#\"unexpected_key = \"value\"\"#;\n\n let facts = Facts::default();\n\n let got = render(input, &facts);\n\n assert!(got.is_err());\n\n // TODO: assert on error contents\n\n }\n\n\n\n #[test]\n\n fn render_toml_with_missing_value() {\n\n let input = r#\"\n\n [[jobs]]\n", "file_path": "src/lib/template.rs", "rank": 99, "score": 6.562265336714395 } ]
Rust
rust/lib-hedgewars-engine/src/render/gear.rs
emorrp1/hw
0afbbead97ad3aa231391f60605a73cd2629a849
use crate::render::{ atlas::{AtlasCollection, SpriteIndex}, camera::Camera, gl::{ Buffer, BufferType, BufferUsage, InputElement, InputFormat, InputLayout, PipelineState, Shader, Texture2D, TextureDataType, TextureFilter, TextureFormat, TextureInternalFormat, VariableBinding, }, }; use integral_geometry::{Rect, Size}; use png::{ColorType, Decoder, DecodingError}; use std::{ collections::HashMap, ffi::OsString, fs::{read_dir, File}, io, io::BufReader, mem::size_of, path::{Path, PathBuf}, }; const VERTEX_SHADER: &'static str = r#" #version 330 core uniform mat4 projection; layout(location = 0) in vec2 position; layout(location = 1) in vec2 texCoords; out vec2 varTexCoords; void main() { varTexCoords = texCoords; gl_Position = projection * vec4(position, 0.0, 1.0); } "#; const PIXEL_SHADER: &'static str = r#" #version 330 core uniform sampler2D texture; in vec2 varTexCoords; out vec4 outColor; void main() { outColor = texture2D(texture, varTexCoords); } "#; #[repr(C)] #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], tex_coords: [f32; 2], } #[derive(PartialEq, Debug, Clone, Copy)] #[repr(u32)] pub enum SpriteId { Mine = 0, Grenade, Cheese, Cleaver, MaxSprite, } const SPRITE_LOAD_LIST: &[(SpriteId, &str)] = &[ ( SpriteId::Mine, "../../share/hedgewars/Data/Graphics/MineOn.png", ), ( SpriteId::Grenade, "../../share/hedgewars/Data/Graphics/Bomb.png", ), ( SpriteId::Cheese, "../../share/hedgewars/Data/Graphics/cheese.png", ), ( SpriteId::Cleaver, "../../share/hedgewars/Data/Graphics/cleaver.png", ), ]; const MAX_SPRITES: usize = SpriteId::MaxSprite as usize + 1; type SpriteTexCoords = (u32, [[f32; 2]; 4]); pub struct GearEntry { position: [f32; 2], size: Size, } impl GearEntry { pub fn new(x: f32, y: f32, size: Size) -> Self { Self { position: [x, y], size, } } } pub struct GearRenderer { atlas: AtlasCollection, texture: Texture2D, allocation: Box<[SpriteTexCoords; MAX_SPRITES]>, shader: Shader, layout: InputLayout, vertex_buffer: Buffer, } struct SpriteData { 
size: Size, filename: PathBuf, } const ATLAS_SIZE: Size = Size::square(2048); impl GearRenderer { pub fn new() -> Self { let mut atlas = AtlasCollection::new(ATLAS_SIZE); let texture = Texture2D::new( ATLAS_SIZE, TextureInternalFormat::Rgba8, TextureFilter::Linear, ); let mut allocation = Box::new([Default::default(); MAX_SPRITES]); for (sprite, file) in SPRITE_LOAD_LIST { let path = Path::new(file); let size = load_sprite_size(path).expect(&format!("Unable to open {}", file)); let index = atlas .insert_sprite(size) .expect(&format!("Could not store sprite {:?}", sprite)); let (texture_index, rect) = atlas.get_rect(index).unwrap(); let mut pixels = vec![255u8; size.area() * 4].into_boxed_slice(); load_sprite_pixels(path, &mut pixels).expect("Unable to load Graphics"); texture.update( rect, &pixels, None, TextureFormat::Rgba, TextureDataType::UnsignedByte, ); let mut tex_coords = [ [rect.left() as f32, rect.bottom() as f32 + 1.0], [rect.right() as f32 + 1.0, rect.bottom() as f32 + 1.0], [rect.left() as f32, rect.top() as f32], [rect.right() as f32 + 1.0, rect.top() as f32], ]; for coords in &mut tex_coords { coords[0] /= ATLAS_SIZE.width as f32; coords[1] /= ATLAS_SIZE.height as f32; } allocation[*sprite as usize] = (texture_index, tex_coords); } let shader = Shader::new( VERTEX_SHADER, Some(PIXEL_SHADER), &[VariableBinding::Sampler("texture", 0)], ) .unwrap(); let layout = InputLayout::new(vec![ InputElement { shader_slot: 0, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: 0, }, InputElement { shader_slot: 1, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: size_of::<[f32; 2]>() as u32, }, ]); let vertex_buffer = Buffer::empty(BufferType::Array, BufferUsage::DynamicDraw); Self { atlas, texture, allocation, shader, layout, vertex_buffer, } } pub fn render(&mut self, camera: &Camera, entries: &[GearEntry]) { let mut data = 
Vec::with_capacity(entries.len() * 6); for (index, entry) in entries.iter().enumerate() { let sprite_id = match index & 0b11 { 0 => SpriteId::Mine, 1 => SpriteId::Grenade, 2 => SpriteId::Cheese, _ => SpriteId::Cleaver, }; let sprite_coords = &self.allocation[sprite_id as usize].1; let v = [ Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0, entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[0], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[1], }, Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[2], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[3], }, ]; data.extend_from_slice(&[v[0], v[1], v[2], v[1], v[3], v[2]]); } let projection = camera.projection(); self.shader.bind(); self.shader.set_matrix("projection", projection.as_ptr()); self.shader.bind_texture_2d(0, &self.texture); self.vertex_buffer.write_typed(&data); let _buffer_bind = self.layout.bind(&[(0, &self.vertex_buffer)], None); let _state = PipelineState::new().with_blend(); unsafe { gl::DrawArrays(gl::TRIANGLES, 0, entries.len() as i32 * 6); } } } fn load_sprite_pixels(path: &Path, buffer: &mut [u8]) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); reader.next_frame(buffer)?; Ok(size) } fn load_sprite_size(path: &Path) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); Ok(size) } fn load_sprites(path: &Path) -> io::Result<Vec<SpriteData>> { let mut 
result = vec![]; for file in read_dir(path)? { let file = file?; if let Some(extension) = file.path().extension() { if extension == "png" { let path = file.path(); let sprite = load_sprite_size(&path)?; result.push(SpriteData { size: sprite, filename: path, }); } } } Ok(result) }
use crate::render::{ atlas::{AtlasCollection, SpriteIndex}, camera::Camera, gl::{ Buffer, BufferType, BufferUsage, InputElement, InputFormat, InputLayout, PipelineState, Shader, Texture2D, TextureDataType, TextureFilter, TextureFormat, TextureInternalFormat, VariableBinding, }, }; use integral_geometry::{Rect, Size}; use png::{ColorType, Decoder, DecodingError}; use std::{ collections::HashMap, ffi::OsString, fs::{read_dir, File}, io, io::BufReader, mem::size_of, path::{Path, PathBuf}, }; const VERTEX_SHADER: &'static str = r#" #version 330 core uniform mat4 projection; layout(location = 0) in vec2 position; layout(location = 1) in vec2 texCoords; out vec2 varTexCoords; void main() { varTexCoords = texCoords; gl_Position = projection * vec4(position, 0.0, 1.0); } "#; const PIXEL_SHADER: &'static str = r#" #version 330 core uniform sampler2D texture; in vec2 varTexCoords; out vec4 outColor; void main() { outColor = texture2D(texture, varTexCoords); } "#; #[repr(C)] #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], tex_coords: [f32; 2], } #[derive(PartialEq, Debug, Clone, Copy)] #[repr(u32)] pub enum SpriteId { Mine = 0, Grenade, Cheese, Cleaver, MaxSprite, } const SPRITE_LOAD_LIST: &[(SpriteId, &str)] = &[ ( SpriteId::Mine, "../../share/hedgewars/Data/Graphics/MineOn.png", ), ( SpriteId::Grenade, "../../share/hedgewars/Data/Graphics/Bomb.png", ), ( SpriteId::Cheese, "../../share/hedgewars/Data/Graphics/cheese.png", ), ( SpriteId::Cleaver, "../../share/hedgewars/Data/Graphics/cleaver.png", ), ]; const MAX_SPRITES: usize = SpriteId::MaxSprite as usize + 1; type SpriteTexCoords = (u32, [[f32; 2]; 4]); pub struct GearEntry { position: [f32; 2], size: Size, } impl GearEntry { pub fn new(x: f32, y: f32, size: Size) -> Self { Self { position: [x, y], size, } } } pub struct GearRenderer { atlas: AtlasCollection, texture: Texture2D, allocation: Box<[SpriteTexCoords; MAX_SPRITES]>, shader: Shader, layout: InputLayout, vertex_buffer: Buffer, } struct SpriteData { 
size: Size, filename: PathBuf, } const ATLAS_SIZE: Size = Size::square(2048); impl GearRenderer { pub fn new() -> Self { let mut atlas = AtlasCollection::new(ATLAS_SIZE); let texture = Texture2D::new( ATLAS_SIZE, TextureInternalFormat::Rgba8, TextureFilter::Linear, ); let mut allocation = Box::new([Default::default(); MAX_SPRITES]); for (sprite, file) in SPRITE_LOAD_LIST { let path = Path::new(file); let size = load_sprite_size(path).expect(&format!("Unable to open {}", file)); let index = atlas .insert_sprite(size) .expect(&format!("Could not store sprite {:?}", sprite)); let (texture_index, rect) = atlas.get_rect(index).unwrap(); let mut pixels = vec![255u8; size.area() * 4].into_boxed_slice(); load_sprite_pixels(path, &mut pixels).expect("Unable to load Graphics"); texture.update( rect, &pixels, None, TextureFormat::Rgba, TextureDataType::UnsignedByte, ); let mut tex_coords = [ [rect.left() as f32, rect.bottom() as f32 + 1.0], [rect.right() as f32 + 1.0, rect.bottom() as f32 + 1.0], [rect.left() as f32, rect.top() as f32], [rect.right() as f32 + 1.0, rect.top() as f32], ]; for coords in &mut tex_coords { coords[0] /= ATLAS_SIZE.width as f32; coords[1] /= ATLAS_SIZE.height as f32; } allocation[*sprite as usize] = (texture_index, tex_coords); } let shader = Shader::new( VERTEX_SHADER, Some(PIXEL_SHADER), &[VariableBinding::Sampler("texture", 0)], ) .unwrap(); let layout = InputLayout::new(vec![ InputElement { shader_slot: 0, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: 0, }, InputElement { shader_slot: 1, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: size_of::<[f32; 2]>() as u32, }, ]); let vertex_buffer = Buffer::empty(BufferType::Array, BufferUsage::DynamicDraw); Self { atlas, texture, allocation, shader, layout, vertex_buffer, } } pub fn render(&mut self, camera: &Camera, entries: &[GearEntry]) { let mut data = 
Vec::with_capacity(entries.len() * 6); for (index, entry) in entries.iter().enumerate() { let sprite_id = match index & 0b11 { 0 => SpriteId::Mine, 1 => SpriteId::Grenade, 2 => SpriteId::Cheese, _ => SpriteId::Cleaver, }; let sprite_coords = &self.allocation[sprite_id as usize].1; let v = [ Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0,
} fn load_sprite_pixels(path: &Path, buffer: &mut [u8]) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); reader.next_frame(buffer)?; Ok(size) } fn load_sprite_size(path: &Path) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); Ok(size) } fn load_sprites(path: &Path) -> io::Result<Vec<SpriteData>> { let mut result = vec![]; for file in read_dir(path)? { let file = file?; if let Some(extension) = file.path().extension() { if extension == "png" { let path = file.path(); let sprite = load_sprite_size(&path)?; result.push(SpriteData { size: sprite, filename: path, }); } } } Ok(result) }
entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[0], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[1], }, Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[2], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[3], }, ]; data.extend_from_slice(&[v[0], v[1], v[2], v[1], v[3], v[2]]); } let projection = camera.projection(); self.shader.bind(); self.shader.set_matrix("projection", projection.as_ptr()); self.shader.bind_texture_2d(0, &self.texture); self.vertex_buffer.write_typed(&data); let _buffer_bind = self.layout.bind(&[(0, &self.vertex_buffer)], None); let _state = PipelineState::new().with_blend(); unsafe { gl::DrawArrays(gl::TRIANGLES, 0, entries.len() as i32 * 6); } }
function_block-function_prefix_line
[ { "content": "fn get_protocol_number(executable: &str) -> std::io::Result<u32> {\n\n let output = Command::new(executable).arg(\"--protocol\").output()?;\n\n\n\n Ok(u32::from_str(&String::from_utf8(output.stdout).unwrap().trim()).unwrap_or(55))\n\n}\n\n\n", "file_path": "rust/hedgewars-checker/src/main.rs", "rank": 3, "score": 385946.60571211606 }, { "content": "fn is_out_of_bounds(data: &[u8], data_stride: Option<NonZeroU32>, texture_size: Size) -> bool {\n\n let data_stride = get_u32(data_stride);\n\n data_stride == 0 && texture_size.area() * 4 > data.len()\n\n || data_stride != 0\n\n && texture_size.width > data_stride as usize\n\n && (texture_size.height * data_stride as usize) * 4 > data.len()\n\n}\n\n\n\nimpl Texture2D {\n\n pub fn new(size: Size, internal_format: TextureInternalFormat, filter: TextureFilter) -> Self {\n\n if let Some(handle) = new_texture() {\n\n unsafe {\n\n gl::BindTexture(gl::TEXTURE_2D, handle.get());\n\n gl::TexImage2D(\n\n gl::TEXTURE_2D,\n\n 0,\n\n internal_format as i32,\n\n size.width as i32,\n\n size.height as i32,\n\n 0,\n", "file_path": "rust/lib-hedgewars-engine/src/render/gl.rs", "rank": 4, "score": 380344.03314799553 }, { "content": "fn load_file(filename: &str) -> Result<String> {\n\n let mut reader = File::open(filename)?;\n\n let mut result = String::new();\n\n reader.read_to_string(&mut result)?;\n\n Ok(result)\n\n}\n", "file_path": "rust/hedgewars-server/src/server/io.rs", "rank": 6, "score": 350235.5656874673 }, { "content": "pub fn protocol_version_string(protocol_number: u16) -> &'static str {\n\n match protocol_number {\n\n 17 => \"0.9.7-dev\",\n\n 19 => \"0.9.7\",\n\n 20 => \"0.9.8-dev\",\n\n 21 => \"0.9.8\",\n\n 22 => \"0.9.9-dev\",\n\n 23 => \"0.9.9\",\n\n 24 => \"0.9.10-dev\",\n\n 25 => \"0.9.10\",\n\n 26 => \"0.9.11-dev\",\n\n 27 => \"0.9.11\",\n\n 28 => \"0.9.12-dev\",\n\n 29 => \"0.9.12\",\n\n 30 => \"0.9.13-dev\",\n\n 31 => \"0.9.13\",\n\n 32 => \"0.9.14-dev\",\n\n 33 => \"0.9.14\",\n\n 34 => 
\"0.9.15-dev\",\n\n 35 => \"0.9.14.1\",\n", "file_path": "rust/hedgewars-server/src/utils.rs", "rank": 7, "score": 326013.62805651117 }, { "content": "pub fn slice_u32_to_u8_mut(slice_u32: &mut [u32]) -> &mut [u8] {\n\n unsafe { from_raw_parts_mut::<u8>(slice_u32.as_mut_ptr() as *mut u8, slice_u32.len() * 4) }\n\n}\n", "file_path": "rust/mapgen/src/theme.rs", "rank": 8, "score": 325606.8036118002 }, { "content": "fn save_file(filename: &str, contents: &str) -> Result<()> {\n\n let mut writer = OpenOptions::new().create(true).write(true).open(filename)?;\n\n writer.write_all(contents.as_bytes())\n\n}\n\n\n", "file_path": "rust/hedgewars-server/src/server/io.rs", "rank": 9, "score": 317636.58601800445 }, { "content": "fn fill_pixels(pixels: &mut [u8], land: &Land2D<u32>) {\n\n for (surf_row, land_row) in pixels.chunks_mut(land.width() * 4).zip(land.rows()) {\n\n for (surf_pixel, land_pixel) in surf_row.chunks_mut(4).zip(land_row) {\n\n if let [b, g, r, a] = surf_pixel {\n\n *a = 255; *r = *land_pixel as u8;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/theme-editor/src/main.rs", "rank": 10, "score": 316007.11643682764 }, { "content": "fn fill_texture(surface: &mut Surface, land: &Land2D<u32>) {\n\n if surface.must_lock() {\n\n surface.with_lock_mut(|data| fill_pixels(data, land));\n\n } else {\n\n surface.without_lock_mut().map(|data| fill_pixels(data, land));\n\n }\n\n}\n\n\n", "file_path": "rust/theme-editor/src/main.rs", "rank": 11, "score": 300156.74243081873 }, { "content": "fn load_sprite(path: &Path) -> Result<ThemeSprite, ThemeLoadError> {\n\n let decoder = Decoder::new(BufReader::new(File::open(path)?));\n\n let (info, mut reader) = decoder.read_info()?;\n\n\n\n if info.color_type != ColorType::RGBA {\n\n return Err(ThemeLoadError::Format(format!(\n\n \"Unexpected format: {:?}\",\n\n info.color_type\n\n )));\n\n }\n\n let size = Size::new(info.width as usize, info.height as usize);\n\n\n\n let mut pixels: Vec2D<u32> = Vec2D::new(size, 0);\n\n 
reader.next_frame(slice_u32_to_u8_mut(pixels.as_mut_slice()))?;\n\n\n\n Ok(ThemeSprite { pixels })\n\n}\n\n\n", "file_path": "rust/mapgen/src/theme.rs", "rank": 12, "score": 296902.1651923226 }, { "content": "fn texturize(theme_dir: &Path, land: &Land2D<u8>, output_filename: &Path) {\n\n let theme = Theme::load(theme_dir).unwrap();\n\n let texture = MapGenerator::new().make_texture(land, &theme);\n\n\n\n let file = File::create(output_filename).unwrap();\n\n let ref mut w = BufWriter::new(file);\n\n\n\n let mut encoder = png::Encoder::new(w, land.width() as u32, land.height() as u32); // Width is 2 pixels and height is 1.\n\n encoder\n\n .set(png::ColorType::RGBA)\n\n .set(png::BitDepth::Eight);\n\n\n\n let mut writer = encoder.write_header().unwrap();\n\n\n\n writer.write_image_data(slice_u32_to_u8(texture.as_slice())).unwrap();\n\n}\n\n\n", "file_path": "rust/land_dump/src/main.rs", "rank": 13, "score": 296451.0716141914 }, { "content": "function onNewAmmoStore(groupIndex, hogIndex)\n\n\n\n\tSetAmmo(amSkip, 9, 0, 0, 0)\n\n\n\n\tif hogIndex == 0 then\n\n\t\tSetAmmo(amBazooka, 1, 0, 0, 0)\n\n\t\tSetAmmo(amGrenade, 1, 0, 0, 0)\n\n\t\tSetAmmo(amShotgun, 1, 0, 0, 0)\n\n\telseif hogIndex == 1 then\n\n\t\tSetAmmo(amGirder, 2, 0, 0, 0)\n\n\t\tSetAmmo(amBlowTorch, 1, 0, 0, 0)\n\n\t\tSetAmmo(amPickHammer, 1, 0, 0, 0)\n\n\telseif hogIndex == 2 then\n\n\t\tSetAmmo(amRope, 9, 0, 0, 0)\n\n\t\tSetAmmo(amParachute, 9, 0, 0, 0)\n\n\t\tSetAmmo(amFirePunch, 1, 0, 0, 0)\n\n\telseif hogIndex == 3 then\n\n\t\tSetAmmo(amDynamite, 1, 0, 0, 0)\n\n\t\tSetAmmo(amMine, 1, 0, 0, 0)\n\n\t\tSetAmmo(amDrill, 1, 0, 0, 0)\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 14, "score": 292553.8963129971 }, { "content": "fn tex_row_copy<LandT>(land_row: &[LandT], tex_row: &mut [u32], sprite_row: &[u32])\n\nwhere\n\n LandT: Default + PartialEq,\n\n{\n\n for ((land_v, tex_v), sprite_v) in 
land_row.iter().zip(tex_row.iter_mut()).zip(sprite_row) {\n\n *tex_v = if *land_v == LandT::default() {\n\n *sprite_v\n\n } else {\n\n 0\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{MapGenerator, TemplateType};\n\n\n\n #[test]\n\n fn simple_load() {\n\n let text = r#\"\n", "file_path": "rust/mapgen/src/lib.rs", "rank": 15, "score": 291600.8949277893 }, { "content": "fn check(executable: &str, data_prefix: &str, buffer: &[u8]) -> Result<Vec<Vec<u8>>, CheckError> {\n\n let mut replay = tempfile::NamedTempFile::new()?;\n\n\n\n for line in buffer.split(|b| *b == '\\n' as u8) {\n\n replay.write(&base64::decode(line)?)?;\n\n }\n\n\n\n let temp_file_path = replay.path();\n\n\n\n let mut home_dir = dirs::home_dir().unwrap();\n\n home_dir.push(\".hedgewars\");\n\n\n\n debug!(\"Checking replay in {}\", temp_file_path.to_string_lossy());\n\n\n\n let output = Command::new(executable)\n\n .arg(\"--user-prefix\")\n\n .arg(&home_dir)\n\n .arg(\"--prefix\")\n\n .arg(data_prefix)\n\n .arg(\"--nomusic\")\n", "file_path": "rust/hedgewars-checker/src/main.rs", "rank": 16, "score": 282588.2232543796 }, { "content": "fn new_texture() -> Option<NonZeroU32> {\n\n let mut handle = 0;\n\n unsafe {\n\n gl::GenTextures(1, &mut handle);\n\n }\n\n NonZeroU32::new(handle)\n\n}\n\n\n", "file_path": "rust/lib-hedgewars-engine/src/render/gl.rs", "rank": 17, "score": 279310.14263026323 }, { "content": "void glBufferData(GLenum target, GLsizeiptr size, const GLvoid * data, GLenum usage);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 18, "score": 275817.30353632715 }, { "content": " size_t size;\n", "file_path": "project_files/frontlib/util/buffer.c", "rank": 19, "score": 272201.36260729906 }, { "content": " size_t size;\n", "file_path": "project_files/frontlib/util/buffer.h", "rank": 20, "score": 272201.36260729906 }, { "content": "fn rect() -> Rect {\n\n Rect::new(rnd(LAND_WIDTH as i32), rnd(LAND_HEIGHT as i32), rnd(120) + 8, rnd(120) + 8)\n\n}\n\n\n", "file_path": 
"rust/theme-editor/src/main.rs", "rank": 21, "score": 270134.0062074157 }, { "content": " const void *data;\n", "file_path": "project_files/frontlib/util/buffer.h", "rank": 22, "score": 268759.68179853435 }, { "content": " void *data;\n", "file_path": "project_files/frontlib/util/buffer.c", "rank": 23, "score": 268759.68179853435 }, { "content": "fn draw_center_mark(land: &mut Land2D<u32>) {\n\n for i in 0..32 {\n\n land.draw_thick_line(Line::new(Point::new(LAND_WIDTH as i32 / 2, 0),\n\n Point::new(LAND_WIDTH as i32 / 2, LAND_HEIGHT as i32)), 10, 128);\n\n land.draw_thick_line(Line::new(Point::new(0, LAND_HEIGHT as i32 / 2),\n\n Point::new(LAND_WIDTH as i32, LAND_HEIGHT as i32 / 2)), 10, 128);\n\n land.fill_circle(Point::new(LAND_WIDTH as i32, LAND_HEIGHT as i32) / 2, 60, 128);\n\n }\n\n}\n\n\n", "file_path": "rust/theme-editor/src/main.rs", "rank": 24, "score": 267758.57093622454 }, { "content": "fn draw_random_lines(land: &mut Land2D<u32>) {\n\n for i in 0..32 {\n\n land.draw_thick_line(Line::new(point(), point()), rnd(5), 128);\n\n\n\n land.fill_circle(point(), rnd(60), 128);\n\n }\n\n}\n\n\n", "file_path": "rust/theme-editor/src/main.rs", "rank": 25, "score": 267758.57093622454 }, { "content": "-- version history\n\n-----------------\n\n-- version 0.1\n\n-----------------\n\n-- concept test\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 26, "score": 254202.67908938864 }, { "content": "------------------------\n\n-- version 0.4\n\n------------------------\n\n\n\n-- removed some old code/comments\n\n-- removed both shell and mortar as the primary and secondary weapons\n\n-- the primary weapon is now an explosive(barrel)\n\n\n\n-- added support for picking up barrels scattered about the map (backspace)\n\n-- added support for dragging around mines (enter toggles on/off)\n\n-- added support for primary fire being onAttackUp\n\n-- added a trail to indicate when the player has 5s or 
less left to tumble\n\n-- updated showmission to reflect changed controls and options\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Tumbler.lua", "rank": 27, "score": 254195.89188790077 }, { "content": "----------------\n\n-- version 0.5\n\n----------------\n\n-- provision for variable minetimer / demo mines set to 5000ms\n\n-- don't autoswitch if player only has 1 hog on his team\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 28, "score": 254195.89188790077 }, { "content": "------------------------\n\n-- version 0.5\n\n------------------------\n\n\n\n-- changed some of the user feedback\n\n-- i can't remember??\n\n-- substituted onAttackUp for onPrecise()\n\n-- brought in line with new velocity changes\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Tumbler.lua", "rank": 29, "score": 254195.89188790077 }, { "content": "----------------\n\n-- version 0.7\n\n----------------\n\n-- perhogadmsdf :D :D :D :D\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 30, "score": 254195.89188790077 }, { "content": "----------------\n\n-- version 0.3\n\n----------------\n\n-- added switching on start\n\n-- removed switch from engineer weaponset\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 31, "score": 254195.89188790077 }, { "content": "------------------------\n\n-- version 0.7\n\n------------------------\n\n\n\n-- a few code optimisations/performance tweaks\n\n-- removed some deprecated code\n\n-- fix a potential spawn bug\n\n\n\n-- improved HUD (now shows ammo counts)\n\n-- improved user feedback (less generic messages)\n\n-- colour-coded addcaptions to match hud :)\n\n\n\n-- base tumbling time now equals scheme 
turntime\n\n-- tumbling time extension is now based on the amount of health contained in crate\n\n-- new mines per turn based on minesnum\n\n-- new barrels per turn based on explosives\n\n\n\n-- added 2 more achievements: barrel eater and mine eater (like kills, don't do anything atm)\n\n-- slightly increased grab distance for explosives/mines\n\n-- slightly increased flamer velocity\n\n-- slightly decreased flamer volume\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Tumbler.lua", "rank": 32, "score": 254195.89188790077 }, { "content": "----------------\n\n-- version 0.6\n\n----------------\n\n-- for the meanwhile, don't drop any crates except health crates\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 33, "score": 254195.89188790077 }, { "content": "------------------------\n\n-- version 0.7.1\n\n------------------------\n\n\n\n-- redraw HUD on screen resolution change\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Tumbler.lua", "rank": 34, "score": 254195.89188790077 }, { "content": "------------------------\n\n-- version 0.6\n\n------------------------\n\n\n\n-- reduced starting \"ammo\"\n\n-- randomly spawn new barrels/mines on new turn\n\n-- updated user feedback\n\n-- better locs and coloured addcaptions\n\n-- added tag for turntime\n\n-- removed tractor beam\n\n-- added two new weapons and changed ammo handling\n\n-- health crates now give tumbler time, and wep/utility give flamer ammo\n\n-- explosives AND mines can be picked up to increase their relative ammo\n\n-- replaced \"no weapon\" selected message that hw serves\n\n-- modified crate frequencies a bit\n\n-- added some simple kill-based achievements, i think\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Tumbler.lua", "rank": 35, "score": 254195.89188790077 
}, { "content": "----------------\n\n-- version 0.2\n\n----------------\n\n-- added gfRandomOrder to gameflags\n\n-- removed some deprecated variables/methods\n\n-- fixed lack of portal reset\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 36, "score": 254195.89188790077 }, { "content": "----------------\n\n-- version 0.4\n\n----------------\n\n-- Attempted to:\n\n-- fix potential switch explit\n\n-- improve user feedback on start\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 37, "score": 254195.89188790077 }, { "content": "fn allocate_room(rooms: &mut Slab<HwRoom>) -> &mut HwRoom {\n\n let entry = rooms.vacant_entry();\n\n let room = HwRoom::new(entry.key());\n\n entry.insert(room)\n\n}\n\n\n", "file_path": "rust/hedgewars-server/src/core/server.rs", "rank": 38, "score": 253112.3992138313 }, { "content": "fn land_rect() -> Rect {\n\n Rect::at_origin(PLAY_SIZE)\n\n}\n\n\n", "file_path": "rust/theme-editor/src/main.rs", "rank": 39, "score": 250851.07579533957 }, { "content": "---------------------------------------------------\n\n---------------------------------------------------\n\n---------------------------------------------------\n\n--- Space Invasion Code Follows (1.1)\n\n---------------------------------------------------\n\n---------------------------------------------------\n\n-- VERSION HISTORY\n\n----------------\n\n-- version 0.1\n\n----------------\n\n-- conversion of tumbler into space invasion\n\n-- a million and one changes\n\n-- bells and whistles\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 40, "score": 250115.15069168375 }, { "content": "-- version history\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 41, "score": 
250108.56592855928 }, { "content": "------------------------\n\n-- version 0.9\n\n------------------------\n\n-- time for more 'EXPERIMENTS' mwahahahahahaha D:\n\n-- (hopefully) balanced Shield Miser\n\n-- bosses are no longer a redunkulous 50 points, but toned down to 30\n\n-- experimental radar (it's INTERACTIVE and math-heavy :D) (visual gears are safe... right? D:)\n\n-- bugfix and balance for multishot\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 42, "score": 250108.56592855928 }, { "content": "------------------------\n\n-- version 1.0\n\n------------------------\n\n-- if only version numbers actually worked like this, wouldn't that be awful :D\n\n-- added surfer achievement\n\n-- increased value of shield miser by 1 point per kill (OP?)\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 43, "score": 250108.56592855928 }, { "content": "------------------------\n\n-- version 0.8.1\n\n------------------------\n\n\n\n-- stop hiding non-existant 4th Tag\n\n-- redraw HUD on screen resolution change\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 44, "score": 250108.56592855928 }, { "content": "----------------\n\n-- version 0.2\n\n----------------\n\n-- code slowly getting cleaner, it still looks like a spaghetti monster tho\n\n-- lots of console tracking :/\n\n-- all visual gears are now compulsary (will probably revert this)\n\n-- implemented fMod to try combat desyncs and bring this in line with dev\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 45, "score": 250108.56592855928 }, { "content": "------------------------\n\n-- version 1.1\n\n------------------------\n\n-- fixed radar so that blips dont go past circs when you get very 
close\n\n-- added a missing loc for shield depletion\n\n-- increased delay to 1000 to try stop noobies missing their turn\n\n-- added sniper achievement for hits from over a 1000000 away\n\n-- added achievement for 3 \"sniper\" shots in a round\n\n-- added achievement for 3 \"point blank\" shots in a round\n\n-- added \"fierce Competition\" achievement for shooting an enemy hog (once per round)\n\n-- some support for more weapons later\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 46, "score": 250108.56592855928 }, { "content": "----------------\n\n-- version 0.3\n\n----------------\n\n-- values of scoring changed to 3:10, and now based on vCircScore\n\n-- time gained from killing a red circ increased from 3 to 4\n\n-- circles now spawn at a distance of at least 800 or until sanity limit\n\n-- roundsLimit now based off MinesTime (kinda, its an experiment)\n\n\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 47, "score": 250108.56592855928 }, { "content": "pub trait TypeTuple: Sized {\n\n fn get_types(types: &mut Vec<TypeId>);\n\n}\n\n\n\nimpl TypeTuple for () {\n\n fn get_types(_types: &mut Vec<TypeId>) {}\n\n}\n\n\n\nimpl<T: 'static> TypeTuple for &T {\n\n fn get_types(types: &mut Vec<TypeId>) {\n\n types.push(TypeId::of::<T>());\n\n }\n\n}\n\n\n", "file_path": "rust/hwphysics/src/data.rs", "rank": 48, "score": 249855.42611880752 }, { "content": "-- Version History\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Capture_the_Flag.lua", "rank": 49, "score": 246179.64289034874 }, { "content": "---------------------------------------------------\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Space_Invasion.lua", "rank": 50, "score": 246179.64289034874 }, { "content": "pub fn slice_u32_to_u8(slice_u32: &[u32]) 
-> &[u8] {\n\n unsafe { from_raw_parts::<u8>(slice_u32.as_ptr() as *const u8, slice_u32.len() * 4) }\n\n}\n\n\n", "file_path": "rust/mapgen/src/theme.rs", "rank": 51, "score": 244087.73486517294 }, { "content": "-- In this version:\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Multiplayer/Capture_the_Flag.lua", "rank": 52, "score": 243115.53338448406 }, { "content": "-- Check if a gear is inside a circle\n\nfunction gearIsInCircle(gear, x, y, r, useRadius)\n\n gx, gy = GetGearPosition(gear)\n\n if useRadius then\n\n r = r + GetGearRadius(gear)\n\n end\n\n if r ^ 2 >= (x - gx) ^ 2 + (y - gy) ^ 2 then\n\n return true\n\n end\n\n return false\n\nend\n", "file_path": "project_files/Android-build/SDL-android-project/assets/Data/Scripts/Utils.lua", "rank": 54, "score": 238234.94638733545 }, { "content": "pub fn get_lobby_join_data(server: &HwServer, response: &mut Response) {\n\n let client_id = response.client_id();\n\n\n\n let client = server.client(client_id);\n\n let nick = vec![client.nick.clone()];\n\n let mut flags = vec![];\n\n if client.is_registered() {\n\n flags.push(Flags::Registered)\n\n }\n\n if client.is_admin() {\n\n flags.push(Flags::Admin)\n\n }\n\n if client.is_contributor() {\n\n flags.push(Flags::Contributor)\n\n }\n\n\n\n let all_nicks: Vec<_> = server.collect_nicks(|_| true);\n\n\n\n let mut flag_selectors = [\n\n (\n", "file_path": "rust/hedgewars-server/src/handlers/common.rs", "rank": 55, "score": 236727.92896519654 }, { "content": "pub fn handle_io_result(\n\n state: &mut ServerState,\n\n client_id: ClientId,\n\n response: &mut Response,\n\n io_result: IoResult,\n\n) {\n\n match io_result {\n\n IoResult::AccountRegistered(is_registered) => {\n\n if !is_registered && state.server.is_registered_only() {\n\n response.add(Bye(REGISTRATION_REQUIRED.to_string()).send_self());\n\n response.remove_client(client_id);\n\n } else if is_registered {\n\n let client = &state.anteroom.clients[client_id];\n\n 
response.add(AskPassword(client.server_salt.clone()).send_self());\n\n } else if let Some(client) = state.anteroom.remove_client(client_id) {\n\n state.server.add_client(client_id, client);\n\n common::get_lobby_join_data(&state.server, response);\n\n }\n\n }\n\n IoResult::Account(None) => {\n", "file_path": "rust/hedgewars-server/src/handlers.rs", "rank": 56, "score": 232243.8744874894 }, { "content": "pub fn get_room_join_data<'a, I: Iterator<Item = &'a HwClient> + Clone>(\n\n client: &HwClient,\n\n room: &HwRoom,\n\n room_clients: I,\n\n response: &mut Response,\n\n) {\n\n #[inline]\n\n fn partition_nicks<'a, I, F>(clients: I, f: F) -> (Vec<String>, Vec<String>)\n\n where\n\n I: Iterator<Item = &'a HwClient> + Clone,\n\n F: Fn(&&'a HwClient) -> bool,\n\n {\n\n (\n\n clients\n\n .clone()\n\n .filter(|c| f(c))\n\n .map(|c| &c.nick)\n\n .cloned()\n\n .collect(),\n\n clients\n", "file_path": "rust/hedgewars-server/src/handlers/common.rs", "rank": 57, "score": 231823.13243077975 }, { "content": "void glShaderSource(GLuint shader, GLsizei count, const GLchar *const*string, const GLint *length);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 58, "score": 225654.0945258055 }, { "content": "void glUniformMatrix4fv(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 59, "score": 225629.48918659822 }, { "content": "void glUseProgram(GLuint program);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 60, "score": 225628.31512308866 }, { "content": "GLuint glCreateShader(GLenum shaderType);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 61, "score": 225627.9282031378 }, { "content": "void glCompileShader(GLuint shader);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 62, "score": 225627.9282031378 }, { "content": "void glAttachShader(GLuint program, GLuint shader);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 63, "score": 225627.9282031378 }, { "content": "void 
glDeleteShader(GLuint shader);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 64, "score": 225627.9282031378 }, { "content": "void glBindBuffer(GLenum target, GLuint buffer);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 65, "score": 225610.26007988158 }, { "content": "void glGenBuffers(GLsizei n, GLuint * buffers);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 66, "score": 225610.26007988158 }, { "content": "void glDeleteBuffers(GLsizei n, const GLuint * buffers);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 67, "score": 225610.26007988158 }, { "content": "pub fn get_vote_data(\n\n room_id: RoomId,\n\n result: &Result<VoteResult, VoteError>,\n\n response: &mut Response,\n\n) {\n\n match result {\n\n Ok(VoteResult::Submitted) => {\n\n response.add(server_chat(\"Your vote has been counted.\".to_string()).send_self())\n\n }\n\n Ok(VoteResult::Succeeded(_)) | Ok(VoteResult::Failed) => response.add(\n\n server_chat(\"Voting closed.\".to_string())\n\n .send_all()\n\n .in_room(room_id),\n\n ),\n\n Err(VoteError::NoVoting) => {\n\n response.add(server_chat(\"There's no voting going on.\".to_string()).send_self())\n\n }\n\n Err(VoteError::AlreadyVoted) => {\n\n response.add(server_chat(\"You already have voted.\".to_string()).send_self())\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/hedgewars-server/src/handlers/common.rs", "rank": 68, "score": 225199.36002894724 }, { "content": "pub fn get_room_config_impl(\n\n config: &RoomConfig,\n\n destination: Destination,\n\n response: &mut Response,\n\n) {\n\n response.add(\n\n ConfigEntry(\"FULLMAPCONFIG\".to_string(), config.to_map_config())\n\n .send_to_destination(destination.clone()),\n\n );\n\n for cfg in config.to_game_config() {\n\n response.add(cfg.to_server_msg().send_to_destination(destination.clone()));\n\n }\n\n}\n\n\n", "file_path": "rust/hedgewars-server/src/handlers/common.rs", "rank": 69, "score": 224917.23358969425 }, { "content": "pub fn is_name_illegal(name: &str) -> bool {\n\n 
name.len() > 40\n\n || name.trim().is_empty()\n\n || name.trim() != name\n\n || name\n\n .chars()\n\n .any(|c| \"$()*+?[]^{|}\\x7F\".contains(c) || ('\\x00'..='\\x1F').contains(&c))\n\n}\n\n\n", "file_path": "rust/hedgewars-server/src/utils.rs", "rank": 70, "score": 224412.7502543677 }, { "content": "pub fn get_start_game_data(\n\n server: &HwServer,\n\n room_id: RoomId,\n\n result: Result<Vec<String>, StartGameError>,\n\n response: &mut Response,\n\n) {\n\n match result {\n\n Ok(room_nicks) => {\n\n let room = server.room(room_id);\n\n response.add(RunGame.send_all().in_room(room.id));\n\n response.add(\n\n ClientFlags(add_flags(&[Flags::InGame]), room_nicks)\n\n .send_all()\n\n .in_room(room.id),\n\n );\n\n\n\n let room_master = room.master_id.map(|id| server.client(id));\n\n get_room_update(None, room, room_master, response);\n\n }\n\n Err(StartGameError::NotEnoughClans) => {\n\n response.warn(\"The game can't be started with less than two clans!\")\n\n }\n\n Err(StartGameError::NotReady) => response.warn(\"Not all players are ready\"),\n\n Err(StartGameError::AlreadyInGame) => response.warn(\"The game is already in progress\"),\n\n }\n\n}\n\n\n", "file_path": "rust/hedgewars-server/src/handlers/common.rs", "rank": 71, "score": 221686.1774126633 }, { "content": "pub fn get_remove_teams_data(\n\n room_id: RoomId,\n\n was_in_game: bool,\n\n removed_teams: Vec<String>,\n\n response: &mut Response,\n\n) {\n\n if was_in_game {\n\n for team_name in &removed_teams {\n\n let remove_msg = to_engine_msg(once(b'F').chain(team_name.bytes()));\n\n\n\n response.add(\n\n ForwardEngineMessage(vec![remove_msg])\n\n .send_all()\n\n .in_room(room_id)\n\n .but_self(),\n\n );\n\n }\n\n } else {\n\n for team_name in removed_teams {\n\n response.add(TeamRemove(team_name).send_all().in_room(room_id));\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/hedgewars-server/src/handlers/common.rs", "rank": 72, "score": 221686.1774126633 }, { "content": "GLint glGetUniformLocation(GLuint program, 
const GLchar *name);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 73, "score": 221127.3382733155 }, { "content": "void glVertexAttribPointer(GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const GLvoid * pointer);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 74, "score": 221126.57009666503 }, { "content": "void glDisableVertexAttribArray(GLuint index);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 75, "score": 216806.21544169472 }, { "content": "void glEnableVertexAttribArray(GLuint index);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 76, "score": 216806.21544169472 }, { "content": "void glGetShaderInfoLog(GLuint shader, GLsizei maxLength, GLsizei *length, GLchar *infoLog);\n", "file_path": "project_files/hwc/rtl/GL.h", "rank": 77, "score": 216805.4741038749 }, { "content": " public NativeSizeT drawDataSize;\n", "file_path": "project_files/Android-build/SDL-android-project/src/org/hedgewars/hedgeroid/frontlib/Frontlib.java", "rank": 78, "score": 216762.36749203713 }, { "content": "fn main() {\n\n let use_wgpu = true;\n\n let mut event_loop = EventsLoop::new();\n\n let (w, h) = (1024.0, 768.0);\n\n\n\n let mut context = HwRendererContext::new(&event_loop, dpi::LogicalSize::new(w, h), use_wgpu);\n\n\n\n let mut engine = EngineInstance::new();\n\n if !use_wgpu {\n\n engine.world.create_renderer(w as u16, h as u16);\n\n }\n\n\n\n let mut dragging = false;\n\n\n\n use std::time::Instant;\n\n\n\n let mut now = Instant::now();\n\n let mut update_time = Instant::now();\n\n let mut render_time = Instant::now();\n\n\n", "file_path": "rust/hwrunner/src/main.rs", "rank": 79, "score": 216426.90899005442 }, { "content": "#define const\n\n\n", "file_path": "project_files/frontlib/extra/jnacontrol.c", "rank": 80, "score": 216185.6743432476 }, { "content": " int size ; /** Storage size */\n", "file_path": "project_files/frontlib/iniparser/dictionary.h", "rank": 81, "score": 216152.17268496545 }, { "content": " 
unsigned char str[MAX_ANSISTRING_LENGTH];\n", "file_path": "project_files/hwc/rtl/pas2c.h", "rank": 82, "score": 216127.98559062494 }, { "content": "int main(int argc, char *argv[])\n\n{\n\n dictionary * d ;\n\n char * val ;\n\n int i ;\n\n char cval[90] ;\n\n\n\n /* Allocate dictionary */\n\n printf(\"allocating...\\n\");\n\n d = dictionary_new(0);\n\n\n\n /* Set values in dictionary */\n\n printf(\"setting %d values...\\n\", NVALS);\n\n for (i=0 ; i<NVALS ; i++) {\n\n sprintf(cval, \"%04d\", i);\n\n dictionary_set(d, cval, \"salut\");\n\n }\n\n printf(\"getting %d values...\\n\", NVALS);\n\n for (i=0 ; i<NVALS ; i++) {\n\n sprintf(cval, \"%04d\", i);\n\n val = dictionary_get(d, cval, DICT_INVALID_KEY);\n\n if (val==DICT_INVALID_KEY) {\n\n printf(\"cannot get value for key [%s]\\n\", cval);\n\n }\n\n }\n\n printf(\"unsetting %d values...\\n\", NVALS);\n\n for (i=0 ; i<NVALS ; i++) {\n\n sprintf(cval, \"%04d\", i);\n\n dictionary_unset(d, cval);\n\n }\n\n if (d->n != 0) {\n\n printf(\"error deleting values\\n\");\n\n }\n\n printf(\"deallocating...\\n\");\n\n dictionary_del(d);\n\n return 0 ;\n", "file_path": "project_files/frontlib/iniparser/dictionary.c", "rank": 83, "score": 216097.3244307483 }, { "content": " Byte CurrencyFormat;\n", "file_path": "project_files/hwc/rtl/Types.h", "rank": 84, "score": 215365.05261688313 }, { "content": "pub fn integral_sqrt_ext(mut value: u128) -> u128 {\n\n let mut digit_sqr =\n\n 0x40000000_00000000_00000000_00000000u128.wrapping_shr(value.leading_zeros() & 0xFFFF_FFFE);\n\n let mut result = 0u128;\n\n\n\n while digit_sqr != 0 {\n\n let approx = result + digit_sqr;\n\n result >>= 1;\n\n\n\n if approx <= value {\n\n value -= approx;\n\n result += digit_sqr;\n\n }\n\n digit_sqr >>= 2;\n\n }\n\n result\n\n}\n\n\n", "file_path": "rust/fpnum/src/lib.rs", "rank": 85, "score": 214156.67942435836 }, { "content": "#[derive(Clone, Copy, Debug, Default)]\n\nstruct LookupEntry {\n\n index: Option<NonZeroU16>,\n\n block_index: 
u16,\n\n}\n\n\n\nimpl LookupEntry {\n\n fn new(block_index: u16, index: u16) -> Self {\n\n Self {\n\n index: unsafe { Some(NonZeroU16::new_unchecked(index + 1)) },\n\n block_index,\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/hwphysics/src/data.rs", "rank": 86, "score": 214024.55456347196 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n println!(\"{:?}\", opt);\n\n\n\n let template =\n\n if let Some(path) = opt.templates_file {\n\n let mut result = String::new();\n\n File::open(path)\n\n .expect(\"Unable to read templates file\")\n\n .read_to_string(&mut result);\n\n\n\n let mut generator = MapGenerator::new();\n\n\n\n let source = &result[..];\n\n\n\n generator.import_yaml_templates(source);\n\n\n\n let template_type = &opt.template_type\n\n .expect(\"No template type specified\");\n\n generator.get_template(template_type)\n", "file_path": "rust/land_dump/src/main.rs", "rank": 87, "score": 213753.69043455017 }, { "content": "fn main() {\n\n let hwlib = Library::new(\"libhedgewars_engine.so\").unwrap();\n\n\n\n unsafe {\n\n let engine = Engine {\n\n protocol_version: hwlib.get(b\"protocol_version\").unwrap(),\n\n start_engine: hwlib.get(b\"start_engine\").unwrap(),\n\n cleanup: hwlib.get(b\"cleanup\").unwrap(),\n\n };\n\n\n\n println!(\"Hedgewars engine, protocol version {}\", engine.protocol_version.deref()());\n\n }\n\n}\n", "file_path": "rust/hedgewars-engine/src/main.rs", "rank": 88, "score": 213753.69043455017 }, { "content": "fn main() {\n\n stderrlog::new()\n\n .verbosity(3)\n\n .timestamp(stderrlog::Timestamp::Second)\n\n .module(module_path!())\n\n .init()\n\n .unwrap();\n\n\n\n let mut frontend_settings = dirs::home_dir().unwrap();\n\n frontend_settings.push(\".hedgewars/settings.ini\");\n\n\n\n let i = Ini::load_from_file(frontend_settings.to_str().unwrap()).unwrap();\n\n let username = i.get_from(Some(\"net\"), \"nick\").unwrap();\n\n let password = i.get_from(Some(\"net\"), \"passwordhash\").unwrap();\n\n\n\n let mut exe = 
\"/usr/local/bin/hwengine\".to_string();\n\n let mut prefix = \"/usr/local/share/hedgewars/Data\".to_string();\n\n {\n\n let mut ap = ArgumentParser::new();\n\n ap.set_description(\"Game replay checker for hedgewars.\");\n", "file_path": "rust/hedgewars-checker/src/main.rs", "rank": 89, "score": 213753.69043455017 }, { "content": "fn main() {\n\n let sdl = sdl2::init().unwrap();\n\n let _image = sdl2::image::init(sdl2::image::INIT_PNG).unwrap();\n\n let events = sdl.event().unwrap();\n\n\n\n let mut pump = sdl.event_pump().unwrap();\n\n let video = sdl.video().unwrap();\n\n let window = video.window(\"Theme Editor\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .build().unwrap();\n\n\n\n let mut source = init_source();\n\n let mut land = source.next(\n\n LandGenerationParameters::new(0, u32::max_value(), 1, false, false));\n\n draw_center_mark(&mut land);\n\n\n\n let mut land_surf = Surface::new(LAND_WIDTH, LAND_HEIGHT, PixelFormatEnum::ARGB8888).unwrap();\n\n\n\n fill_texture(&mut land_surf, &land);\n\n\n", "file_path": "rust/theme-editor/src/main.rs", "rank": 90, "score": 213753.69043455017 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n info!(\"Hedgewars game server, protocol {}\", utils::SERVER_VERSION);\n\n\n\n let args: Vec<String> = env::args().collect();\n\n let mut opts = Options::new();\n\n\n\n opts.optopt(\"p\", \"port\", \"port - defaults to 46631\", \"PORT\");\n\n opts.optflag(\"h\", \"help\", \"help\");\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(e) => {\n\n println!(\"{}\\n{}\", e, opts.short_usage(\"\"));\n\n return;\n\n }\n\n };\n\n if matches.opt_present(\"h\") {\n\n println!(\"{}\", opts.usage(PROGRAM_NAME));\n\n return;\n", "file_path": "rust/hedgewars-server/src/main.rs", "rank": 91, "score": 213753.69043455017 }, { "content": "#[inline]\n\nfn get_u32(value: Option<NonZeroU32>) -> u32 {\n\n value.map_or(0, |v| v.get())\n\n}\n\n\n", "file_path": "rust/lib-hedgewars-engine/src/render/gl.rs", 
"rank": 92, "score": 212971.86798378092 }, { "content": "function onNewAmmoStore(groupIndex, hogIndex)\n\n\n\n\tSetAmmo(amSkip, 9, 0, 0, 0)\n\n\tgroupIndex = groupIndex + 1\n\n\thogIndex = hogIndex + 1\n\n\n\n\tif teamRoles[groupIndex][hogIndex] == 'S' then\n\n\t\tSetAmmo(amBazooka, 1, 0, 0, 0)\n\n\t\tSetAmmo(amGrenade, 1, 0, 0, 0)\n\n\t\tSetAmmo(amShotgun, 1, 0, 0, 0)\n\n\telseif teamRoles[groupIndex][hogIndex] == 'E' then\n\n\t\tSetAmmo(amGirder, 2, 0, 0, 0)\n\n\t\tSetAmmo(amBlowTorch, 1, 0, 0, 0)\n\n\t\tSetAmmo(amPickHammer, 1, 0, 0, 0)\n\n\telseif teamRoles[groupIndex][hogIndex] == 'N' then\n\n\t\tSetAmmo(amRope, 9, 0, 0, 0)\n\n\t\tSetAmmo(amParachute, 9, 0, 0, 0)\n\n\t\tSetAmmo(amFirePunch, 1, 0, 0, 0)\n\n\telseif teamRoles[groupIndex][hogIndex] == 'D' then\n\n\t\tSetAmmo(amDynamite, 1, 0, 0, 0)\n", "file_path": "share/hedgewars/Data/Scripts/Multiplayer/The_Specialists.lua", "rank": 93, "score": 212846.43230372493 }, { "content": "int main(int argc, char *argv[]) {\n\n if(init()) {\n\n return -1;\n\n }\n\n\n\n puts(\"Please enter a nickname:\");\n\n flib_gets(nickname, sizeof(nickname));\n\n\n\n netconn = flib_netconn_create(nickname, metacfg, DATA_DIR\"\\\\\", \"140.247.62.101\", 46631);\n\n if(!netconn) {\n\n flib_quit();\n\n return -1;\n\n }\n\n\n\n flib_netconn_onConnected(netconn, handleNetConnected, NULL);\n\n flib_netconn_onDisconnected(netconn, handleNetDisconnect, NULL);\n\n flib_netconn_onChat(netconn, handleChat, NULL);\n\n flib_netconn_onEnterRoom(netconn, handleEnterRoom, NULL);\n\n flib_netconn_onRunGame(netconn, handleRunGame, NULL);\n\n flib_netconn_onEngineMessage(netconn, handleEmFromNet, NULL);\n\n flib_netconn_onRoomJoin(netconn, handleRoomJoin, NULL);\n\n flib_netconn_onRoomLeave(netconn, handleRoomLeave, NULL);\n\n flib_netconn_onReadyState(netconn, handleReady, NULL);\n\n flib_netconn_onNickTaken(netconn, handleNickTaken, NULL);\n\n flib_netconn_onPasswordRequest(netconn, handlePwRequest, NULL);\n\n flib_netconn_onMessage(netconn, 
handleMessage, NULL);\n\n flib_netconn_onTeamAccepted(netconn, handleTeamAccepted, NULL);\n\n flib_netconn_onMapChanged(netconn, handleMapChanged, NULL);\n\n flib_netconn_onLeaveRoom(netconn, handleLeaveRoom, NULL);\n\n flib_netconn_onCfgScheme(netconn, handleSchemeChanged, NULL);\n\n flib_netconn_onWeaponsetChanged(netconn, handleWeaponsetChanged, NULL);\n\n flib_netconn_onHogCountChanged(netconn, handleHogcountChanged, NULL);\n\n flib_netconn_onRoomAdd(netconn, handleRoomAdd, NULL);\n\n flib_netconn_onRoomDelete(netconn, handleRoomDelete, NULL);\n\n flib_netconn_onScriptChanged(netconn, handleScriptChanged, NULL);\n\n flib_netconn_onTeamAdd(netconn, handleTeamAdd, NULL);\n\n flib_netconn_onTeamDelete(netconn, handleTeamDelete, NULL);\n\n flib_netconn_onTeamColorChanged(netconn, handleTeamColorChanged, NULL);\n\n\n\n INPUT_RECORD inputRecord;\n\n DWORD eventCount = 0;\n\n\n\n while(netconn || gameconn) {\n\n tick();\n\n if(netconn && netConnected) {\n\n while(PeekConsoleInput(hStdin, &inputRecord, 1, &eventCount) && eventCount>0) {\n\n if(inputRecord.EventType != KEY_EVENT) {\n\n ReadConsoleInput(hStdin, &inputRecord, 1, &eventCount);\n\n } else {\n\n printf(\"%s: \", nickname);\n\n char input[256];\n\n if(!flib_gets(input, sizeof(input))) {\n\n if(!memcmp(\"/quit\", input, strlen(\"/quit\"))) {\n\n flib_netconn_send_quit(netconn, \"Player quit.\");\n\n } else if(!memcmp(\"/describe \", input, strlen(\"/describe \"))) {\n\n const char *roomname = input+strlen(\"/describe \");\n\n /*const flib_roomlist *roomlist = flib_netconn_get_roomlist(netconn);\n\n flib_room *room = flib_roomlist_find(roomlist, roomname);\n\n if(!room) {\n\n puts(\"Unknown room.\");\n\n } else {\n\n char *text = flib_asprintf(\n\n \"%s is a room created by %s, where %i players (%i teams) are %s on %s%s, using the %s scheme and %s weaponset.\",\n\n room->name,\n\n room->owner,\n\n room->playerCount,\n\n room->teamCount,\n\n room->inProgress ? 
\"fighting\" : \"preparing to fight\",\n\n room->map[0]=='+' ? \"\" : \"the map \",\n\n !strcmp(\"+rnd+\", room->map) ? \"a random map\" :\n\n !strcmp(\"+maze+\", room->map) ? \"a random maze\" :\n\n !strcmp(\"+drawn+\", room->map) ? \"a hand-drawn map\" :\n\n room->map,\n\n room->scheme,\n\n room->weapons);\n\n if(text) {\n\n puts(text);\n\n }\n\n free(text);\n\n }*/\n\n } else if(!memcmp(\"/join \", input, strlen(\"/join \"))) {\n\n const char *roomname = input+strlen(\"/join \");\n\n flib_netconn_send_joinRoom(netconn, roomname);\n\n } else if(!memcmp(\"/ready\", input, strlen(\"/ready\"))) {\n\n flib_netconn_send_toggleReady(netconn);\n\n } else if(!memcmp(\"/loglevel \", input, strlen(\"/loglevel \"))) {\n\n int loglevel = atoi(input+strlen(\"/loglevel \"));\n\n flib_log_setLevel(loglevel);\n\n } else if(!memcmp(\"/list\", input, strlen(\"/list\"))) {\n\n if(flib_netconn_is_in_room_context(netconn)) {\n\n printTeamList();\n\n } else {\n\n puts(\"From this big and expansive lobby, hallways branch off to these rooms:\");\n\n //printRoomList();\n\n }\n\n } else if(!memcmp(\"/addteam \", input, strlen(\"/addteam \"))) {\n\n const char *teamname = input+strlen(\"/addteam \");\n\n if(!flib_contains_dir_separator(teamname)) {\n\n char *teamfilename = flib_asprintf(\"%s.hwt\", teamname);\n\n if(teamfilename) {\n\n flib_team *team = flib_team_from_ini(teamfilename);\n\n if(team) {\n\n flib_netconn_send_addTeam(netconn, team);\n\n } else {\n\n printf(\"Teamfile %s not found.\\n\", teamfilename);\n\n }\n\n flib_team_destroy(team);\n\n }\n\n free(teamfilename);\n\n }\n\n } else if(strlen(input)>0) {\n\n flib_netconn_send_chat(netconn, input);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n fflush(stdout);\n\n Sleep(10);\n\n }\n\n\n\n\n\n flib_metascheme_release(metacfg);\n\n return 0;\n", "file_path": "project_files/cmdlineClient/cmdlineClient.c", "rank": 94, "score": 212249.4409352081 }, { "content": "pub fn physfs_error_as_io_error() -> Error {\n\n Error::new(ErrorKind::Other,\n\n 
&format!(\"PhysicsFS Error: `{}`\", PhysFSContext::get_last_error())[..])\n\n}\n", "file_path": "rust/physfs-rs/src/physfs/util.rs", "rank": 95, "score": 211567.57721769818 }, { "content": "static int allocateExtraCapacity(flib_vector *vec, size_t extraCapacity) {\n\n if(extraCapacity <= SIZE_MAX - vec->capacity) {\n\n return setCapacity(vec, vec->capacity + extraCapacity);\n\n } else {\n\n return -1;\n\n }\n", "file_path": "project_files/frontlib/util/buffer.c", "rank": 96, "score": 210462.35265600527 }, { "content": "use integral_geometry::{GridIndex, Point, Rect, Size};\n\nuse land2d::Land2D;\n\nuse vec2d::Vec2D;\n\n\n\nuse super::{\n\n camera::Camera,\n\n gl::{\n\n Buffer, BufferType, BufferUsage, InputElement, InputFormat, InputLayout, PipelineState,\n\n Shader, Texture2D, TextureDataType, TextureFilter, TextureFormat, TextureInternalFormat,\n\n VariableBinding,\n\n },\n\n};\n\n\n\nuse std::num::NonZeroU32;\n\n\n\nconst VERTEX_SHADER: &'static str = r#\"\n\n#version 150\n\n\n\nin vec2 Position;\n\nin vec3 Uv;\n", "file_path": "rust/lib-hedgewars-engine/src/render/map.rs", "rank": 98, "score": 112.57846037488551 } ]
Rust
src/views/help.rs
RedlineTriad/rshub
778d0a2f2d66998b6b1669def1f79573ccb54781
use std::io; use std::sync::Arc; use crossterm::event::KeyCode; use tui::layout::Rect; use tui::{ backend::CrosstermBackend, layout::{Alignment, Constraint, Direction, Layout, Margin}, style::{Modifier, Style}, text::Span, widgets::{Block, Borders, Paragraph, Wrap}, Frame, }; use crate::app::AppAction; use crate::input::UserInput; use crate::states::help::HotKey; use crate::states::AppState; use crate::views::{AppView, Drawable, HotKeys, InputProcessor, Named}; pub struct Help {} impl AppView for Help {} #[async_trait::async_trait] impl Named for Help { fn name(&self) -> String { "Help Screen".to_owned() } } #[async_trait::async_trait] impl HotKeys for Help { fn hotkeys(&self) -> Vec<HotKey> { vec![HotKey { description: "Close help", key: KeyCode::Esc, modifiers: None, }] } } #[async_trait::async_trait] impl InputProcessor for Help { async fn on_input(&mut self, input: &UserInput, _: Arc<AppState>) -> Option<AppAction> { match input { UserInput::Back => Some(AppAction::CloseView), _ => None, } } } #[async_trait::async_trait] impl Drawable for Help { async fn draw( &mut self, f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect, app: Arc<AppState>, ) { let help = app.help.lock().unwrap(); let list_length = (help.global_hotkeys.len() + help.local_hotkeys.len()) as u16 + 2 + 1 + 1; let vertical_margin = if list_length < area.height { (area.height - list_length) / 2 } else { 0 }; f.render_widget( Block::default() .title(help.view_name.clone()) .title_alignment(Alignment::Center) .borders(Borders::ALL), area, ); let chunks = Layout::default() .direction(Direction::Vertical) .constraints(vec![ Constraint::Length(vertical_margin), Constraint::Length(1), Constraint::Length(help.global_hotkeys.len() as u16), Constraint::Length(1), Constraint::Length(help.local_hotkeys.len() as u16), ]) .split(area.inner(&Margin { vertical: 1, horizontal: 1, })); f.render_widget( Block::default() .title(Span::styled( "GLOBAL", Style::default().add_modifier(Modifier::BOLD), )) 
.title_alignment(Alignment::Center), chunks[1], ); f.render_widget( Block::default() .title(Span::styled( "LOCAL", Style::default().add_modifier(Modifier::BOLD), )) .title_alignment(Alignment::Center), chunks[3], ); let chunks_global = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[2]); let chunks_local = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[4]); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_global[0], ); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_global[1], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_local[0], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_local[1], ); } }
use std::io; use std::sync::Arc; use crossterm::event::KeyCode; use tui::layout::Rect; use tui::{ backend::CrosstermBackend, layout::{Alignment, Constraint, Direction, Layout, Margin}, style::{Modifier, Style}, text::Span, widgets::{Block, Borders, Paragraph, Wrap}, Frame, }; use crate::app::AppAction; use crate::input::UserInput; use crate::states::help::HotKey; use crate::states::AppState; use crate::views::{AppView, Drawable, HotKeys, InputProcessor, Named}; pub struct Help {} impl AppView for Help {} #[async_trait::async_trait] impl Named for Help { fn name(&self) -> String { "Help Screen".to_owned() } } #[async_trait::async_trait] impl HotKeys for Help { fn hotkeys(&self) -> Vec<HotKey> { vec![HotKey { description: "Close help", key: KeyCode::Esc, modifiers: None, }] } } #[async_trait::async_trait] impl InputProcessor for Help { async fn on_input(&mut self, input: &UserInput, _: Arc<AppState>) -> Option<AppAction> { match input { UserInput::Back => Some(AppAction::CloseView), _ => None, } } } #[async_trait::async_trait] impl Drawable for Help { async fn draw( &mut self, f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect, app: Arc<AppState>, ) { let help = app.help.lock().unwrap(); let list_length = (help.global_hotkeys.len() + help.local_hotkeys.len()) as u16 + 2 + 1 + 1; let vertical_margin = if list_length < area.height { (area.height - list_length) / 2 } else { 0 }; f.render_widget( Block::default() .title(help.view_name.clone()) .title_alignment(Alignment::Center) .borders(Borders::ALL), area, );
f.render_widget( Block::default() .title(Span::styled( "GLOBAL", Style::default().add_modifier(Modifier::BOLD), )) .title_alignment(Alignment::Center), chunks[1], ); f.render_widget( Block::default() .title(Span::styled( "LOCAL", Style::default().add_modifier(Modifier::BOLD), )) .title_alignment(Alignment::Center), chunks[3], ); let chunks_global = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[2]); let chunks_local = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[4]); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_global[0], ); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_global[1], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_local[0], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_local[1], ); } }
let chunks = Layout::default() .direction(Direction::Vertical) .constraints(vec![ Constraint::Length(vertical_margin), Constraint::Length(1), Constraint::Length(help.global_hotkeys.len() as u16), Constraint::Length(1), Constraint::Length(help.local_hotkeys.len() as u16), ]) .split(area.inner(&Margin { vertical: 1, horizontal: 1, }));
assignment_statement
[ { "content": "fn draw_info(f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect, app: Arc<AppState>) {\n\n let chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints(vec![Constraint::Percentage(100)])\n\n .split(area);\n\n\n\n f.render_widget(\n\n Block::default()\n\n .borders(Borders::ALL)\n\n .title(Span::styled(\n\n format!(\n\n \" {} {} {} \",\n\n env!(\"CARGO_PKG_NAME\"),\n\n DOT,\n\n env!(\"CARGO_PKG_VERSION\")\n\n ),\n\n Style::default().add_modifier(Modifier::BOLD),\n\n ))\n\n .title_alignment(Alignment::Center),\n\n chunks[0],\n", "file_path": "src/views/servers.rs", "rank": 0, "score": 165782.6570595842 }, { "content": "pub trait AppView: Drawable + InputProcessor + HotKeys + Named {}\n", "file_path": "src/views/mod.rs", "rank": 1, "score": 144893.5935402484 }, { "content": "#[cfg(feature = \"geolocation\")]\n\nfn serde_unknown_string_field() -> String {\n\n \"unknown\".to_owned()\n\n}\n\n\n\n#[cfg(feature = \"geolocation\")]\n\n#[derive(Deserialize)]\n\npub struct LocationJson {\n\n pub longitude: Option<f64>,\n\n pub latitude: Option<f64>,\n\n #[serde(default = \"serde_unknown_string_field\")]\n\n pub country: String,\n\n #[serde(default = \"serde_unknown_string_field\")]\n\n pub city: String,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Location {\n\n pub longitude: f64,\n\n pub latitude: f64,\n\n pub country: String,\n", "file_path": "src/datatypes/geolocation.rs", "rank": 2, "score": 84419.26654888602 }, { "content": "#[async_trait::async_trait]\n\npub trait InputProcessor {\n\n async fn on_input(&mut self, input: &UserInput, app: Arc<AppState>) -> Option<AppAction>;\n\n}\n\n\n", "file_path": "src/views/mod.rs", "rank": 3, "score": 80016.2612426845 }, { "content": "#[async_trait::async_trait]\n\npub trait Drawable {\n\n async fn draw(\n\n &mut self,\n\n f: &mut Frame<CrosstermBackend<io::Stdout>>,\n\n area: Rect,\n\n app: Arc<AppState>,\n\n );\n\n}\n\n\n", "file_path": "src/views/mod.rs", "rank": 4, "score": 
67552.46655556593 }, { "content": "pub trait Named {\n\n fn name(&self) -> String;\n\n}\n\n\n", "file_path": "src/views/mod.rs", "rank": 5, "score": 67548.24064835407 }, { "content": "// thanks kalmari\n\nfn greater_than_5(s: &str) -> Result<u64, String> {\n\n let min_value = 5;\n\n\n\n let v = s.parse::<u64>().map_err(|e| e.to_string())?;\n\n if v < min_value {\n\n Err(format!(\"Value must be >= {}\", min_value))\n\n } else {\n\n Ok(v)\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 6, "score": 66321.59183981521 }, { "content": "// tui states look same, but do not implement trait, so I made one\n\npub trait TuiState {\n\n fn selected(&self) -> Option<usize>;\n\n fn select(&mut self, index: Option<usize>);\n\n}\n\n\n\nimpl TuiState for ListState {\n\n fn selected(&self) -> Option<usize> {\n\n ListState::selected(self)\n\n }\n\n\n\n fn select(&mut self, index: Option<usize>) {\n\n ListState::select(self, index)\n\n }\n\n}\n\n\n\nimpl TuiState for TableState {\n\n fn selected(&self) -> Option<usize> {\n\n TableState::selected(self)\n\n }\n\n\n", "file_path": "src/states/mod.rs", "rank": 7, "score": 65012.848804810434 }, { "content": "pub trait HotKeys {\n\n fn hotkeys(&self) -> Vec<HotKey> {\n\n Vec::new()\n\n }\n\n}\n\n\n", "file_path": "src/views/mod.rs", "rank": 8, "score": 64984.034699516495 }, { "content": "fn cleanup_terminal(terminal: Option<&mut Terminal<CrosstermBackend<io::Stdout>>>) {\n\n disable_raw_mode().unwrap();\n\n\n\n let mut stdout = io::stdout();\n\n\n\n // FIXME: if mouse is outside terminal, it is not released properly and garbage\n\n // is printed after panic\n\n execute!(stdout, LeaveAlternateScreen, DisableMouseCapture).unwrap();\n\n\n\n if let Some(terminal) = terminal {\n\n terminal.show_cursor().unwrap();\n\n }\n\n}\n\n\n\nconst fn verbosity_to_log_level(verbosity: u32) -> LevelFilter {\n\n let mut verbosity = verbosity;\n\n\n\n #[cfg(debug_assertions)]\n\n {\n\n // jump straight to debug\n", "file_path": "src/main.rs", "rank": 9, 
"score": 54415.70889240288 }, { "content": "fn setup_logger(config: &AppConfig) -> Result<(), io::Error> {\n\n simplelog::WriteLogger::init(\n\n verbosity_to_log_level(config.verbose),\n\n simplelog::ConfigBuilder::default()\n\n .set_level_padding(simplelog::LevelPadding::Right)\n\n .set_thread_level(LevelFilter::Off)\n\n .add_filter_ignore_str(\"mio\")\n\n .add_filter_ignore_str(\"want\")\n\n .add_filter_ignore_str(\"rustls\")\n\n .add_filter_ignore_str(\"reqwest\")\n\n .add_filter_ignore_str(\"tokio_util\")\n\n .build(),\n\n std::fs::File::create(&config.dirs.log_file)?,\n\n )\n\n .expect(\"creating logger\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 50693.64339496006 }, { "content": "#[derive(Clap, Debug)]\n\n#[clap(version = clap::crate_version!(), about = \"UnityStation server hub\")]\n\nstruct CliArgs {\n\n /// Log file path\n\n #[clap(short, long)]\n\n log_file: Option<PathBuf>,\n\n /// Server list update interval, in seconds (must be >= 5)\n\n #[clap(short, long, default_value = \"20\", parse(try_from_str = greater_than_5))]\n\n update_interval: u64,\n\n /// A level of verbosity, and can be used multiple times\n\n #[clap(short, long, parse(from_occurrences))]\n\n verbose: u32,\n\n /// Geolocation provider (ifconfig.co compatible)\n\n #[cfg(feature = \"geolocation\")]\n\n #[clap(long, default_value = DEFAULT_GEO_PROVIDER_URL)]\n\n geo_provider: reqwest::Url,\n\n /// Offline mode\n\n #[clap(long)]\n\n offline: bool,\n\n /// Disable download URL verification\n\n #[clap(long)]\n\n unchecked_downloads: bool,\n", "file_path": "src/config.rs", "rank": 11, "score": 45956.83409380518 }, { "content": "fn setup_panic_hook() {\n\n #[cfg(not(debug_assertions))]\n\n let original_hook = std::panic::take_hook();\n\n\n\n std::panic::set_hook(Box::new(move |panic_info| {\n\n cleanup_terminal(None);\n\n\n\n #[cfg(debug_assertions)]\n\n {\n\n better_panic::Settings::auto().create_panic_handler()(panic_info);\n\n }\n\n 
#[cfg(not(debug_assertions))]\n\n {\n\n original_hook(panic_info);\n\n }\n\n }));\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 12, "score": 43087.57683571843 }, { "content": "use std::sync::mpsc;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse crossterm::event::{self, Event, KeyCode, KeyEvent, KeyModifiers, MouseEventKind};\n\n\n\n#[derive(Debug)]\n\npub enum UserInput {\n\n // hardcoded global inputs\n\n Help,\n\n Quit,\n\n\n\n // directions\n\n Up,\n\n Down,\n\n Left,\n\n Right,\n\n // gotos\n\n Top,\n\n Bottom,\n", "file_path": "src/input.rs", "rank": 13, "score": 31084.082818269595 }, { "content": " KeyEvent {\n\n code: KeyCode::Tab, ..\n\n } => Some(Self::Tab),\n\n KeyEvent {\n\n code: KeyCode::F(1),\n\n ..\n\n } => Some(Self::Help),\n\n KeyEvent {\n\n code: KeyCode::F(5),\n\n ..\n\n } => Some(Self::Refresh),\n\n _ => None,\n\n },\n\n Event::Mouse(mouse) => match mouse.kind {\n\n MouseEventKind::ScrollUp => Some(Self::Up),\n\n MouseEventKind::ScrollDown => Some(Self::Down),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n", "file_path": "src/input.rs", "rank": 14, "score": 31083.546569976555 }, { "content": " // actions\n\n Back,\n\n Enter,\n\n Delete,\n\n // misc\n\n Tab,\n\n Refresh,\n\n // custom\n\n Char(char),\n\n}\n\n\n\nimpl UserInput {\n\n fn from(event: Event) -> Option<Self> {\n\n match event {\n\n Event::Key(key) => match key {\n\n KeyEvent {\n\n code: KeyCode::Char('c' | 'C'),\n\n modifiers: KeyModifiers::CONTROL,\n\n }\n\n | KeyEvent {\n", "file_path": "src/input.rs", "rank": 15, "score": 31081.985434126243 }, { "content": " }\n\n}\n\n\n\npub(crate) enum EventOrTick<I> {\n\n Input(I),\n\n Tick,\n\n}\n\n\n\npub(crate) fn spawn_input_thread(interval: Duration) -> mpsc::Receiver<EventOrTick<UserInput>> {\n\n let (tx, rx) = mpsc::channel();\n\n\n\n thread::Builder::new()\n\n .name(\"input\".to_owned())\n\n .spawn(move || loop {\n\n let event = if event::poll(interval).unwrap() {\n\n if let Some(valid_input) = 
UserInput::from(event::read().unwrap()) {\n\n EventOrTick::Input(valid_input)\n\n } else {\n\n EventOrTick::Tick\n\n }\n", "file_path": "src/input.rs", "rank": 16, "score": 31072.967922725016 }, { "content": " ..\n\n } => Some(Self::Down),\n\n KeyEvent {\n\n code: KeyCode::Home,\n\n ..\n\n } => Some(Self::Top),\n\n KeyEvent {\n\n code: KeyCode::End, ..\n\n } => Some(Self::Bottom),\n\n KeyEvent {\n\n code: KeyCode::Esc, ..\n\n } => Some(Self::Back),\n\n KeyEvent {\n\n code: KeyCode::Enter,\n\n ..\n\n } => Some(Self::Enter),\n\n KeyEvent {\n\n code: KeyCode::Delete | KeyCode::Backspace,\n\n ..\n\n } => Some(Self::Delete),\n", "file_path": "src/input.rs", "rank": 17, "score": 31072.379372900912 }, { "content": " code: KeyCode::Char('q' | 'Q'),\n\n ..\n\n } => Some(Self::Quit),\n\n KeyEvent {\n\n code: KeyCode::Char(c),\n\n ..\n\n } => Some(Self::Char(c)),\n\n KeyEvent {\n\n code: KeyCode::Left,\n\n ..\n\n } => Some(Self::Left),\n\n KeyEvent {\n\n code: KeyCode::Right,\n\n ..\n\n } => Some(Self::Right),\n\n KeyEvent {\n\n code: KeyCode::Up, ..\n\n } => Some(Self::Up),\n\n KeyEvent {\n\n code: KeyCode::Down,\n", "file_path": "src/input.rs", "rank": 18, "score": 31072.28558350391 }, { "content": " } else {\n\n EventOrTick::Tick\n\n };\n\n\n\n if let Err(e) = tx.send(event) {\n\n log::error!(\"failed to send input event, probably closed channel: {}\", e);\n\n break;\n\n }\n\n })\n\n .expect(\"unable to spawn input thread\");\n\n\n\n rx\n\n}\n", "file_path": "src/input.rs", "rank": 19, "score": 31070.22066580942 }, { "content": " let chunks = Layout::default()\n\n .constraints(vec![Constraint::Min(0), Constraint::Length(1)])\n\n .direction(Direction::Vertical)\n\n .split(f.size());\n\n\n\n self.events_view\n\n .draw(f, chunks[1], Arc::clone(&self.state))\n\n .await;\n\n\n\n chunks[0]\n\n } else {\n\n f.size()\n\n };\n\n\n\n widget.draw(f, area, Arc::clone(&self.state)).await;\n\n }\n\n }\n\n }\n\n\n\n pub(crate) async fn on_input(&mut self, input: &UserInput) {\n", 
"file_path": "src/app.rs", "rank": 20, "score": 30854.948997686228 }, { "content": " };\n\n\n\n instance.register_view(ViewType::Tab, Box::new(TabView::new()));\n\n #[cfg(feature = \"geolocation\")]\n\n instance.register_view(ViewType::World, Box::new(World {}));\n\n instance.register_view(ViewType::Help, Box::new(Help {}));\n\n\n\n instance\n\n }\n\n\n\n fn register_view(&mut self, tp: ViewType, view: Box<dyn AppView>) {\n\n self.views.insert(tp, view);\n\n }\n\n\n\n pub async fn draw(&mut self, f: &mut Frame<'_, CrosstermBackend<io::Stdout>>) {\n\n if let Some(tp) = self.view_stack.last() {\n\n if let Some(widget) = self.views.get_mut(tp) {\n\n use tui::layout::{Constraint, Direction, Layout};\n\n\n\n let area = if self.state.events.read().await.current_event.is_some() {\n", "file_path": "src/app.rs", "rank": 21, "score": 30854.88737466897 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::sync::atomic::AtomicBool;\n\nuse std::sync::Arc;\n\n\n\nuse tui::backend::CrosstermBackend;\n\nuse tui::terminal::Frame;\n\n\n\nuse crate::config::AppConfig;\n\nuse crate::datatypes::game_version::GameVersion;\n\nuse crate::datatypes::server::Address;\n\nuse crate::input::UserInput;\n\nuse crate::states::app::AppState;\n\n#[cfg(feature = \"geolocation\")]\n\nuse crate::views::world::World;\n\nuse crate::views::{events::EventsView, help::Help, tabs::TabView, AppView, Drawable, ViewType};\n\n\n\n#[derive(Debug)]\n\npub enum AppAction {\n\n // view management\n", "file_path": "src/app.rs", "rank": 22, "score": 30849.492719008376 }, { "content": "\n\n pub stopped: bool,\n\n pub panicked: Arc<AtomicBool>,\n\n}\n\n\n\nimpl App {\n\n pub async fn new(config: AppConfig) -> Self {\n\n let panic_bool = Arc::new(AtomicBool::new(false));\n\n let state = AppState::new(config, panic_bool.clone()).await;\n\n\n\n let mut instance = Self {\n\n state,\n\n\n\n views: HashMap::new(),\n\n view_stack: vec![ViewType::Tab],\n\n\n\n events_view: EventsView {},\n\n\n\n stopped: 
false,\n\n panicked: panic_bool,\n", "file_path": "src/app.rs", "rank": 23, "score": 30844.528179596044 }, { "content": " log::debug!(\"input: {:?}\", input);\n\n\n\n match input {\n\n UserInput::Quit => self.stop(),\n\n UserInput::Help => {\n\n if let Some(top_view_type) = self.view_stack.last() {\n\n if top_view_type == &ViewType::Help {\n\n return;\n\n }\n\n if let Some(top_view) = self.views.get(top_view_type) {\n\n self.view_stack.push(ViewType::Help);\n\n self.state\n\n .display_help(&top_view.name(), &top_view.hotkeys());\n\n }\n\n }\n\n }\n\n _ => {\n\n if let Some(top_widget_type) = self.view_stack.last() {\n\n if let Some(widget) = self.views.get_mut(top_widget_type) {\n\n if let Some(action) = widget.on_input(input, self.state.clone()).await {\n", "file_path": "src/app.rs", "rank": 24, "score": 30844.51088318712 }, { "content": " match action {\n\n AppAction::OpenView(view) => {\n\n self.view_stack.push(view);\n\n }\n\n AppAction::CloseView => {\n\n self.view_stack.pop();\n\n }\n\n\n\n _ => self.state.on_action(&action, Arc::clone(&self.state)).await,\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn stop(&mut self) {\n\n self.stopped = true;\n\n }\n\n}\n", "file_path": "src/app.rs", "rank": 25, "score": 30840.48198219647 }, { "content": " OpenView(ViewType),\n\n CloseView,\n\n // installations\n\n InstallVersion(GameVersion),\n\n AbortVersionInstallation(GameVersion),\n\n UninstallVersion(GameVersion),\n\n LaunchVersion(GameVersion),\n\n ConnectToServer {\n\n version: GameVersion,\n\n address: Address,\n\n },\n\n}\n\n\n\npub struct App {\n\n pub state: Arc<AppState>,\n\n\n\n views: HashMap<ViewType, Box<dyn AppView>>,\n\n view_stack: Vec<ViewType>,\n\n\n\n events_view: EventsView,\n", "file_path": "src/app.rs", "rank": 26, "score": 30837.34801106879 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n setup_panic_hook();\n\n\n\n // TODO: actual error processing\n\n _main()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", 
"rank": 27, "score": 30006.666408977035 }, { "content": "fn _main() -> Result<(), Box<dyn std::error::Error>> {\n\n let config: AppConfig = AppConfig::new()?;\n\n\n\n setup_logger(&config)?;\n\n\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()?;\n\n\n\n let mut app = rt.block_on(app::App::new(config));\n\n\n\n let mut terminal = {\n\n enable_raw_mode()?;\n\n\n\n let mut stdout = io::stdout();\n\n execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;\n\n\n\n let backend = CrosstermBackend::new(stdout);\n\n\n\n Terminal::new(backend)?\n", "file_path": "src/main.rs", "rank": 28, "score": 30006.666408977035 }, { "content": " )\n\n }\n\n}\n\n\n\npub struct HelpState {\n\n pub view_name: String,\n\n pub global_hotkeys: Vec<HotKey>,\n\n pub local_hotkeys: Vec<HotKey>,\n\n}\n\n\n\nimpl HelpState {\n\n pub fn new() -> Self {\n\n Self {\n\n view_name: \"\".to_owned(),\n\n global_hotkeys: vec![\n\n HotKey {\n\n description: \"Display help in current context\",\n\n key: KeyCode::F(1),\n\n modifiers: None,\n\n },\n", "file_path": "src/states/help.rs", "rank": 32, "score": 29226.369144663553 }, { "content": " HotKey {\n\n description: \"Close help screen\",\n\n key: KeyCode::Esc,\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Quit app\",\n\n key: KeyCode::Char('q'),\n\n modifiers: None,\n\n },\n\n ],\n\n local_hotkeys: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn set_name(&mut self, name: &str) {\n\n self.view_name = name.to_owned()\n\n }\n\n\n\n pub fn set_hotkeys(&mut self, hotkeys: &[HotKey]) {\n\n self.local_hotkeys = hotkeys.to_vec();\n\n }\n\n}\n", "file_path": "src/states/help.rs", "rank": 33, "score": 29225.14836636082 }, { "content": "use std::fmt;\n\n\n\nuse crossterm::event::{KeyCode, KeyModifiers};\n\n\n\n// TODO: less code duplication by associating this with inputs.rs somehow\n\n#[derive(Debug, Clone)]\n\npub struct HotKey {\n\n pub description: &'static str,\n\n pub key: KeyCode,\n\n pub modifiers: 
Option<KeyModifiers>,\n\n}\n\n\n\nimpl fmt::Display for HotKey {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(modifiers) = self.modifiers {\n\n write!(f, \"{:?}+\", modifiers)?;\n\n }\n\n write!(\n\n f,\n\n \"{}\",\n", "file_path": "src/states/help.rs", "rank": 36, "score": 29214.397444286 }, { "content": " match self.key {\n\n KeyCode::Backspace => \"Backspace\".to_owned(),\n\n KeyCode::Enter => \"Enter\".to_owned(),\n\n KeyCode::Left => \"Left\".to_owned(),\n\n KeyCode::Right => \"Right\".to_owned(),\n\n KeyCode::Up => \"Up\".to_owned(),\n\n KeyCode::Down => \"Down\".to_owned(),\n\n KeyCode::Home => \"Home\".to_owned(),\n\n KeyCode::End => \"End\".to_owned(),\n\n KeyCode::PageUp => \"PageUp\".to_owned(),\n\n KeyCode::PageDown => \"PageDown\".to_owned(),\n\n KeyCode::Tab => \"Tab\".to_owned(),\n\n KeyCode::BackTab => \"BackTab\".to_owned(),\n\n KeyCode::Delete => \"Delete\".to_owned(),\n\n KeyCode::Insert => \"Insert\".to_owned(),\n\n KeyCode::F(i) => format!(\"F{}\", i),\n\n KeyCode::Char(c) => c.to_uppercase().to_string(),\n\n KeyCode::Null => \"Null\".to_owned(),\n\n KeyCode::Esc => \"Esc\".to_owned(),\n\n }\n", "file_path": "src/states/help.rs", "rank": 41, "score": 29201.275792011314 }, { "content": " }\n\n }\n\n\n\n pub fn display_help(&self, view_name: &str, keys: &[HotKey]) {\n\n let mut help = self.help.lock().unwrap();\n\n help.set_name(view_name);\n\n help.set_hotkeys(keys);\n\n }\n\n\n\n pub async fn watch_task(&self, task: JoinHandle<TaskResult>) {\n\n tokio::spawn(Self::wrap_task(\n\n task,\n\n self.panic_bool.clone(),\n\n self.events.clone(),\n\n ));\n\n }\n\n\n\n async fn wrap_task(\n\n task: JoinHandle<TaskResult>,\n\n panic_bool: Arc<AtomicBool>,\n", "file_path": "src/states/app.rs", "rank": 42, "score": 29105.136924094993 }, { "content": "pub struct AppState {\n\n pub config: AppConfig,\n\n pub commits: Arc<RwLock<CommitState>>,\n\n pub versions: Arc<RwLock<VersionsState>>,\n\n #[cfg(feature = \"geolocation\")]\n\n 
pub locations: Arc<RwLock<LocationsState>>,\n\n pub servers: Arc<RwLock<ServersState>>,\n\n pub events: Arc<RwLock<EventsState>>,\n\n\n\n pub help: Mutex<HelpState>,\n\n\n\n pub client: reqwest::Client,\n\n\n\n panic_bool: Arc<AtomicBool>,\n\n}\n\n\n\nimpl AppState {\n\n pub async fn new(config: AppConfig, panic_bool: Arc<AtomicBool>) -> Arc<Self> {\n\n #[cfg(feature = \"geolocation\")]\n\n let locations = Arc::new(RwLock::new(LocationsState::new(&config).await));\n", "file_path": "src/states/app.rs", "rank": 43, "score": 29093.73192840702 }, { "content": "use std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse tokio::sync::RwLock;\n\nuse tokio::task::JoinHandle;\n\n\n\nuse anyhow::Result;\n\n\n\nuse crate::app::AppAction;\n\nuse crate::config::AppConfig;\n\nuse crate::constants::USER_AGENT;\n\nuse crate::states::events::EventsState;\n\nuse crate::states::help::HelpState;\n\nuse crate::states::help::HotKey;\n\n#[cfg(feature = \"geolocation\")]\n\nuse crate::states::LocationsState;\n\nuse crate::states::{CommitState, ServersState, VersionsState};\n\n\n\npub type TaskResult = Result<()>;\n\n\n", "file_path": "src/states/app.rs", "rank": 44, "score": 29093.2322130784 }, { "content": " });\n\n\n\n events.write().await.run(instance.clone()).await;\n\n servers.write().await.run(instance.clone()).await;\n\n #[cfg(feature = \"geolocation\")]\n\n locations.write().await.run(instance.clone()).await;\n\n versions.write().await.run(instance.clone()).await;\n\n\n\n instance\n\n }\n\n\n\n pub async fn on_action(&self, action: &AppAction, app: Arc<AppState>) {\n\n log::debug!(\"action: {:?}\", &action);\n\n\n\n let f = match action {\n\n AppAction::ConnectToServer { version, address } => Some(tokio::spawn(\n\n VersionsState::launch(Arc::clone(&app), version.clone(), Some(address.clone())),\n\n )),\n\n AppAction::InstallVersion(version) => Some(tokio::spawn(VersionsState::install(\n\n Arc::clone(&app),\n", "file_path": "src/states/app.rs", "rank": 
45, "score": 29088.18070569847 }, { "content": " version.clone(),\n\n ))),\n\n AppAction::LaunchVersion(version) => Some(tokio::spawn(VersionsState::launch(\n\n Arc::clone(&app),\n\n version.clone(),\n\n None,\n\n ))),\n\n AppAction::AbortVersionInstallation(version) => Some(tokio::spawn(\n\n VersionsState::abort_installation(Arc::clone(&app), version.clone()),\n\n )),\n\n AppAction::UninstallVersion(version) => Some(tokio::spawn(VersionsState::uninstall(\n\n Arc::clone(&app),\n\n version.clone(),\n\n ))),\n\n\n\n _ => None,\n\n };\n\n\n\n if let Some(f) = f {\n\n self.watch_task(f).await;\n", "file_path": "src/states/app.rs", "rank": 46, "score": 29086.247357276196 }, { "content": " let versions = Arc::new(RwLock::new(VersionsState::new(&config).await));\n\n let servers = Arc::new(RwLock::new(ServersState::new(&config).await));\n\n let events = Arc::new(RwLock::new(EventsState::new(&config).await));\n\n\n\n let instance = Arc::new(Self {\n\n commits: Arc::new(RwLock::new(CommitState::new().await)),\n\n versions: versions.clone(),\n\n #[cfg(feature = \"geolocation\")]\n\n locations: locations.clone(),\n\n servers: servers.clone(),\n\n events: events.clone(),\n\n config,\n\n client: reqwest::Client::builder()\n\n .user_agent(USER_AGENT)\n\n .build()\n\n .expect(\"creating client\"),\n\n\n\n help: Mutex::new(HelpState::new()),\n\n\n\n panic_bool,\n", "file_path": "src/states/app.rs", "rank": 47, "score": 29083.63399691083 }, { "content": " events: Arc<RwLock<EventsState>>,\n\n ) {\n\n match task.await {\n\n Err(err) => {\n\n log::warn!(\"join error: {:?}\", &err);\n\n\n\n if err.is_panic() {\n\n log::error!(\"error is panic, setting panic to exit on next tick\");\n\n panic_bool.store(true, Ordering::Relaxed);\n\n }\n\n }\n\n Ok(result) => {\n\n if let Err(err) = result {\n\n events.read().await.error(err).await;\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/states/app.rs", "rank": 48, "score": 29081.702027111347 }, { "content": "fn deserialize_ok_or_default<'de, 
T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: Deserialize<'de> + Default,\n\n D: Deserializer<'de>,\n\n{\n\n let v: Value = Deserialize::deserialize(deserializer)?;\n\n\n\n let deserialized =\n\n T::deserialize(v).map_err(|e| log::error!(\"error deserializing field: {}\", e));\n\n\n\n // this actually uses type default (String -> \"\"), so need to implement Deserialize\n\n // trait for more control over error handling\n\n Ok(deserialized.unwrap_or_default())\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, Hash)]\n\n#[serde(default)]\n\npub struct ServerJson {\n\n #[serde(rename = \"ServerName\")]\n\n #[serde(deserialize_with = \"deserialize_ok_or_default\")]\n", "file_path": "src/datatypes/server.rs", "rank": 49, "score": 23912.317087100742 }, { "content": "use std::io;\n\nuse std::sync::Arc;\n\n\n\nuse tui::{\n\n backend::CrosstermBackend,\n\n layout::{Alignment, Constraint, Direction, Layout, Rect},\n\n style::{Color, Style},\n\n text::Text,\n\n widgets::{Block, Borders, List, ListItem, ListState, Paragraph, Wrap},\n\n Frame,\n\n};\n\n\n\nuse crate::app::AppAction;\n\nuse crate::input::UserInput;\n\nuse crate::states::help::HotKey;\n\nuse crate::states::{AppState, CommitState, StatelessList};\n\nuse crate::views::{Drawable, HotKeys, InputProcessor, Named};\n\n\n\npub struct CommitView {\n\n // TODO:\n", "file_path": "src/views/commits.rs", "rank": 50, "score": 40.622846822701604 }, { "content": "use std::io;\n\nuse std::sync::Arc;\n\n\n\nuse crossterm::event::KeyCode;\n\n\n\nuse tui::backend::CrosstermBackend;\n\nuse tui::layout::Rect;\n\nuse tui::terminal::Frame;\n\n\n\nuse crate::app::AppAction;\n\nuse crate::datatypes::server::Server;\n\nuse crate::input::UserInput;\n\nuse crate::states::help::HotKey;\n\nuse crate::states::{AppState, StatelessList};\n\nuse crate::views::{Drawable, HotKeys, InputProcessor, Named, ViewType};\n\n\n\nuse tui::{\n\n layout::{Alignment, Constraint, Direction, Layout},\n\n style::{Color, Modifier, Style},\n\n 
symbols::DOT,\n", "file_path": "src/views/servers.rs", "rank": 51, "score": 37.23581160127316 }, { "content": "use crate::states::AppState;\n\nuse crate::views::{AppView, Drawable, HotKeys, InputProcessor, Named};\n\n\n\npub struct World {}\n\n\n\n#[async_trait::async_trait]\n\nimpl InputProcessor for World {\n\n async fn on_input(&mut self, input: &UserInput, _: Arc<AppState>) -> Option<AppAction> {\n\n match input {\n\n UserInput::Char('m' | 'M') | UserInput::Back => Some(AppAction::CloseView),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl AppView for World {}\n\n\n\nimpl Named for World {\n\n fn name(&self) -> String {\n\n \"World Map\".to_owned()\n", "file_path": "src/views/world.rs", "rank": 52, "score": 36.03315149923546 }, { "content": "use std::io;\n\nuse std::sync::Arc;\n\n\n\nuse tui::{\n\n backend::CrosstermBackend,\n\n layout::{Alignment, Rect},\n\n style::{Color, Modifier, Style},\n\n text::Span,\n\n widgets::{Block, Borders},\n\n Frame,\n\n};\n\n\n\nuse crate::states::app::AppState;\n\nuse crate::states::events::AppEvent;\n\nuse crate::views::Drawable;\n\n\n\npub struct EventsView {}\n\n\n\n#[async_trait::async_trait]\n\nimpl Drawable for EventsView {\n", "file_path": "src/views/events.rs", "rank": 53, "score": 33.70538411682873 }, { "content": "use std::io;\n\nuse std::sync::Arc;\n\n\n\nuse crossterm::event::KeyCode;\n\n\n\nuse tui::layout::Rect;\n\nuse tui::{\n\n backend::CrosstermBackend,\n\n layout::{Constraint, Direction, Layout},\n\n style::{Color, Modifier, Style},\n\n symbols::DOT,\n\n text::Spans,\n\n widgets::BorderType,\n\n widgets::{Block, ListState, Tabs},\n\n Frame,\n\n};\n\n\n\nuse futures::stream::{self, StreamExt};\n\n\n\nuse crate::app::AppAction;\n", "file_path": "src/views/tabs.rs", "rank": 54, "score": 31.963254037756272 }, { "content": "use std::io;\n\nuse std::sync::Arc;\n\n\n\nuse bytesize::ByteSize;\n\n\n\nuse crossterm::event::KeyCode;\n\n\n\nuse tui::{\n\n backend::CrosstermBackend,\n\n layout::{Alignment, Constraint, 
Direction, Layout, Margin, Rect},\n\n style::{Color, Modifier, Style},\n\n text::Span,\n\n widgets::{Block, Borders, Gauge, Row, Table, TableState},\n\n Frame,\n\n};\n\n\n\nuse crate::app::AppAction;\n\nuse crate::datatypes::{\n\n game_version::{DownloadUrl, GameVersion},\n\n installation::InstallationKind,\n", "file_path": "src/views/versions.rs", "rank": 55, "score": 31.001819090895914 }, { "content": " // cannot move this to function because of match limitation for arms\n\n // even if they implement same trait\n\n _ => match self.selected_tab() {\n\n Tab::Servers => self.view_servers.on_input(input, app).await,\n\n Tab::Versions => self.view_versions.on_input(input, app).await,\n\n Tab::Commits => self.view_commits.on_input(input, app).await,\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Drawable for TabView {\n\n async fn draw(\n\n &mut self,\n\n f: &mut Frame<CrosstermBackend<io::Stdout>>,\n\n area: Rect,\n\n app: Arc<AppState>,\n\n ) {\n\n let chunks = Layout::default()\n", "file_path": "src/views/tabs.rs", "rank": 56, "score": 30.63529509796013 }, { "content": "};\n\nuse crate::input::UserInput;\n\nuse crate::states::help::HotKey;\n\nuse crate::states::{AppState, StatelessList};\n\nuse crate::views::{Drawable, HotKeys, InputProcessor, Named};\n\n\n\npub struct VersionView {\n\n state: StatelessList<TableState>,\n\n}\n\n\n\nimpl VersionView {\n\n pub fn new() -> Self {\n\n Self {\n\n state: StatelessList::new(TableState::default(), false),\n\n }\n\n }\n\n}\n\n\n\nimpl Named for VersionView {\n\n fn name(&self) -> String {\n", "file_path": "src/views/versions.rs", "rank": 57, "score": 30.363788114110477 }, { "content": " async fn draw(\n\n &mut self,\n\n f: &mut Frame<CrosstermBackend<io::Stdout>>,\n\n area: Rect,\n\n app: Arc<AppState>,\n\n ) {\n\n if let Some(event) = &app.events.read().await.current_event {\n\n let mut style = Style::default().add_modifier(Modifier::BOLD);\n\n let mut border_style = Style::default();\n\n\n\n match event 
{\n\n AppEvent::Error(_) => {\n\n style = style.bg(Color::Red);\n\n border_style = border_style.bg(Color::Red);\n\n }\n\n AppEvent::Event(_) => {\n\n style = style.bg(Color::Green).fg(Color::Black);\n\n border_style = border_style.bg(Color::Green).fg(Color::Black);\n\n }\n\n };\n", "file_path": "src/views/events.rs", "rank": 58, "score": 28.774675922927 }, { "content": "use std::io;\n\nuse std::sync::Arc;\n\n\n\nuse tui::layout::Rect;\n\nuse tui::{\n\n backend::CrosstermBackend,\n\n style::Color,\n\n symbols,\n\n widgets::canvas::{Canvas, Line, Map, MapResolution},\n\n widgets::{Block, Borders},\n\n Frame,\n\n};\n\n\n\nuse crossterm::event::KeyCode;\n\n\n\nuse crate::app::AppAction;\n\n\n\nuse crate::datatypes::geolocation::IP;\n\nuse crate::input::UserInput;\n\nuse crate::states::help::HotKey;\n", "file_path": "src/views/world.rs", "rank": 59, "score": 28.65861597652698 }, { "content": " }\n\n}\n\n\n\nimpl HotKeys for CommitView {\n\n fn hotkeys(&self) -> Vec<HotKey> {\n\n self.state.hotkeys()\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl InputProcessor for CommitView {\n\n async fn on_input(&mut self, input: &UserInput, app: Arc<AppState>) -> Option<AppAction> {\n\n self.state.on_input(input, app.commits.read().await.count())\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Drawable for CommitView {\n\n async fn draw(\n\n &mut self,\n", "file_path": "src/views/commits.rs", "rank": 60, "score": 28.57845993937203 }, { "content": "pub mod commits;\n\npub mod events;\n\npub mod help;\n\npub mod servers;\n\npub mod tabs;\n\npub mod versions;\n\n#[cfg(feature = \"geolocation\")]\n\npub mod world;\n\n\n\nuse std::io;\n\nuse std::sync::Arc;\n\n\n\nuse tui::backend::CrosstermBackend;\n\nuse tui::layout::Rect;\n\nuse tui::terminal::Frame;\n\n\n\nuse crate::app::AppAction;\n\nuse crate::input::UserInput;\n\nuse crate::states::help::HotKey;\n\nuse crate::states::AppState;\n", "file_path": "src/views/mod.rs", "rank": 61, "score": 28.091699629250563 }, { 
"content": " .state\n\n .on_input(input, app.versions.read().await.count()),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Drawable for VersionView {\n\n async fn draw(\n\n &mut self,\n\n f: &mut Frame<CrosstermBackend<io::Stdout>>,\n\n area: Rect,\n\n app: Arc<AppState>,\n\n ) {\n\n let versions = &app.versions.read().await.items;\n\n\n\n let mut total_size = 0;\n\n let mut in_progress = Vec::new();\n\n\n\n let items: Vec<Row> = versions\n", "file_path": "src/views/versions.rs", "rank": 62, "score": 27.38822197095238 }, { "content": "\n\n#[async_trait::async_trait]\n\nimpl Drawable for ServerView {\n\n async fn draw(\n\n &mut self,\n\n f: &mut Frame<CrosstermBackend<io::Stdout>>,\n\n area: Rect,\n\n app: Arc<AppState>,\n\n ) {\n\n let servers = &app.servers.read().await.items;\n\n\n\n let mut count_online = 0;\n\n let mut count_no_players = 0;\n\n let mut count_offline = 0;\n\n\n\n let mut count_players = 0;\n\n\n\n for s in servers {\n\n count_players += s.players;\n\n\n", "file_path": "src/views/servers.rs", "rank": 63, "score": 24.848992982425884 }, { "content": "#[async_trait::async_trait]\n\nimpl Drawable for World {\n\n // TODO: render selected with labels by default, all without labels\n\n // TODO: zoom and map navigation\n\n async fn draw(\n\n &mut self,\n\n f: &mut Frame<CrosstermBackend<io::Stdout>>,\n\n area: Rect,\n\n app: Arc<AppState>,\n\n ) {\n\n let locations = &app.locations.read().await.items;\n\n let servers = &app.servers.read().await.items;\n\n\n\n let map = Canvas::default()\n\n .block(Block::default().borders(Borders::ALL))\n\n .marker(symbols::Marker::Braille)\n\n .x_bounds([-180.0, 180.0])\n\n .y_bounds([-90.0, 90.0])\n\n .paint(|ctx| {\n\n ctx.draw(&Map {\n", "file_path": "src/views/world.rs", "rank": 64, "score": 24.843509155174896 }, { "content": " f: &mut Frame<CrosstermBackend<io::Stdout>>,\n\n area: Rect,\n\n app: Arc<AppState>,\n\n ) {\n\n let chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n 
.constraints([Constraint::Length(60), Constraint::Min(0)])\n\n .split(area);\n\n\n\n let commits = &app.commits.read().await.items;\n\n\n\n let items: Vec<ListItem> = commits\n\n .iter()\n\n .map(|c| ListItem::new(c.title.clone()))\n\n .collect();\n\n\n\n let list = List::new(items)\n\n .block(\n\n Block::default()\n\n .borders(Borders::ALL)\n", "file_path": "src/views/commits.rs", "rank": 65, "score": 23.950326366352716 }, { "content": " }\n\n}\n\n\n\nimpl HotKeys for World {\n\n fn hotkeys(&self) -> Vec<HotKey> {\n\n vec![\n\n HotKey {\n\n description: \"Close map\",\n\n key: KeyCode::Char('m'),\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Close map\",\n\n key: KeyCode::Esc,\n\n modifiers: None,\n\n },\n\n ]\n\n }\n\n}\n\n\n", "file_path": "src/views/world.rs", "rank": 66, "score": 23.948684686067722 }, { "content": " \"Version List\".to_owned()\n\n }\n\n}\n\n\n\nimpl HotKeys for VersionView {\n\n fn hotkeys(&self) -> Vec<HotKey> {\n\n let mut hotkeys = vec![\n\n HotKey {\n\n description: \"Refresh version list\",\n\n key: KeyCode::F(5),\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Install selected version\",\n\n key: KeyCode::Char('i'),\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Run selected version (installs if needed)\",\n\n key: KeyCode::Enter,\n", "file_path": "src/views/versions.rs", "rank": 67, "score": 23.79531809937967 }, { "content": "use crate::states::app::{AppState, TaskResult};\n\n\n\npub struct VersionsState {\n\n pub items: ValueSortedMap<GameVersion, Installation>,\n\n pub install_dir_error: Option<String>,\n\n}\n\n\n\nimpl VersionsState {\n\n pub async fn new(_: &AppConfig) -> Self {\n\n Self {\n\n items: ValueSortedMap::new(),\n\n install_dir_error: None,\n\n }\n\n }\n\n\n\n pub async fn run(&mut self, app: Arc<AppState>) {\n\n self.spawn_installation_finder(app.clone()).await;\n\n }\n\n\n\n pub fn count(&self) -> usize {\n", "file_path": "src/states/versions.rs", "rank": 68, "score": 
23.05136084191766 }, { "content": " }\n\n )\n\n }\n\n}\n\n\n\nimpl HotKeys for TabView {\n\n fn hotkeys(&self) -> Vec<HotKey> {\n\n let mut hotkeys = vec![\n\n HotKey {\n\n description: \"Go to next tab\",\n\n key: KeyCode::Tab,\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Go Servers tab\",\n\n key: KeyCode::Char('s'),\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Go versions tab\",\n", "file_path": "src/views/tabs.rs", "rank": 69, "score": 22.609563059852817 }, { "content": " .direction(Direction::Vertical)\n\n .constraints([Constraint::Length(1), Constraint::Min(0)].as_ref())\n\n .split(area);\n\n\n\n let header = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints([Constraint::Min(0)])\n\n .split(chunks[0]);\n\n\n\n let tabs = stream::iter(Tab::all())\n\n .then(|t| {\n\n let cloned_app = Arc::clone(&app);\n\n async move { Spans::from(t.name(cloned_app).await) }\n\n })\n\n .collect()\n\n .await;\n\n\n\n f.render_widget(\n\n Tabs::new(tabs)\n\n .block(Block::default().border_type(BorderType::Plain))\n", "file_path": "src/views/tabs.rs", "rank": 70, "score": 22.188808518637618 }, { "content": " key: KeyCode::Char('v'),\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Go Commits tab\",\n\n key: KeyCode::Char('c'),\n\n modifiers: None,\n\n },\n\n ];\n\n\n\n hotkeys.append(&mut match self.selected_tab() {\n\n Tab::Servers => self.view_servers.hotkeys(),\n\n Tab::Versions => self.view_versions.hotkeys(),\n\n Tab::Commits => self.view_commits.hotkeys(),\n\n });\n\n\n\n hotkeys\n\n }\n\n}\n\n\n", "file_path": "src/views/tabs.rs", "rank": 71, "score": 22.11097667402128 }, { "content": "}\n\n\n\nimpl HotKeys for ServerView {\n\n fn hotkeys(&self) -> Vec<HotKey> {\n\n let mut hotkeys = vec![\n\n #[cfg(feature = \"geolocation\")]\n\n HotKey {\n\n description: \"Show world map\",\n\n key: KeyCode::Char('m'),\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Install game version for selected server\",\n\n 
key: KeyCode::Char('i'),\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Connect to selected server (install if needed)\",\n\n key: KeyCode::Enter,\n\n modifiers: None,\n", "file_path": "src/views/servers.rs", "rank": 72, "score": 22.074568282138543 }, { "content": " .title(\"latest commits\")\n\n .title_alignment(Alignment::Center),\n\n )\n\n .highlight_style(Style::default().bg(Color::DarkGray));\n\n\n\n f.render_stateful_widget(list, chunks[0], &mut self.state.state);\n\n\n\n if let Some(i) = self.state.state.selected() {\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Length(3), Constraint::Min(0)])\n\n .split(chunks[1]);\n\n\n\n let selected = &commits[i];\n\n\n\n f.render_widget(\n\n Paragraph::new(Text::from(format!(\"author: {}\", selected.author.name)))\n\n .alignment(Alignment::Left)\n\n .block(Block::default().borders(Borders::ALL).title(\"author\"))\n\n .wrap(Wrap { trim: true }),\n", "file_path": "src/views/commits.rs", "rank": 73, "score": 21.92444210747362 }, { "content": "pub mod app;\n\npub mod commits;\n\npub mod events;\n\npub mod help;\n\n#[cfg(feature = \"geolocation\")]\n\npub mod locations;\n\npub mod servers;\n\npub mod versions;\n\n\n\npub use app::AppState;\n\npub use commits::CommitState;\n\n#[cfg(feature = \"geolocation\")]\n\npub use locations::LocationsState;\n\npub use servers::ServersState;\n\npub use versions::VersionsState;\n\n\n\nuse crossterm::event::KeyCode;\n\n\n\nuse tui::widgets::{ListState, TableState};\n\n\n\nuse crate::app::AppAction;\n\nuse crate::input::UserInput;\n\nuse crate::states::help::HotKey;\n\n\n\n// tui states look same, but do not implement trait, so I made one\n", "file_path": "src/states/mod.rs", "rank": 74, "score": 21.697739956647943 }, { "content": " text::{Span, Spans, Text},\n\n widgets::{Block, Borders, Paragraph, Row, Table, TableState, Wrap},\n\n};\n\n\n\npub struct ServerView {\n\n state: StatelessList<TableState>,\n\n}\n\n\n\nimpl 
ServerView {\n\n pub fn new() -> Self {\n\n Self {\n\n state: StatelessList::new(TableState::default(), false),\n\n }\n\n }\n\n}\n\n\n\nimpl Named for ServerView {\n\n fn name(&self) -> String {\n\n \"Server List\".to_owned()\n\n }\n", "file_path": "src/views/servers.rs", "rank": 75, "score": 21.66563248533479 }, { "content": " },\n\n ];\n\n\n\n hotkeys.append(&mut self.state.hotkeys());\n\n\n\n hotkeys\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl InputProcessor for ServerView {\n\n async fn on_input(&mut self, input: &UserInput, app: Arc<AppState>) -> Option<AppAction> {\n\n match input {\n\n #[cfg(feature = \"geolocation\")]\n\n UserInput::Char('m' | 'M') => Some(AppAction::OpenView(ViewType::World)),\n\n UserInput::Char('i' | 'I') => {\n\n if let Some(i) = self.state.selected() {\n\n Some(AppAction::InstallVersion(\n\n app.servers.read().await.items[i].version.clone(),\n\n ))\n", "file_path": "src/views/servers.rs", "rank": 76, "score": 21.40054657017017 }, { "content": "\n\nuse crate::input::UserInput;\n\nuse crate::states::help::HotKey;\n\nuse crate::states::{AppState, StatelessList};\n\nuse crate::views::{\n\n commits::CommitView, servers::ServerView, versions::VersionView, AppView, Drawable, HotKeys,\n\n InputProcessor, Named,\n\n};\n\n\n\n#[derive(Copy, Clone)]\n", "file_path": "src/views/tabs.rs", "rank": 77, "score": 21.10714289285527 }, { "content": " );\n\n\n\n let chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)])\n\n .split(area);\n\n\n\n f.render_widget(\n\n Paragraph::new(Text::from(\n\n r#\"\n\n help :\n\n build mode :\n\n data directory :\"#,\n\n ))\n\n .alignment(Alignment::Right)\n\n .wrap(Wrap { trim: true }),\n\n chunks[0],\n\n );\n\n f.render_widget(\n\n Paragraph::new(Text::from(format!(\n", "file_path": "src/views/servers.rs", "rank": 78, "score": 20.950171799283826 }, { "content": " _ => None,\n\n }\n\n }\n\n\n\n pub fn hotkeys(&self) -> 
Vec<HotKey> {\n\n vec![\n\n HotKey {\n\n description: \"Go up (scrollwheel support)\",\n\n key: KeyCode::Up,\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Go down (scrollwheel support)\",\n\n key: KeyCode::Down,\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Unselect\",\n\n key: KeyCode::Esc,\n\n modifiers: None,\n", "file_path": "src/states/mod.rs", "rank": 79, "score": 20.877619961951176 }, { "content": "#[async_trait::async_trait]\n\nimpl InputProcessor for TabView {\n\n async fn on_input(&mut self, input: &UserInput, app: Arc<AppState>) -> Option<AppAction> {\n\n match input {\n\n UserInput::Char('s' | 'S') => {\n\n self.select_tab(Tab::Servers);\n\n None\n\n }\n\n UserInput::Char('v' | 'V') => {\n\n self.select_tab(Tab::Versions);\n\n None\n\n }\n\n UserInput::Char('c' | 'C') => {\n\n self.select_tab(Tab::Commits);\n\n None\n\n }\n\n UserInput::Tab => {\n\n self.state.select_next(Tab::tab_count());\n\n None\n\n }\n", "file_path": "src/views/tabs.rs", "rank": 80, "score": 20.286722232323697 }, { "content": " ByteSize::b(*progress).to_string()\n\n }\n\n _ => \"0\".to_owned(),\n\n },\n\n ])\n\n })\n\n .collect();\n\n\n\n let mut constraints = vec![Constraint::Min(0)];\n\n\n\n if !in_progress.is_empty() {\n\n constraints.push(Constraint::Length(2 + in_progress.len() as u16));\n\n }\n\n\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints(constraints.clone())\n\n .split(area);\n\n\n\n let table = Table::new(items)\n", "file_path": "src/views/versions.rs", "rank": 81, "score": 19.526704711759855 }, { "content": "impl ServersState {\n\n pub async fn new(config: &AppConfig) -> Self {\n\n Self {\n\n items: Vec::new(),\n\n update_interval: Duration::from_secs(config.update_interval),\n\n }\n\n }\n\n\n\n pub async fn run(&mut self, app: Arc<AppState>) {\n\n #[cfg(debug_assertions)]\n\n {\n\n let ip = IP::Remote(\"8.8.8.8\".to_owned());\n\n let version = GameVersion {\n\n fork: \"evil-exploit\".to_owned(),\n\n 
build: 666.to_string(),\n\n download: DownloadUrl::new(\"http://evil.exploit\"),\n\n };\n\n\n\n self.items.push(Server {\n\n name: \"TEST SERVER PLEASE IGNORE\".to_owned(),\n", "file_path": "src/states/servers.rs", "rank": 82, "score": 19.40230624268779 }, { "content": " }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl InputProcessor for VersionView {\n\n async fn on_input(&mut self, input: &UserInput, app: Arc<AppState>) -> Option<AppAction> {\n\n match input {\n\n UserInput::Refresh => {\n\n let mut versions = app.versions.write().await;\n\n\n\n versions.refresh(app.clone()).await;\n\n\n\n if let Some(i) = self.state.selected() {\n\n if i >= versions.count() {\n\n self.state.unselect();\n\n }\n\n }\n", "file_path": "src/views/versions.rs", "rank": 83, "score": 19.228096531931207 }, { "content": " self.state.select(None);\n\n } else {\n\n self.state.select(Some(item_count - 1));\n\n }\n\n }\n\n\n\n pub fn select_index(&mut self, index: usize) {\n\n self.state.select(Some(index))\n\n }\n\n\n\n pub fn unselect(&mut self) {\n\n self.state.select(None);\n\n }\n\n\n\n pub fn selected(&self) -> Option<usize> {\n\n self.state.selected()\n\n }\n\n\n\n pub fn on_input(&mut self, input: &UserInput, item_count: usize) -> Option<AppAction> {\n\n match input {\n", "file_path": "src/states/mod.rs", "rank": 84, "score": 18.98733436595014 }, { "content": " if s.offline {\n\n count_offline += 1;\n\n } else if s.players == 0 {\n\n count_no_players += 1;\n\n } else {\n\n count_online += 1;\n\n }\n\n }\n\n\n\n let chunks = Layout::default()\n\n .constraints([Constraint::Min(0), Constraint::Length(5)])\n\n .direction(Direction::Vertical)\n\n .split(area);\n\n\n\n // \"BUILD\".len()\n\n let mut longest_build_name = 5;\n\n let mut longest_map_name = 3;\n\n\n\n let rows: Vec<Row> = servers\n\n .iter()\n", "file_path": "src/views/servers.rs", "rank": 85, "score": 18.44680570705882 }, { "content": " chunks[0],\n\n );\n\n f.render_widget(\n\n 
Paragraph::new(Text::from(selected.message.clone()))\n\n .alignment(Alignment::Left)\n\n .block(Block::default().borders(Borders::ALL).title(\"info\"))\n\n .wrap(Wrap { trim: true }),\n\n chunks[1],\n\n );\n\n } else {\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Percentage(50), Constraint::Length(1)])\n\n .split(chunks[1]);\n\n f.render_widget(\n\n Paragraph::new(\"select commit\").alignment(Alignment::Center),\n\n chunks[1],\n\n );\n\n }\n\n\n", "file_path": "src/views/commits.rs", "rank": 86, "score": 18.1889798649483 }, { "content": " s.version.build.clone(),\n\n s.map.clone(),\n\n s.players.to_string(),\n\n ])\n\n .style(style)\n\n })\n\n .collect();\n\n\n\n let pop_header = format!(\"POP [{}]\", count_players);\n\n\n\n let widths = [\n\n Constraint::Percentage(60),\n\n Constraint::Length(longest_build_name as u16),\n\n // until https://github.com/fdehau/tui-rs/issues/525 is fixed\n\n Constraint::Length(longest_map_name as u16),\n\n Constraint::Length(pop_header.len() as u16),\n\n ];\n\n\n\n let table = Table::new(rows)\n\n .header(\n", "file_path": "src/views/servers.rs", "rank": 87, "score": 18.120946468590223 }, { "content": " let removed = if let Some((removed_key, removed_value)) = self.map.remove_entry(&k) {\n\n // remove entry from set using dumb cmp to key\n\n self.set.retain(|i| i != &removed_key);\n\n Some(removed_value)\n\n } else {\n\n None\n\n };\n\n\n\n self.map.insert(k, v.clone());\n\n self.set.insert(v);\n\n\n\n removed\n\n }\n\n\n\n pub fn get(&self, k: &K) -> Option<&V> {\n\n self.map.get(k)\n\n }\n\n\n\n pub fn retain<F>(&mut self, mut f: F)\n\n where\n", "file_path": "src/datatypes/value_sorted_map.rs", "rank": 88, "score": 17.988034698720252 }, { "content": "use std::fmt;\n\nuse std::sync::Arc;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse tokio::sync::{mpsc, Mutex};\n\nuse tokio::time::sleep;\n\n\n\nuse crate::config::AppConfig;\n\n\n\nuse crate::states::app::{AppState, 
TaskResult};\n\n\n\n#[derive(Debug)]\n\npub enum AppEvent {\n\n Event(String),\n\n Error(anyhow::Error),\n\n}\n\n\n\nimpl fmt::Display for AppEvent {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n", "file_path": "src/states/events.rs", "rank": 89, "score": 17.974790356960277 }, { "content": "use std::sync::Arc;\n\n\n\nuse anyhow::Context;\n\n\n\nuse crate::constants::GITHUB_REPO_COMMIT_ENDPOINT_URL;\n\nuse crate::datatypes::commit::{Commit, CommitsJson};\n\nuse crate::states::app::{AppState, TaskResult};\n\n\n\npub struct CommitState {\n\n pub items: Vec<Commit>,\n\n}\n\n\n\nimpl CommitState {\n\n pub async fn new() -> Self {\n\n Self { items: Vec::new() }\n\n }\n\n\n\n pub fn count(&self) -> usize {\n\n self.items.len()\n\n }\n", "file_path": "src/states/commits.rs", "rank": 90, "score": 17.480117110399043 }, { "content": " )\n\n .widths(&widths)\n\n .highlight_style(\n\n Style::default()\n\n .bg(Color::DarkGray)\n\n .add_modifier(Modifier::BOLD),\n\n );\n\n\n\n // draw server info\n\n if let Some(selected) = self.state.selected().map(|s| &servers[s]) {\n\n draw_server_info(f, chunks[1], Arc::clone(&app), selected).await;\n\n } else {\n\n draw_info(f, chunks[1], Arc::clone(&app));\n\n }\n\n\n\n f.render_stateful_widget(table, chunks[0], &mut self.state.state);\n\n }\n\n}\n\n\n\nasync fn draw_server_info(\n", "file_path": "src/views/servers.rs", "rank": 91, "score": 17.257737048322785 }, { "content": " fn select(&mut self, index: Option<usize>) {\n\n TableState::select(self, index)\n\n }\n\n}\n\n\n\n// state compatible with both table and list\n\npub struct StatelessList<T: TuiState> {\n\n pub state: T,\n\n looped: bool,\n\n}\n\n\n\nimpl<T: TuiState> StatelessList<T> {\n\n pub fn new(state: T, looped: bool) -> Self {\n\n Self { state, looped }\n\n }\n\n\n\n pub fn select_next(&mut self, item_count: usize) {\n\n if item_count == 0 {\n\n self.state.select(None);\n\n } else {\n", "file_path": "src/states/mod.rs", "rank": 92, "score": 
16.71645909471692 }, { "content": " Self::Event(text) => write!(f, \"{}\", text),\n\n Self::Error(err) => write!(f, \"{:#}\", err),\n\n }\n\n }\n\n}\n\n\n\npub struct EventsState {\n\n pub current_event: Option<AppEvent>,\n\n events: mpsc::UnboundedSender<AppEvent>,\n\n events_recv: Option<mpsc::UnboundedReceiver<AppEvent>>,\n\n}\n\n\n\nimpl EventsState {\n\n pub async fn new(_: &AppConfig) -> Self {\n\n let (events, events_recv) = mpsc::unbounded_channel();\n\n\n\n Self {\n\n current_event: None,\n\n events,\n\n events_recv: Some(events_recv),\n", "file_path": "src/states/events.rs", "rank": 93, "score": 16.457495207341907 }, { "content": "\n\n app.watch_task(tokio::task::spawn(Self::location_fetch_task(\n\n app.clone(),\n\n queue_recv,\n\n )))\n\n .await;\n\n }\n\n\n\n pub async fn resolve(&mut self, ip: &IP) {\n\n if self.items.get(ip).is_some() {\n\n return;\n\n }\n\n\n\n self.queue.send(ip.to_owned()).expect(\"closed channel\");\n\n }\n\n\n\n async fn location_fetch_task(\n\n app: Arc<AppState>,\n\n mut rx: mpsc::UnboundedReceiver<IP>,\n\n ) -> TaskResult {\n", "file_path": "src/states/locations.rs", "rank": 94, "score": 16.26445378337592 }, { "content": "use std::collections::HashMap;\n\nuse std::convert::TryFrom;\n\nuse std::sync::Arc;\n\n\n\nuse tokio::sync::mpsc;\n\n\n\nuse anyhow::{Context, Error};\n\n\n\nuse crate::config::AppConfig;\n\nuse crate::datatypes::geolocation::{Location, LocationJson, IP};\n\nuse crate::states::app::{AppState, TaskResult};\n\n\n\npub struct LocationsState {\n\n pub items: HashMap<IP, Location>,\n\n queue: mpsc::UnboundedSender<IP>,\n\n queue_recv: Option<mpsc::UnboundedReceiver<IP>>,\n\n}\n\n\n\nimpl LocationsState {\n\n pub async fn new(_: &AppConfig) -> Self {\n", "file_path": "src/states/locations.rs", "rank": 95, "score": 15.830860358065781 }, { "content": " },\n\n HotKey {\n\n description: \"Go to top\",\n\n key: KeyCode::Home,\n\n modifiers: None,\n\n },\n\n HotKey {\n\n description: \"Go to bottom\",\n\n key: 
KeyCode::End,\n\n modifiers: None,\n\n },\n\n ]\n\n }\n\n}\n", "file_path": "src/states/mod.rs", "rank": 96, "score": 15.827362534098562 }, { "content": " // - on 1st launch: fetch N latest commits, save latest hash\n\n // - on 2nd launch: read latest hash and fetch newer commits\n\n loaded: bool,\n\n\n\n state: StatelessList<ListState>,\n\n}\n\n\n\nimpl CommitView {\n\n pub fn new() -> Self {\n\n Self {\n\n loaded: false,\n\n state: StatelessList::new(ListState::default(), false),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Named for CommitView {\n\n fn name(&self) -> String {\n\n \"Recent Commit List\".to_owned()\n", "file_path": "src/views/commits.rs", "rank": 97, "score": 15.790181779583985 }, { "content": " .highlight_style(\n\n Style::default()\n\n .fg(Color::Blue)\n\n .add_modifier(Modifier::BOLD),\n\n )\n\n .divider(DOT)\n\n .select(self.state.selected().unwrap_or_default()),\n\n header[0],\n\n );\n\n\n\n // cannot move this to function because of match limitation for arms\n\n // even if they implement same trait\n\n match self.selected_tab() {\n\n Tab::Servers => self.view_servers.draw(f, chunks[1], app).await,\n\n Tab::Versions => self.view_versions.draw(f, chunks[1], app).await,\n\n Tab::Commits => self.view_commits.draw(f, chunks[1], app).await,\n\n };\n\n }\n\n}\n", "file_path": "src/views/tabs.rs", "rank": 98, "score": 15.723625700571354 }, { "content": " );\n\n\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints(progress_bars_constraints)\n\n .split(chunks[1]);\n\n\n\n for (i, progress_item) in in_progress.iter().enumerate() {\n\n let label = progress_item.label();\n\n let ratio = progress_item.ratio().unwrap_or(1.0);\n\n\n\n let gauge = Gauge::default().ratio(ratio).label(label).gauge_style(\n\n Style::default()\n\n .fg(Color::Green)\n\n .bg(Color::Red)\n\n .add_modifier(Modifier::BOLD),\n\n );\n\n\n\n // + 1 offset for upper border\n\n f.render_widget(\n", "file_path": "src/views/versions.rs", 
"rank": 99, "score": 15.699838886556961 } ]
Rust
puffin_http/src/client.rs
MarijnS95/puffin
71a4d7c97a63dac8dff42c573af3684559b952c6
use std::sync::{ atomic::{AtomicBool, Ordering::SeqCst}, Arc, Mutex, }; use puffin::{FrameData, FrameView}; pub struct Client { addr: String, connected: Arc<AtomicBool>, alive: Arc<AtomicBool>, frame_view: Arc<Mutex<FrameView>>, } impl Drop for Client { fn drop(&mut self) { self.alive.store(false, SeqCst); } } impl Client { pub fn new(addr: String) -> Self { let alive = Arc::new(AtomicBool::new(true)); let connected = Arc::new(AtomicBool::new(false)); let frame_view = Arc::new(Mutex::new(FrameView::default())); let client = Self { addr: addr.clone(), connected: connected.clone(), alive: alive.clone(), frame_view: frame_view.clone(), }; std::thread::spawn(move || { log::info!("Connecting to {}…", addr); while alive.load(SeqCst) { match std::net::TcpStream::connect(&addr) { Ok(mut stream) => { log::info!("Connected to {}", addr); connected.store(true, SeqCst); while alive.load(SeqCst) { match consume_message(&mut stream) { Ok(frame_data) => { frame_view .lock() .unwrap() .add_frame(std::sync::Arc::new(frame_data)); } Err(err) => { log::warn!( "Connection to puffin server closed: {}", error_display_chain(err.as_ref()) ); connected.store(false, SeqCst); break; } } } } Err(err) => { log::debug!("Failed to connect to {}: {}", addr, err); std::thread::sleep(std::time::Duration::from_secs(1)); } } } }); client } pub fn addr(&self) -> &str { &self.addr } pub fn connected(&self) -> bool { self.connected.load(SeqCst) } pub fn frame_view(&self) -> std::sync::MutexGuard<'_, FrameView> { self.frame_view.lock().unwrap() } } pub fn consume_message(stream: &mut impl std::io::Read) -> anyhow::Result<puffin::FrameData> { let mut server_version = [0_u8; 2]; stream.read_exact(&mut server_version)?; let server_version = u16::from_le_bytes(server_version); match server_version.cmp(&crate::PROTOCOL_VERSION) { std::cmp::Ordering::Less => { anyhow::bail!( "puffin server is using an older protocol version ({}) than the client ({}).", server_version, crate::PROTOCOL_VERSION ); } 
std::cmp::Ordering::Equal => {} std::cmp::Ordering::Greater => { anyhow::bail!( "puffin server is using a newer protocol version ({}) than the client ({}). Update puffin_viewer with 'cargo install puffin_viewer'.", server_version, crate::PROTOCOL_VERSION ); } } use anyhow::Context as _; FrameData::read_next(stream) .context("Failed to parse FrameData")? .ok_or_else(|| anyhow::format_err!("End of stream")) } fn error_display_chain(error: &dyn std::error::Error) -> String { let mut s = error.to_string(); if let Some(source) = error.source() { s.push_str(" -> "); s.push_str(&error_display_chain(source)); } s }
use std::sync::{ atomic::{AtomicBool, Ordering::SeqCst}, Arc, Mutex, }; use puffin::{FrameData, FrameView}; pub struct Client { addr: String, connected: Arc<AtomicBool>, alive: Arc<AtomicBool>, frame_view: Arc<Mutex<FrameView>>, } impl Drop for Client { fn drop(&mut self) { self.alive.store(false, SeqCst); } } impl Client { pub fn new(addr: String) -> Self { let alive = Arc::new(AtomicBool::new(true)); let connected = Arc::new(AtomicBool::new(false)); let frame_view = Arc::new(Mutex::new(FrameView::default())); let client = Self { addr: addr.clone(), connected: connected.clone(), alive: alive.clone(), frame_view: frame_view.clone(), }; std::thread::spawn(move || { log::info!("Connecting to {}…", addr); while alive.load(SeqCst) { match std::net::TcpStream::connect(&addr) { Ok(mut stream) => { log::info!("Connected to {}", addr); connected.store(true, SeqCst); while alive.load(SeqCst) { match consume_message(&mut stream) { Ok(frame_data) => { frame_view .lock() .unwrap() .add_frame(std::sync::Arc::new(frame_data)); } Err(err) => { log::warn!( "Connection to puffin server closed: {}", error_display_chain(err.as_ref()) ); connected.store(false, SeqCst); break; } } } } Err(err) => { log::debug!("Failed to connect to {}: {}", addr, err); std::thread::sleep(std::time::Duration::from_secs(1)); } } } }); client } pub fn addr(&self) -> &str { &self.addr } pub fn connected(&self) -> bool { self.connected.load(SeqCst) } pub fn frame_view(&self) -> std::sync::MutexGuard<'_, FrameView> { self.frame_view.lock().unwrap() } } pub fn consume_message(stream: &mut impl std::io::Read) -> anyhow::Result<puffin::FrameData> { let mut server_version = [0_u8; 2]; stream.read_exact(&mut server_version)?; let server_version = u16::from_le_bytes(server_version);
use anyhow::Context as _; FrameData::read_next(stream) .context("Failed to parse FrameData")? .ok_or_else(|| anyhow::format_err!("End of stream")) } fn error_display_chain(error: &dyn std::error::Error) -> String { let mut s = error.to_string(); if let Some(source) = error.source() { s.push_str(" -> "); s.push_str(&error_display_chain(source)); } s }
match server_version.cmp(&crate::PROTOCOL_VERSION) { std::cmp::Ordering::Less => { anyhow::bail!( "puffin server is using an older protocol version ({}) than the client ({}).", server_version, crate::PROTOCOL_VERSION ); } std::cmp::Ordering::Equal => {} std::cmp::Ordering::Greater => { anyhow::bail!( "puffin server is using a newer protocol version ({}) than the client ({}). Update puffin_viewer with 'cargo install puffin_viewer'.", server_version, crate::PROTOCOL_VERSION ); } }
if_condition
[ { "content": "/// Are the profiler scope macros turned on?\n\n/// This is [`false`] by default.\n\npub fn are_scopes_on() -> bool {\n\n MACROS_ON.load(Ordering::Relaxed)\n\n}\n\n\n\n/// All times are expressed as integer nanoseconds since some event.\n\npub type NanoSecond = i64;\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// Stream of profiling events from one thread.\n\n#[derive(Clone, Default)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Deserialize, serde::Serialize))]\n\npub struct Stream(Vec<u8>);\n\n\n\nimpl Stream {\n\n pub fn is_empty(&self) -> bool {\n\n self.0.is_empty()\n\n }\n\n\n\n pub fn len(&self) -> usize {\n", "file_path": "puffin/src/lib.rs", "rank": 1, "score": 153834.63908109858 }, { "content": "struct Client {\n\n client_addr: SocketAddr,\n\n packet_tx: Option<crossbeam_channel::Sender<Packet>>,\n\n join_handle: Option<std::thread::JoinHandle<()>>,\n\n}\n\n\n\nimpl Drop for Client {\n\n fn drop(&mut self) {\n\n // Take care to send everything before we shut down!\n\n\n\n // Drop the sender to signal to shut down:\n\n self.packet_tx = None;\n\n\n\n // Wait for the shutdown:\n\n if let Some(join_handle) = self.join_handle.take() {\n\n join_handle.join().ok();\n\n }\n\n }\n\n}\n\n\n", "file_path": "puffin_http/src/server.rs", "rank": 2, "score": 152923.92241122315 }, { "content": "#[doc(hidden)]\n\n#[inline]\n\npub fn short_file_name(name: &str) -> &str {\n\n // TODO: \"foo/bar/src/lib.rs\" -> \"bar/src/lib.rs\"\n\n\n\n if let Some(separator) = name.rfind(&['/', '\\\\'][..]) {\n\n // \"foo/bar/baz.rs\" -> \"baz.rs\"\n\n &name[separator + 1..]\n\n } else {\n\n name\n\n }\n\n}\n\n\n", "file_path": "puffin/src/lib.rs", "rank": 3, "score": 147670.01806759293 }, { "content": "#[doc(hidden)]\n\n#[inline]\n\npub fn clean_function_name(name: &str) -> &str {\n\n if let Some(colon) = name.rfind(\"::\") {\n\n if let Some(colon) = name[..colon].rfind(\"::\") {\n\n // \"foo::bar::baz::function_name\" 
-> \"baz::function_name\"\n\n &name[colon + 2..]\n\n } else {\n\n // \"foo::function_name\" -> \"foo::function_name\"\n\n name\n\n }\n\n } else {\n\n name\n\n }\n\n}\n\n\n", "file_path": "puffin/src/lib.rs", "rank": 4, "score": 147670.01806759293 }, { "content": "/// Turn on/off the profiler macros ([`profile_function`], [`profile_scope`] etc).\n\n/// When off, these calls take only 1-2 ns to call (100x faster).\n\n/// This is [`false`] by default.\n\npub fn set_scopes_on(on: bool) {\n\n MACROS_ON.store(on, Ordering::Relaxed);\n\n}\n\n\n", "file_path": "puffin/src/lib.rs", "rank": 5, "score": 143906.19337188342 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n puffin::set_scopes_on(true);\n\n puffin::profile_scope!(\"keep one scope open so we don't profile sending scopes\");\n\n\n\n c.bench_function(\"profile_function\", |b| {\n\n b.iter(|| {\n\n puffin::profile_function!();\n\n })\n\n });\n\n c.bench_function(\"profile_function_data\", |b| {\n\n b.iter(|| {\n\n puffin::profile_function!(\"my_mesh.obj\");\n\n })\n\n });\n\n c.bench_function(\"profile_scope\", |b| {\n\n b.iter(|| {\n\n puffin::profile_scope!(\"my longish scope name\");\n\n })\n\n });\n\n c.bench_function(\"profile_scope_data\", |b| {\n", "file_path": "puffin/benches/benchmark.rs", "rank": 6, "score": 135173.78004759108 }, { "content": "pub fn init(title: &str) -> System {\n\n let title = match Path::new(&title).file_name() {\n\n Some(file_name) => file_name.to_str().unwrap(),\n\n None => title,\n\n };\n\n let event_loop = EventLoop::new();\n\n let context = glutin::ContextBuilder::new().with_vsync(true);\n\n let builder = WindowBuilder::new()\n\n .with_title(title.to_owned())\n\n .with_inner_size(glutin::dpi::LogicalSize::new(1024f64, 768f64));\n\n let display =\n\n Display::new(builder, context, &event_loop).expect(\"Failed to initialize display\");\n\n\n\n let mut imgui = Context::create();\n\n imgui.set_ini_filename(None);\n\n\n\n let mut platform = WinitPlatform::init(&mut 
imgui);\n\n {\n\n let gl_window = display.gl_window();\n\n let window = gl_window.window();\n", "file_path": "puffin-imgui/examples/imgui.rs", "rank": 7, "score": 132115.35649997412 }, { "content": "/// Listens for incoming connections\n\n/// and streams them puffin profiler data.\n\nstruct PuffinServerImpl {\n\n tcp_listener: TcpListener,\n\n clients: Vec<Client>,\n\n num_clients: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl PuffinServerImpl {\n\n fn accept_new_clients(&mut self) -> anyhow::Result<()> {\n\n loop {\n\n match self.tcp_listener.accept() {\n\n Ok((tcp_stream, client_addr)) => {\n\n tcp_stream\n\n .set_nonblocking(false)\n\n .context(\"stream.set_nonblocking\")?;\n\n\n\n log::info!(\"{} connected\", client_addr);\n\n\n\n let (packet_tx, packet_rx) = crossbeam_channel::bounded(MAX_FRAMES_IN_QUEUE);\n\n\n\n let join_handle = std::thread::Builder::new()\n", "file_path": "puffin_http/src/server.rs", "rank": 8, "score": 129003.16009362601 }, { "content": "pub fn ui(ui: &mut egui::Ui, frames: &[std::sync::Arc<UnpackedFrameData>]) {\n\n let mut threads = std::collections::HashSet::<&ThreadInfo>::new();\n\n let mut stats = Stats::default();\n\n\n\n for frame in frames {\n\n threads.extend(frame.thread_streams.keys());\n\n for (thread_info, stream) in &frame.thread_streams {\n\n collect_stream(&mut stats, &thread_info.name, &stream.stream).ok();\n\n }\n\n }\n\n\n\n let mut total_bytes = 0;\n\n let mut total_ns = 0;\n\n for scope in stats.scopes.values() {\n\n total_bytes += scope.bytes;\n\n total_ns += scope.total_self_ns;\n\n }\n\n\n\n ui.label(\"This view can be used to find scopes that use up a lot of bandwidth, and should maybe be removed.\");\n\n\n", "file_path": "puffin_egui/src/stats.rs", "rank": 9, "score": 127327.03964991293 }, { "content": "/// Show an [`egui::Window`] with the profiler contents.\n\n///\n\n/// If you want to control the window yourself, use [`profiler_ui`] instead.\n\n///\n\n/// Returns `false` if the user closed the profile window.\n\npub fn 
profiler_window(ctx: &egui::Context) -> bool {\n\n puffin::profile_function!();\n\n let mut open = true;\n\n egui::Window::new(\"Profiler\")\n\n .default_size([1024.0, 600.0])\n\n .open(&mut open)\n\n .show(ctx, profiler_ui);\n\n open\n\n}\n\n\n\nstatic PROFILE_UI: once_cell::sync::Lazy<Mutex<GlobalProfilerUi>> =\n\n once_cell::sync::Lazy::new(Default::default);\n\n\n", "file_path": "puffin_egui/src/lib.rs", "rank": 10, "score": 122316.45431555298 }, { "content": "/// Show the profiler.\n\n///\n\n/// Call this from within an [`egui::Window`], or use [`profiler_window`] instead.\n\npub fn profiler_ui(ui: &mut egui::Ui) {\n\n let mut profile_ui = PROFILE_UI.lock().unwrap();\n\n profile_ui.ui(ui);\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// Show [`puffin::GlobalProfiler`], i.e. profile the app we are running in.\n\n#[derive(Default)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Deserialize, serde::Serialize))]\n\n#[cfg_attr(feature = \"serde\", serde(default))]\n\npub struct GlobalProfilerUi {\n\n #[cfg_attr(feature = \"serde\", serde(skip))]\n\n global_frame_view: GlobalFrameView,\n\n\n\n pub profiler_ui: ProfilerUi,\n\n}\n\n\n\nimpl GlobalProfilerUi {\n\n /// Show an [`egui::Window`] with the profiler contents.\n", "file_path": "puffin_egui/src/lib.rs", "rank": 11, "score": 122086.64994934895 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn type_name_of<T>(_: T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\n/// Returns the name of the calling function without a long module path prefix.\n\n#[macro_export]\n\nmacro_rules! 
current_function_name {\n\n () => {{\n\n fn f() {}\n\n let name = $crate::type_name_of(f);\n\n // Remove \"::f\" from the name:\n\n let name = &name.get(..name.len() - 3).unwrap();\n\n $crate::clean_function_name(name)\n\n }};\n\n}\n\n\n", "file_path": "puffin/src/lib.rs", "rank": 12, "score": 121484.58784660773 }, { "content": "fn client_loop(\n\n packet_rx: crossbeam_channel::Receiver<Packet>,\n\n client_addr: SocketAddr,\n\n mut tcp_stream: TcpStream,\n\n) {\n\n while let Ok(packet) = packet_rx.recv() {\n\n if let Err(err) = tcp_stream.write_all(&packet) {\n\n log::info!(\n\n \"puffin server failed sending to {}: {} (kind: {:?})\",\n\n client_addr,\n\n err,\n\n err.kind()\n\n );\n\n break;\n\n }\n\n }\n\n}\n", "file_path": "puffin_http/src/server.rs", "rank": 13, "score": 118020.73531627592 }, { "content": "/// Select the slowest frames, up to a certain count.\n\npub fn select_slowest(frames: &[Arc<FrameData>], max: usize) -> Vec<Arc<FrameData>> {\n\n let mut slowest: std::collections::BinaryHeap<OrderedByDuration> = Default::default();\n\n for frame in frames {\n\n slowest.push(OrderedByDuration(frame.clone()));\n\n while slowest.len() > max {\n\n slowest.pop();\n\n }\n\n }\n\n let mut slowest: Vec<_> = slowest.drain().map(|x| x.0).collect();\n\n slowest.sort_by_key(|frame| frame.frame_index());\n\n slowest\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "puffin/src/profile_view.rs", "rank": 14, "score": 114926.37519066995 }, { "content": "/// Show the flamegraph.\n\npub fn ui(ui: &mut egui::Ui, options: &mut Options, frames: &SelectedFrames) {\n\n puffin::profile_function!();\n\n let mut reset_view = false;\n\n\n\n let num_frames = frames.frames.len();\n\n\n\n {\n\n // reset view if number of selected frames changes (and we are viewing all of them):\n\n let num_frames_id = ui.id().with(\"num_frames\");\n\n let num_frames_last_frame = ui\n\n .memory()\n\n .data\n\n 
.get_temp::<usize>(num_frames_id)\n\n .unwrap_or_default();\n\n\n\n if num_frames_last_frame != num_frames && !options.merge_scopes {\n\n reset_view = true;\n\n }\n\n ui.memory().data.insert_temp(num_frames_id, num_frames);\n\n }\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 15, "score": 114383.02950828942 }, { "content": "fn max_frames_ui(ui: &mut egui::Ui, frame_view: &mut FrameView) {\n\n let uniq = frame_view.all_uniq();\n\n\n\n let mut bytes = 0;\n\n for frame in &uniq {\n\n bytes += frame.bytes_of_ram_used();\n\n }\n\n\n\n let frames_per_second = if let (Some(first), Some(last)) = (uniq.first(), uniq.last()) {\n\n let nanos = last.range_ns().1 - first.range_ns().0;\n\n let seconds = nanos as f64 * 1e-9;\n\n let frames = last.frame_index() - first.frame_index() + 1;\n\n frames as f64 / seconds\n\n } else {\n\n 60.0\n\n };\n\n\n\n ui.horizontal(|ui| {\n\n ui.label(\"Max recent frames to store:\");\n\n\n", "file_path": "puffin_egui/src/lib.rs", "rank": 16, "score": 113966.0464149853 }, { "content": "fn should_quit() -> bool {\n\n if cfg!(target_os = \"macos\") {\n\n (is_key_down(KeyCode::LeftSuper) || is_key_down(KeyCode::RightSuper))\n\n && is_key_down(KeyCode::Q)\n\n } else {\n\n (is_key_down(KeyCode::LeftAlt) || is_key_down(KeyCode::RightAlt))\n\n && is_key_down(KeyCode::F4)\n\n }\n\n}\n\n\n", "file_path": "puffin_egui/examples/macroquad.rs", "rank": 17, "score": 108108.68485539142 }, { "content": "/// Report a stream of profile data from a thread to the [`GlobalProfiler`] singleton.\n\npub fn global_reporter(info: ThreadInfo, stream_info: &StreamInfoRef<'_>) {\n\n GlobalProfiler::lock().report(info, stream_info);\n\n}\n\n\n\n/// Collects profiling data for one thread\n\npub struct ThreadProfiler {\n\n stream_info: StreamInfo,\n\n /// Current depth.\n\n depth: usize,\n\n now_ns: NsSource,\n\n reporter: ThreadReporter,\n\n start_time_ns: Option<NanoSecond>,\n\n}\n\n\n\nimpl Default for ThreadProfiler {\n\n fn default() -> Self {\n\n Self {\n\n 
stream_info: Default::default(),\n\n depth: 0,\n\n now_ns: crate::now_ns,\n", "file_path": "puffin/src/lib.rs", "rank": 18, "score": 107437.65877955993 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\n#[allow(clippy::unused_unit)]\n\n#[wasm_bindgen]\n\npub fn start(canvas_id: &str) -> Result<(), eframe::wasm_bindgen::JsValue> {\n\n puffin::set_scopes_on(true); // quiet warning in `puffin_egui`.\n\n eframe::start_web(\n\n canvas_id,\n\n Box::new(|_cc| Box::new(PuffinViewer::new(Source::None))),\n\n )\n\n}\n", "file_path": "puffin_viewer/src/lib.rs", "rank": 19, "score": 106317.36790866638 }, { "content": "/// For the given thread, merge all scopes with the same id path.\n\npub fn merge_scopes_for_thread<'s>(\n\n frames: &'s [std::sync::Arc<UnpackedFrameData>],\n\n thread_info: &ThreadInfo,\n\n) -> Result<Vec<MergeScope<'s>>> {\n\n let mut top_nodes: BTreeMap<&'s str, MergeNode<'s>> = Default::default();\n\n\n\n for frame in frames {\n\n if let Some(stream_info) = frame.thread_streams.get(thread_info) {\n\n let offset_ns = frame.meta.range_ns.0 - frames[0].meta.range_ns.0; // make everything relative to first frame\n\n\n\n let top_scopes = Reader::from_start(&stream_info.stream).read_top_scopes()?;\n\n for scope in top_scopes {\n\n top_nodes.entry(scope.record.id).or_default().add(\n\n &stream_info.stream,\n\n MergePiece {\n\n relative_start_ns: scope.record.start_ns - offset_ns,\n\n scope,\n\n },\n\n )?;\n\n }\n\n }\n\n }\n\n\n\n Ok(build(top_nodes, frames.len() as _))\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "puffin/src/merge.rs", "rank": 20, "score": 99905.49583897594 }, { "content": "#[derive(Clone)]\n\nstruct OrderedByDuration(Arc<FrameData>);\n\n\n\nimpl PartialEq for OrderedByDuration {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.0.duration_ns().eq(&other.0.duration_ns())\n\n }\n\n}\n\nimpl Eq for OrderedByDuration {}\n\n\n\nimpl PartialOrd for OrderedByDuration {\n\n fn 
partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl Ord for OrderedByDuration {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n self.0.duration_ns().cmp(&other.0.duration_ns()).reverse()\n\n }\n\n}\n", "file_path": "puffin/src/profile_view.rs", "rank": 22, "score": 98803.04953224203 }, { "content": "#[inline]\n\npub fn now_ns() -> NanoSecond {\n\n // This can maybe be optimized\n\n use once_cell::sync::Lazy;\n\n use std::time::Instant;\n\n\n\n fn epoch_offset_and_start() -> (NanoSecond, Instant) {\n\n if let Ok(duration_since_epoch) = std::time::UNIX_EPOCH.elapsed() {\n\n let nanos_since_epoch = duration_since_epoch.as_nanos() as NanoSecond;\n\n (nanos_since_epoch, Instant::now())\n\n } else {\n\n // system time is set before 1970. this should be quite rare.\n\n (0, Instant::now())\n\n }\n\n }\n\n\n\n static START_TIME: Lazy<(NanoSecond, Instant)> = Lazy::new(epoch_offset_and_start);\n\n START_TIME.0 + START_TIME.1.elapsed().as_nanos() as NanoSecond\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n", "file_path": "puffin/src/lib.rs", "rank": 23, "score": 97569.78384196621 }, { "content": "fn main() {\n\n simple_logger::SimpleLogger::new()\n\n .with_level(log::LevelFilter::Info)\n\n .without_timestamps()\n\n .init()\n\n .ok();\n\n\n\n let server_addr = format!(\"0.0.0.0:{}\", puffin_http::DEFAULT_PORT);\n\n eprintln!(\"Serving demo profile data on {}\", server_addr);\n\n\n\n let _puffin_server = puffin_http::Server::new(&server_addr).unwrap();\n\n\n\n puffin::set_scopes_on(true);\n\n\n\n let mut frame_counter = 0;\n\n\n\n loop {\n\n puffin::profile_scope!(\"main_loop\", format!(\"frame {}\", frame_counter));\n\n puffin::GlobalProfiler::lock().new_frame();\n\n\n", "file_path": "puffin_http/examples/server.rs", "rank": 24, "score": 90278.26823158935 }, { "content": "fn longest_valid_utf8_prefix(data: &[u8]) -> &str {\n\n match std::str::from_utf8(data) 
{\n\n Ok(s) => s,\n\n Err(error) => {\n\n // The string may be been truncated to fit a max length of 255.\n\n // This truncation may have happened in the middle of a unicode character.\n\n std::str::from_utf8(&data[..error.valid_up_to()]).expect(\"We can trust valid_up_to\")\n\n }\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// Read each top-level sibling scopes\n\nimpl<'s> Iterator for Reader<'s> {\n\n type Item = Result<Scope<'s>>;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.parse_scope().transpose()\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "puffin/src/data.rs", "rank": 25, "score": 89746.16881581556 }, { "content": "#[test]\n\nfn test_maybe_mut_ref() {\n\n {\n\n let x = 42;\n\n let mut x_ref = MaybeMutRef::Ref(&x);\n\n assert_eq!(x_ref.as_mut(), None);\n\n assert_eq!(*x_ref, 42);\n\n }\n\n {\n\n let mut x = 42;\n\n let mut x_ref = MaybeMutRef::MutRef(&mut x);\n\n assert_eq!(*x_ref, 42);\n\n *x_ref.as_mut().unwrap() = 1337;\n\n assert_eq!(*x_ref, 1337);\n\n }\n\n}\n", "file_path": "puffin_egui/src/maybe_mut_ref.rs", "rank": 26, "score": 88641.2630275586 }, { "content": "fn grid_text(grid_ns: NanoSecond) -> String {\n\n let grid_ms = to_ms(grid_ns);\n\n if grid_ns % 1_000_000 == 0 {\n\n format!(\"{:.0} ms\", grid_ms)\n\n } else if grid_ns % 100_000 == 0 {\n\n format!(\"{:.1} ms\", grid_ms)\n\n } else if grid_ns % 10_000 == 0 {\n\n format!(\"{:.2} ms\", grid_ms)\n\n } else {\n\n format!(\"{:.3} ms\", grid_ms)\n\n }\n\n}\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 27, "score": 87979.32413356031 }, { "content": "fn grid_text(grid_ns: NanoSecond) -> String {\n\n let grid_ms = to_ms(grid_ns);\n\n if grid_ns % 1_000_000 == 0 {\n\n format!(\"{:.0} ms\", grid_ms)\n\n } else if grid_ns % 100_000 == 0 {\n\n format!(\"{:.1} ms\", grid_ms)\n\n } else if grid_ns % 10_000 == 0 {\n\n format!(\"{:.2} ms\", grid_ms)\n\n } 
else {\n\n format!(\"{:.3} ms\", grid_ms)\n\n }\n\n}\n\n\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 28, "score": 87979.32413356031 }, { "content": "fn format_time(nanos: NanoSecond) -> Option<String> {\n\n let years_since_epoch = nanos / 1_000_000_000 / 60 / 60 / 24 / 365;\n\n if 50 <= years_since_epoch && years_since_epoch <= 150 {\n\n use chrono::TimeZone as _;\n\n let datetime = chrono::Utc.timestamp(nanos / 1_000_000_000, (nanos % 1_000_000_000) as _);\n\n Some(datetime.format(\"%Y-%m-%d %H:%M:%S%.3f UTC\").to_string())\n\n } else {\n\n None // `nanos` is likely not counting from epoch.\n\n }\n\n}\n\n\n", "file_path": "puffin_egui/src/lib.rs", "rank": 29, "score": 84676.97927106665 }, { "content": "fn collect_stream<'s>(\n\n stats: &mut Stats<'s>,\n\n thread_name: &str,\n\n stream: &'s puffin::Stream,\n\n) -> puffin::Result<()> {\n\n for scope in puffin::Reader::from_start(stream) {\n\n collect_scope(stats, thread_name, stream, &scope?)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "puffin_egui/src/stats.rs", "rank": 30, "score": 84551.39581213945 }, { "content": "type Packet = Arc<[u8]>;\n\n\n", "file_path": "puffin_http/src/server.rs", "rank": 31, "score": 82804.95330737189 }, { "content": "fn sleep_ms(ms: usize) {\n\n puffin::profile_function!();\n\n match ms {\n\n 0 => {}\n\n 1 => std::thread::sleep(std::time::Duration::from_millis(1)),\n\n _ => {\n\n sleep_ms(ms / 2);\n\n sleep_ms(ms - (ms / 2));\n\n }\n\n }\n\n}\n", "file_path": "puffin_http/examples/server.rs", "rank": 32, "score": 77126.2977074873 }, { "content": "fn frames_info_ui(ui: &mut egui::Ui, selection: &SelectedFrames) {\n\n let mut sum_ns = 0;\n\n let mut sum_scopes = 0;\n\n\n\n for frame in &selection.frames {\n\n let (min_ns, max_ns) = frame.range_ns();\n\n sum_ns += max_ns - min_ns;\n\n\n\n sum_scopes += frame.meta.num_scopes;\n\n }\n\n\n\n let frame_indices = if selection.frames.len() == 1 {\n\n format!(\"frame #{}\", selection.frames[0].frame_index())\n\n } else if 
selection.frames.len() as u64\n\n == selection.frames.last().frame_index() - selection.frames.first().frame_index() + 1\n\n {\n\n format!(\n\n \"{} frames (#{} - #{})\",\n\n selection.frames.len(),\n\n selection.frames.first().frame_index(),\n", "file_path": "puffin_egui/src/lib.rs", "rank": 33, "score": 74500.55203764718 }, { "content": "#[cfg(feature = \"packing\")]\n\n#[cfg(target_arch = \"wasm32\")]\n\n#[cfg(feature = \"ruzstd\")]\n\nfn decode_zstd(mut bytes: &[u8]) -> anyhow::Result<Vec<u8>> {\n\n use anyhow::Context as _;\n\n use std::io::Read as _;\n\n let mut decoded = Vec::new();\n\n let mut decoder = ruzstd::StreamingDecoder::new(&mut bytes)\n\n .map_err(|err| anyhow::format_err!(\"zstd decompress: {}\", err))?;\n\n decoder\n\n .read_to_end(&mut decoded)\n\n .context(\"zstd decompress\")?;\n\n Ok(decoded)\n\n}\n\n\n\n#[cfg(feature = \"packing\")]\n\n#[cfg(all(not(feature = \"zstd\"), not(feature = \"ruzstd\")))]\n\ncompile_error!(\"Either feature zstd or ruzstd must be enabled\");\n", "file_path": "puffin/src/frame_data.rs", "rank": 34, "score": 74076.2495210253 }, { "content": "fn interact_with_canvas(options: &mut Options, response: &Response, info: &Info) {\n\n if response.drag_delta().x != 0.0 {\n\n options.sideways_pan_in_points += response.drag_delta().x;\n\n options.zoom_to_relative_ns_range = None;\n\n }\n\n\n\n if response.hovered() {\n\n // Sideways pan with e.g. 
a touch pad:\n\n if info.ctx.input().scroll_delta.x != 0.0 {\n\n options.sideways_pan_in_points += info.ctx.input().scroll_delta.x;\n\n options.zoom_to_relative_ns_range = None;\n\n }\n\n\n\n let mut zoom_factor = info.ctx.input().zoom_delta_2d().x;\n\n\n\n if response.dragged_by(PointerButton::Secondary) {\n\n zoom_factor *= (response.drag_delta().y * 0.01).exp();\n\n }\n\n\n\n if zoom_factor != 1.0 {\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 35, "score": 74076.2495210253 }, { "content": "type ThreadReporter = fn(ThreadInfo, &StreamInfoRef<'_>);\n\n\n", "file_path": "puffin/src/lib.rs", "rank": 36, "score": 67745.1777712293 }, { "content": "fn merge_scope_tooltip(ui: &mut egui::Ui, merge: &MergeScope<'_>, num_frames: usize) {\n\n #![allow(clippy::collapsible_else_if)]\n\n\n\n ui.monospace(format!(\"id: {}\", merge.id));\n\n if !merge.location.is_empty() {\n\n ui.monospace(format!(\"location: {}\", merge.location));\n\n }\n\n if !merge.data.is_empty() {\n\n ui.monospace(format!(\"data: {}\", merge.data));\n\n }\n\n ui.add_space(8.0);\n\n\n\n if num_frames <= 1 {\n\n if merge.num_pieces <= 1 {\n\n ui.monospace(format!(\n\n \"duration: {:7.3} ms\",\n\n to_ms(merge.duration_per_frame_ns)\n\n ));\n\n } else {\n\n ui.monospace(format!(\"sum of {} scopes\", merge.num_pieces));\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 37, "score": 65296.278501795845 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct Filter {\n\n filter: String,\n\n}\n\n\n\nimpl Filter {\n\n fn ui(&mut self, ui: &imgui::Ui<'_>) {\n\n ui.text(\"Scope filter:\");\n\n ui.same_line();\n\n ui.input_text(\"##scopefilter\", &mut self.filter).build();\n\n self.filter = self.filter.to_lowercase();\n\n ui.same_line();\n\n if ui.button(\"X\") {\n\n self.filter.clear();\n\n }\n\n }\n\n\n\n /// if true, show everything\n\n fn is_empty(&self) -> bool {\n\n self.filter.is_empty()\n\n }\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 38, "score": 62184.294153378185 }, { 
"content": "#[derive(Clone, Debug, Default)]\n\nstruct Filter {\n\n filter: String,\n\n}\n\n\n\nimpl Filter {\n\n fn ui(&mut self, ui: &mut egui::Ui) {\n\n ui.horizontal(|ui| {\n\n ui.label(\"Scope filter:\");\n\n ui.text_edit_singleline(&mut self.filter);\n\n self.filter = self.filter.to_lowercase();\n\n if ui.button(\"x\").clicked() {\n\n self.filter.clear();\n\n }\n\n });\n\n }\n\n\n\n /// if true, show everything\n\n fn is_empty(&self) -> bool {\n\n self.filter.is_empty()\n\n }\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 39, "score": 62184.294153378185 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct Vec2 {\n\n pub x: f32,\n\n pub y: f32,\n\n}\n\n\n\nimpl Vec2 {\n\n pub fn new(x: f32, y: f32) -> Self {\n\n Self { x, y }\n\n }\n\n\n\n pub fn min(self, other: Self) -> Self {\n\n Self {\n\n x: self.x.min(other.x),\n\n y: self.y.min(other.y),\n\n }\n\n }\n\n\n\n pub fn max(self, other: Self) -> Self {\n\n Self {\n\n x: self.x.max(other.x),\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 40, "score": 62184.294153378185 }, { "content": "/// Context for painting a frame.\n\nstruct Info {\n\n ctx: egui::Context,\n\n /// Bounding box of canvas in points:\n\n canvas: Rect,\n\n /// Interaction with the profiler canvas\n\n response: Response,\n\n painter: egui::Painter,\n\n text_height: f32,\n\n /// Time of first event\n\n start_ns: NanoSecond,\n\n /// Time of last event\n\n stop_ns: NanoSecond,\n\n /// How many frames we are viewing\n\n num_frames: usize,\n\n\n\n font_id: FontId,\n\n}\n\n\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 41, "score": 62184.294153378185 }, { "content": "fn build<'s>(nodes: BTreeMap<&'s str, MergeNode<'s>>, num_frames: i64) -> Vec<MergeScope<'s>> {\n\n let mut scopes: Vec<_> = nodes\n\n .into_values()\n\n .map(|node| node.build(num_frames))\n\n .collect();\n\n\n\n // Earliest first:\n\n scopes.sort_by_key(|scope| scope.relative_start_ns);\n\n\n\n // Make sure sibling scopes do not overlap:\n\n let mut 
relative_ns = 0;\n\n for scope in &mut scopes {\n\n scope.relative_start_ns = scope.relative_start_ns.max(relative_ns);\n\n relative_ns = scope.relative_start_ns + scope.duration_per_frame_ns;\n\n }\n\n\n\n scopes\n\n}\n\n\n", "file_path": "puffin/src/merge.rs", "rank": 42, "score": 61913.778582568026 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct ScopeStats {\n\n count: usize,\n\n bytes: usize,\n\n /// Time covered by all scopes, minus those covered by child scopes.\n\n /// A lot of time == useful scope.\n\n total_self_ns: NanoSecond,\n\n /// Time covered by the slowest scope, minus those covered by child scopes.\n\n /// A lot of time == useful scope.\n\n max_ns: NanoSecond,\n\n}\n\n\n", "file_path": "puffin_egui/src/stats.rs", "rank": 43, "score": 60713.55066363978 }, { "content": "#[derive(Default)]\n\nstruct Stats<'s> {\n\n scopes: std::collections::HashMap<Key<'s>, ScopeStats>,\n\n}\n\n\n", "file_path": "puffin_egui/src/stats.rs", "rank": 44, "score": 58698.51487155137 }, { "content": "#[derive(Default)]\n\nstruct MergeNode<'s> {\n\n /// These are the raw scopes that got merged into us.\n\n /// All these scopes have the same `id`.\n\n pieces: Vec<MergePiece<'s>>,\n\n\n\n /// indexed by their id\n\n children: BTreeMap<&'s str, MergeNode<'s>>,\n\n}\n\n\n", "file_path": "puffin/src/merge.rs", "rank": 45, "score": 58698.51487155137 }, { "content": "/// Context for painting a frame.\n\nstruct Info<'a> {\n\n // Bounding box of canvas in pixels:\n\n canvas_min: Vec2,\n\n canvas_max: Vec2,\n\n\n\n mouse_pos: Vec2,\n\n\n\n ui: &'a Ui<'a>,\n\n draw_list: &'a DrawListMut<'a>,\n\n font_size: f32,\n\n\n\n /// Time of first event\n\n start_ns: NanoSecond,\n\n}\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 46, "score": 58698.51487155137 }, { "content": "#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct Key<'s> {\n\n id: &'s str,\n\n location: &'s str,\n\n thread_name: String,\n\n}\n\n\n", "file_path": "puffin_egui/src/stats.rs", "rank": 
47, "score": 58698.51487155137 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\nstruct MergePiece<'s> {\n\n /// The start of the scope relative to its *parent* [`Scope`].\n\n pub relative_start_ns: NanoSecond,\n\n /// The raw scope, just like it is found in the input stream\n\n pub scope: Scope<'s>,\n\n}\n\n\n\n/// A scope that has been merged from many different sources\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct MergeScope<'s> {\n\n /// Relative to parent.\n\n pub relative_start_ns: NanoSecond,\n\n /// Sum of all durations over all frames\n\n pub total_duration_ns: NanoSecond,\n\n /// [`Self::total_duration_ns`] divided by number of frames.\n\n pub duration_per_frame_ns: NanoSecond,\n\n /// The slowest individual piece.\n\n pub max_duration_ns: NanoSecond,\n\n /// Number of pieces that got merged together to us.\n\n pub num_pieces: usize,\n", "file_path": "puffin/src/merge.rs", "rank": 48, "score": 58698.51487155137 }, { "content": "fn main() {\n\n puffin::set_scopes_on(true); // Remember to call this, or puffin will be disabled!\n\n\n\n let native_options = Default::default();\n\n eframe::run_native(\n\n \"puffin egui eframe\",\n\n native_options,\n\n Box::new(|_cc| Box::new(ExampleApp::default())),\n\n );\n\n}\n\n\n\n#[derive(Default)]\n\npub struct ExampleApp {\n\n frame_counter: u64,\n\n}\n\n\n\nimpl eframe::App for ExampleApp {\n\n fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {\n\n puffin::profile_function!();\n\n puffin::GlobalProfiler::lock().new_frame(); // call once per frame!\n", "file_path": "puffin_egui/examples/eframe.rs", "rank": 49, "score": 58553.122435113386 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nfn main() {\n\n /// puffin profile viewer.\n\n ///\n\n /// Can either connect remotely to a puffin server\n\n /// or open a .puffin recording file.\n\n #[derive(argh::FromArgs)]\n\n struct Arguments {\n\n /// which server to connect to, e.g. 
`127.0.0.1:8585`.\n\n #[argh(option, default = \"default_url()\")]\n\n url: String,\n\n\n\n /// what .puffin file to open, e.g. `my/recording.puffin`.\n\n #[argh(positional)]\n\n file: Option<String>,\n\n }\n\n\n\n fn default_url() -> String {\n\n format!(\"127.0.0.1:{}\", puffin_http::DEFAULT_PORT)\n\n }\n\n\n", "file_path": "puffin_viewer/src/main.rs", "rank": 50, "score": 58553.122435113386 }, { "content": "#[test]\n\nfn test_merge() {\n\n use crate::*;\n\n\n\n let stream = {\n\n let mut stream = Stream::default();\n\n\n\n for i in 0..2 {\n\n let ns = 1000 * i;\n\n let a = stream.begin_scope(ns + 100, \"a\", \"\", \"\");\n\n stream.end_scope(a, ns + 200);\n\n\n\n let b = stream.begin_scope(ns + 200, \"b\", \"\", \"\");\n\n\n\n let ba = stream.begin_scope(ns + 400, \"ba\", \"\", \"\");\n\n stream.end_scope(ba, ns + 600);\n\n\n\n let bb = stream.begin_scope(ns + 600, \"bb\", \"\", \"\");\n\n let bba = stream.begin_scope(ns + 600, \"bba\", \"\", \"\");\n\n stream.end_scope(bba, ns + 700);\n\n stream.end_scope(bb, ns + 800);\n", "file_path": "puffin/src/merge.rs", "rank": 51, "score": 58553.122435113386 }, { "content": "fn main() {\n\n puffin::set_scopes_on(true); // Remember to call this, or puffin will be disabled!\n\n let mut profiler_ui = puffin_imgui::ProfilerUi::default();\n\n let mut frame_counter = 0;\n\n\n\n let system = init(file!());\n\n system.main_loop(move |_run, ui| {\n\n puffin::profile_function!();\n\n puffin::GlobalProfiler::lock().new_frame(); // call once per frame!\n\n\n\n profiler_ui.window(ui);\n\n\n\n // Give us something to inspect:\n\n\n\n std::thread::Builder::new()\n\n .name(\"Other thread\".to_owned())\n\n .spawn(|| {\n\n sleep_ms(5);\n\n })\n\n .unwrap();\n", "file_path": "puffin-imgui/examples/imgui.rs", "rank": 52, "score": 58553.122435113386 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\nfn main() {}\n", "file_path": "puffin_viewer/src/main.rs", "rank": 53, "score": 58553.122435113386 }, { "content": "fn scope_byte_size(scope: 
&puffin::Scope<'_>) -> usize {\n\n 1 + // `(` sentinel\n\n 8 + // start time\n\n 1 + scope.record.id.len() + //\n\n 1 + scope.record.location.len() + //\n\n 1 + scope.record.data.len() + //\n\n 8 + // scope size\n\n 1 + // `)` sentinel\n\n 8 // stop time\n\n}\n", "file_path": "puffin_egui/src/stats.rs", "rank": 54, "score": 57860.73515116746 }, { "content": "#[derive(Clone, Copy, Eq, PartialEq)]\n\nstruct ScopeSize(u64);\n\n\n\nimpl ScopeSize {\n\n /// Special value to indicate that this profile scope was never closed\n\n pub fn unfinished() -> Self {\n\n Self(u64::MAX)\n\n }\n\n}\n\n\n\n/// Errors that can happen when parsing a [`Stream`] of profile data.\n\n#[derive(Debug)]\n\npub enum Error {\n\n PrematureEnd,\n\n InvalidStream,\n\n ScopeNeverEnded,\n\n InvalidOffset,\n\n Empty,\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n", "file_path": "puffin/src/data.rs", "rank": 55, "score": 57227.771381812956 }, { "content": "fn paint_record(\n\n info: &Info,\n\n options: &mut Options,\n\n prefix: &str,\n\n suffix: &str,\n\n record: &Record<'_>,\n\n top_y: f32,\n\n) -> PaintResult {\n\n let start_x = info.point_from_ns(options, record.start_ns);\n\n let stop_x = info.point_from_ns(options, record.stop_ns());\n\n if info.canvas.max.x < start_x\n\n || stop_x < info.canvas.min.x\n\n || stop_x - start_x < options.cull_width\n\n {\n\n return PaintResult::Culled;\n\n }\n\n\n\n let bottom_y = top_y + options.rect_height;\n\n\n\n let rect = Rect::from_min_max(pos2(start_x, top_y), pos2(stop_x, bottom_y));\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 56, "score": 57119.38598061139 }, { "content": "fn paint_record(\n\n info: &Info<'_>,\n\n options: &mut Options,\n\n prefix: &str,\n\n record: &Record<'_>,\n\n top_y: f32,\n\n) -> PaintResult {\n\n let mut start_x = info.pixel_from_ns(options, record.start_ns);\n\n let mut stop_x = info.pixel_from_ns(options, record.stop_ns());\n\n if info.canvas_max.x < start_x\n\n || stop_x < info.canvas_min.x\n\n || 
stop_x - start_x < options.cull_width\n\n {\n\n return PaintResult::Culled;\n\n }\n\n\n\n let mut min_width = options.min_width;\n\n\n\n let bottom_y = top_y + options.rect_height;\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 57, "score": 57119.38598061139 }, { "content": "fn paint_timeline(\n\n info: &Info,\n\n canvas: Rect,\n\n options: &Options,\n\n start_ns: NanoSecond,\n\n) -> Vec<egui::Shape> {\n\n let mut shapes = vec![];\n\n\n\n if options.canvas_width_ns <= 0.0 {\n\n return shapes;\n\n }\n\n\n\n let alpha_multiplier = if options.filter.is_empty() { 0.3 } else { 0.1 };\n\n\n\n // We show all measurements relative to start_ns\n\n\n\n let max_lines = canvas.width() / 4.0;\n\n let mut grid_spacing_ns = 1_000;\n\n while options.canvas_width_ns / (grid_spacing_ns as f32) > max_lines {\n\n grid_spacing_ns *= 10;\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 58, "score": 57119.38598061139 }, { "content": "fn paint_scope(\n\n info: &Info<'_>,\n\n options: &mut Options,\n\n stream: &Stream,\n\n scope: &Scope<'_>,\n\n depth: usize,\n\n min_y: f32,\n\n) -> Result<PaintResult> {\n\n let top_y = min_y + (depth as f32) * (options.rect_height + options.spacing);\n\n\n\n let result = paint_record(info, options, \"\", &scope.record, top_y);\n\n\n\n if result != PaintResult::Culled {\n\n let mut num_children = 0;\n\n for child_scope in Reader::with_offset(stream, scope.child_begin_position)? 
{\n\n paint_scope(info, options, stream, &child_scope?, depth + 1, min_y)?;\n\n num_children += 1;\n\n }\n\n\n\n if result == PaintResult::Hovered {\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 59, "score": 57119.38598061139 }, { "content": "#[test]\n\nfn test_profile_data() {\n\n let stream = {\n\n let mut stream = Stream::default();\n\n let t0 = stream.begin_scope(100, \"top\", \"top.rs\", \"data_top\");\n\n let m1 = stream.begin_scope(200, \"middle_0\", \"middle.rs\", \"data_middle_0\");\n\n stream.end_scope(m1, 300);\n\n let m1 = stream.begin_scope(300, \"middle_1\", \"middle.rs:42\", \"data_middle_1\");\n\n stream.end_scope(m1, 400);\n\n stream.end_scope(t0, 400);\n\n stream\n\n };\n\n\n\n let top_scopes = Reader::from_start(&stream).read_top_scopes().unwrap();\n\n assert_eq!(top_scopes.len(), 1);\n\n let middle_scopes = Reader::with_offset(&stream, top_scopes[0].child_begin_position)\n\n .unwrap()\n\n .read_top_scopes()\n\n .unwrap();\n\n\n\n assert_eq!(\n", "file_path": "puffin/src/data.rs", "rank": 60, "score": 57119.38598061139 }, { "content": "fn paint_scope(\n\n info: &Info,\n\n options: &mut Options,\n\n stream: &Stream,\n\n scope: &Scope<'_>,\n\n depth: usize,\n\n min_y: f32,\n\n) -> Result<PaintResult> {\n\n let top_y = min_y + (depth as f32) * (options.rect_height + options.spacing);\n\n\n\n let result = paint_record(info, options, \"\", \"\", &scope.record, top_y);\n\n\n\n if result != PaintResult::Culled {\n\n let mut num_children = 0;\n\n for child_scope in Reader::with_offset(stream, scope.child_begin_position)? 
{\n\n paint_scope(info, options, stream, &child_scope?, depth + 1, min_y)?;\n\n num_children += 1;\n\n }\n\n\n\n if result == PaintResult::Hovered {\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 61, "score": 57119.38598061139 }, { "content": "fn ui_canvas(\n\n options: &mut Options,\n\n info: &Info,\n\n frames: &SelectedFrames,\n\n (min_ns, max_ns): (NanoSecond, NanoSecond),\n\n) -> f32 {\n\n puffin::profile_function!();\n\n\n\n if options.canvas_width_ns <= 0.0 {\n\n options.canvas_width_ns = (max_ns - min_ns) as f32;\n\n options.zoom_to_relative_ns_range = None;\n\n }\n\n\n\n // We paint the threads top-down\n\n let mut cursor_y = info.canvas.top();\n\n cursor_y += info.text_height; // Leave room for time labels\n\n\n\n let threads = frames.threads.keys().cloned().collect();\n\n let threads = options.sorting.sort(threads);\n\n\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 62, "score": 57119.38598061139 }, { "content": "#[test]\n\nfn test_clean_function_name() {\n\n assert_eq!(clean_function_name(\"\"), \"\");\n\n assert_eq!(clean_function_name(\"foo\"), \"foo\");\n\n assert_eq!(clean_function_name(\"foo::bar\"), \"foo::bar\");\n\n assert_eq!(clean_function_name(\"foo::bar::baz\"), \"bar::baz\");\n\n}\n\n\n\n/// Returns a shortened path to the current file.\n\n#[macro_export]\n\nmacro_rules! 
current_file_name {\n\n () => {\n\n $crate::short_file_name(file!())\n\n };\n\n}\n\n\n\n/// Removes long path prefix to focus on the last parts of the path (and the file name).\n", "file_path": "puffin/src/lib.rs", "rank": 63, "score": 55799.84486245818 }, { "content": "#[test]\n\nfn test_short_file_name() {\n\n assert_eq!(short_file_name(\"\"), \"\");\n\n assert_eq!(short_file_name(\"foo.rs\"), \"foo.rs\");\n\n assert_eq!(short_file_name(\"foo/bar.rs\"), \"bar.rs\");\n\n assert_eq!(short_file_name(\"foo/bar/baz.rs\"), \"baz.rs\");\n\n assert_eq!(short_file_name(r\"C:\\\\windows\\is\\weird\\src.rs\"), \"src.rs\");\n\n}\n\n\n\n#[allow(clippy::doc_markdown)] // clippy wants to put \"MacBook\" in ticks 🙄\n\n/// Automatically name the profiling scope based on function name.\n\n///\n\n/// Names should be descriptive, ASCII and without spaces.\n\n///\n\n/// Example:\n\n/// ```\n\n/// # struct Image {};\n\n/// fn load_image(path: &str) -> Image {\n\n/// puffin::profile_function!();\n\n/// /* … */\n\n/// # let image = Image {};\n", "file_path": "puffin/src/lib.rs", "rank": 64, "score": 55799.84486245818 }, { "content": "fn paint_merge_scope(\n\n info: &Info<'_>,\n\n options: &mut Options,\n\n ns_offset: NanoSecond,\n\n merge: &MergeScope<'_>,\n\n depth: usize,\n\n min_y: f32,\n\n) -> Result<PaintResult> {\n\n let top_y = min_y + (depth as f32) * (options.rect_height + options.spacing);\n\n\n\n let prefix = if merge.num_pieces <= 1 {\n\n String::default()\n\n } else {\n\n format!(\"{}x \", merge.num_pieces)\n\n };\n\n\n\n let record = Record {\n\n start_ns: ns_offset + merge.relative_start_ns,\n\n duration_ns: merge.duration_per_frame_ns,\n\n id: &merge.id,\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 65, "score": 55799.84486245818 }, { "content": "fn paint_merge_scope(\n\n info: &Info,\n\n options: &mut Options,\n\n ns_offset: NanoSecond,\n\n merge: &MergeScope<'_>,\n\n depth: usize,\n\n min_y: f32,\n\n) -> Result<PaintResult> {\n\n let top_y = min_y + (depth as f32) 
* (options.rect_height + options.spacing);\n\n\n\n let prefix = if info.num_frames <= 1 {\n\n if merge.num_pieces <= 1 {\n\n String::default()\n\n } else {\n\n format!(\"{}x \", merge.num_pieces)\n\n }\n\n } else {\n\n let is_integral = merge.num_pieces % info.num_frames == 0;\n\n if is_integral {\n\n format!(\"{}x \", merge.num_pieces / info.num_frames)\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 66, "score": 55799.84486245818 }, { "content": "/// Current time in seconds\n\nfn now() -> f64 {\n\n std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .unwrap_or_default()\n\n .as_secs_f64()\n\n}\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 67, "score": 53990.857527546745 }, { "content": "fn collect_scope<'s>(\n\n stats: &mut Stats<'s>,\n\n thread_name: &str,\n\n stream: &'s puffin::Stream,\n\n scope: &puffin::Scope<'s>,\n\n) -> puffin::Result<()> {\n\n let mut ns_used_by_children = 0;\n\n for child_scope in Reader::with_offset(stream, scope.child_begin_position)? 
{\n\n let child_scope = &child_scope?;\n\n collect_scope(stats, thread_name, stream, child_scope)?;\n\n ns_used_by_children += child_scope.record.duration_ns;\n\n }\n\n\n\n let self_time = scope.record.duration_ns.saturating_sub(ns_used_by_children);\n\n\n\n let key = Key {\n\n id: scope.record.id,\n\n location: scope.record.location,\n\n thread_name: thread_name.to_owned(),\n\n };\n\n let scope_stats = stats.scopes.entry(key).or_default();\n\n scope_stats.count += 1;\n\n scope_stats.bytes += scope_byte_size(scope);\n\n scope_stats.total_self_ns += self_time;\n\n scope_stats.max_ns = scope_stats.max_ns.max(self_time);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "puffin_egui/src/stats.rs", "rank": 68, "score": 53990.857527546745 }, { "content": "fn window_conf() -> Conf {\n\n Conf {\n\n window_title: \"puffin_egui with macroquad\".to_owned(),\n\n window_width: 1200,\n\n window_height: 800,\n\n high_dpi: true,\n\n ..Default::default()\n\n }\n\n}\n\n\n\n#[macroquad::main(window_conf)]\n\nasync fn main() {\n\n puffin::set_scopes_on(true); // Remember to call this, or puffin will be disabled!\n\n\n\n let mut frame_counter = 0;\n\n\n\n while !should_quit() {\n\n puffin::profile_scope!(\"main_loop\");\n\n\n\n puffin::GlobalProfiler::lock().new_frame(); // call once per frame!\n", "file_path": "puffin_egui/examples/macroquad.rs", "rank": 69, "score": 52671.316409393534 }, { "content": "fn sleep_ms(ms: usize) {\n\n puffin::profile_function!();\n\n match ms {\n\n 0 => {}\n\n 1 => std::thread::sleep(std::time::Duration::from_millis(1)),\n\n _ => {\n\n sleep_ms(ms / 2);\n\n sleep_ms(ms - (ms / 2));\n\n }\n\n }\n\n}\n", "file_path": "puffin-imgui/examples/imgui.rs", "rank": 70, "score": 49046.28952877561 }, { "content": "fn sleep_ms(ms: usize) {\n\n puffin::profile_function!();\n\n match ms {\n\n 0 => {}\n\n 1 => std::thread::sleep(std::time::Duration::from_millis(1)),\n\n _ => {\n\n sleep_ms(ms / 2);\n\n sleep_ms(ms - (ms / 2));\n\n }\n\n }\n\n}\n", "file_path": 
"puffin_egui/examples/eframe.rs", "rank": 71, "score": 49046.28952877561 }, { "content": "fn sleep_ms(ms: usize) {\n\n puffin::profile_function!();\n\n match ms {\n\n 0 => {}\n\n 1 => std::thread::sleep(std::time::Duration::from_millis(1)),\n\n _ => {\n\n sleep_ms(ms / 2);\n\n sleep_ms(ms - (ms / 2));\n\n }\n\n }\n\n}\n", "file_path": "puffin_egui/examples/macroquad.rs", "rank": 72, "score": 49046.28952877561 }, { "content": "type NsSource = fn() -> NanoSecond;\n", "file_path": "puffin/src/lib.rs", "rank": 73, "score": 49046.28952877561 }, { "content": "fn max_memory_controls(ui: &Ui<'_>, frames: &Frames, frame_view: &GlobalFrameView) {\n\n let uniq = frames.all_uniq();\n\n\n\n let mut bytes = 0;\n\n let mut unpacked = 0;\n\n for frame in &uniq {\n\n bytes += frame.bytes_of_ram_used();\n\n unpacked += frame.has_unpacked() as usize;\n\n }\n\n ui.text(format!(\n\n \"{} frames ({} unpacked) using approximately {:.1} MB.\",\n\n uniq.len(),\n\n unpacked,\n\n bytes as f64 * 1e-6\n\n ));\n\n\n\n let frames_per_second = if let (Some(first), Some(last)) = (uniq.first(), uniq.last()) {\n\n let nanos = last.range_ns().1 - first.range_ns().0;\n\n let seconds = nanos as f64 * 1e-9;\n\n let frames = last.frame_index() - first.frame_index() + 1;\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 74, "score": 48371.04900449826 }, { "content": "fn paint_thread_info(info: &Info<'_>, thread_info: &ThreadInfo, pos: [f32; 2]) {\n\n let text = &thread_info.name;\n\n let text_size = info.ui.calc_text_size(&ImString::new(text));\n\n\n\n info.draw_list\n\n .add_rect(\n\n pos,\n\n [pos[0] + text_size[0], pos[1] + text_size[1]],\n\n [0.0, 0.0, 0.0, 0.5],\n\n )\n\n .filled(true)\n\n .rounding(0.0)\n\n .build();\n\n\n\n info.draw_list.add_text(pos, [0.9, 0.9, 0.9, 1.0], text);\n\n}\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 75, "score": 47395.00317750221 }, { "content": "fn to_ms(ns: NanoSecond) -> f64 {\n\n ns as f64 * 1e-6\n\n}\n\n\n", "file_path": 
"puffin_egui/src/flamegraph.rs", "rank": 76, "score": 46009.06143175404 }, { "content": "fn to_ms(ns: NanoSecond) -> f64 {\n\n ns as f64 * 1e-6\n\n}\n\n\n\nuse std::ops::{Add, Mul, RangeInclusive};\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 77, "score": 46009.06143175404 }, { "content": "fn color_from_duration(ns: NanoSecond) -> Rgba {\n\n let ms = to_ms(ns) as f32;\n\n // Brighter = more time.\n\n // So we start with dark colors (blue) and later bright colors (green).\n\n let b = remap_clamp(ms, 0.0..=5.0, 1.0..=0.3);\n\n let r = remap_clamp(ms, 0.0..=10.0, 0.5..=0.8);\n\n let g = remap_clamp(ms, 10.0..=33.0, 0.1..=0.8);\n\n let a = 0.9;\n\n Rgba::from_rgb(r, g, b) * a\n\n}\n\n\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 78, "score": 44960.78111928557 }, { "content": "fn color_from_duration(ns: NanoSecond) -> [f32; 4] {\n\n let ms = to_ms(ns) as f32;\n\n // Brighter = more time.\n\n // So we start with dark colors (blue) and later bright colors (green).\n\n let b = remap_clamp(ms, 0.0..=5.0, 1.0..=0.3);\n\n let r = remap_clamp(ms, 0.0..=10.0, 0.5..=0.8);\n\n let g = remap_clamp(ms, 10.0..=33.3, 0.1..=0.8);\n\n let a = 0.9;\n\n [r, g, b, a]\n\n}\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 79, "score": 43410.00192918888 }, { "content": "#[cfg(feature = \"packing\")]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\n#[cfg(feature = \"zstd\")]\n\nfn decode_zstd(bytes: &[u8]) -> anyhow::Result<Vec<u8>> {\n\n use anyhow::Context as _;\n\n zstd::decode_all(bytes).context(\"zstd decompress\")\n\n}\n\n\n", "file_path": "puffin/src/frame_data.rs", "rank": 80, "score": 39155.35097058749 }, { "content": "fn merge_scope_tooltip(ui: &Ui<'_>, merge: &MergeScope<'_>) {\n\n ui.text(&format!(\"id: {}\", merge.id));\n\n if !merge.location.is_empty() {\n\n ui.text(&format!(\"location: {}\", merge.location));\n\n }\n\n if !merge.data.is_empty() {\n\n ui.text(&format!(\"data: {}\", merge.data));\n\n }\n\n\n\n if merge.num_pieces <= 1 {\n\n 
ui.text(&format!(\n\n \"duration: {:6.3} ms\",\n\n to_ms(merge.duration_per_frame_ns)\n\n ));\n\n } else {\n\n ui.text(&format!(\"sum of: {} scopes\", merge.num_pieces));\n\n ui.text(&format!(\n\n \"total: {:6.3} ms\",\n\n to_ms(merge.duration_per_frame_ns)\n\n ));\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 81, "score": 39155.35097058749 }, { "content": "fn lerp<T>(range: RangeInclusive<T>, t: f32) -> T\n\nwhere\n\n f32: Mul<T, Output = T>,\n\n T: Add<T, Output = T> + Copy,\n\n{\n\n (1.0 - t) * *range.start() + t * *range.end()\n\n}\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 82, "score": 38231.65194079947 }, { "content": "fn paint_thread_info(info: &Info, thread: &ThreadInfo, pos: Pos2) {\n\n let galley = info.ctx.fonts().layout_no_wrap(\n\n thread.name.clone(),\n\n info.font_id.clone(),\n\n Rgba::from_white_alpha(0.9).into(),\n\n );\n\n let rect = Rect::from_min_size(pos, galley.size());\n\n\n\n info.painter\n\n .rect_filled(rect.expand(2.0), 0.0, Rgba::from_black_alpha(0.5));\n\n info.painter.galley(rect.min, galley);\n\n}\n", "file_path": "puffin_egui/src/flamegraph.rs", "rank": 83, "score": 37502.82767319082 }, { "content": "use anyhow::Context as _;\n\nuse puffin::GlobalProfiler;\n\nuse std::{\n\n io::Write,\n\n net::{SocketAddr, TcpListener, TcpStream},\n\n sync::{\n\n atomic::{AtomicUsize, Ordering},\n\n Arc,\n\n },\n\n};\n\n\n\n/// Maximum size of the backlog of packets to send to a client if they aren't reading fast enough.\n\nconst MAX_FRAMES_IN_QUEUE: usize = 30;\n\n\n\n/// Listens for incoming connections\n\n/// and streams them puffin profiler data.\n\n///\n\n/// Drop to stop transmitting and listening for new connections.\n\npub struct Server {\n\n sink_id: puffin::FrameSinkId,\n", "file_path": "puffin_http/src/server.rs", "rank": 89, "score": 37076.753485520116 }, { "content": " join_handle: Option<std::thread::JoinHandle<()>>,\n\n num_clients: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl Server {\n\n /// Start listening for connections on 
this addr (e.g. \"0.0.0.0:8585\")\n\n pub fn new(bind_addr: &str) -> anyhow::Result<Self> {\n\n let tcp_listener = TcpListener::bind(bind_addr).context(\"binding server TCP socket\")?;\n\n tcp_listener\n\n .set_nonblocking(true)\n\n .context(\"TCP set_nonblocking\")?;\n\n\n\n // We use crossbeam_channel instead of `mpsc`,\n\n // because on shutdown we want all frames to be sent.\n\n // `mpsc::Receiver` stops receiving as soon as the `Sender` is dropped,\n\n // but `crossbeam_channel` will continue until the channel is empty.\n\n let (tx, rx): (crossbeam_channel::Sender<Arc<puffin::FrameData>>, _) =\n\n crossbeam_channel::unbounded();\n\n\n\n let num_clients = Arc::new(AtomicUsize::default());\n", "file_path": "puffin_http/src/server.rs", "rank": 90, "score": 37076.248019526465 }, { "content": " let num_clients_cloned = num_clients.clone();\n\n\n\n let join_handle = std::thread::Builder::new()\n\n .name(\"puffin-server\".to_owned())\n\n .spawn(move || {\n\n let mut server_impl = PuffinServerImpl {\n\n tcp_listener,\n\n clients: Default::default(),\n\n num_clients: num_clients_cloned,\n\n };\n\n\n\n while let Ok(frame) = rx.recv() {\n\n if let Err(err) = server_impl.accept_new_clients() {\n\n log::warn!(\"puffin server failure: {}\", err);\n\n }\n\n if let Err(err) = server_impl.send(&*frame) {\n\n log::warn!(\"puffin server failure: {}\", err);\n\n }\n\n }\n\n })\n", "file_path": "puffin_http/src/server.rs", "rank": 91, "score": 37072.85212890475 }, { "content": " .name(\"puffin-server-client\".to_owned())\n\n .spawn(move || client_loop(packet_rx, client_addr, tcp_stream))\n\n .context(\"Couldn't spawn thread\")?;\n\n\n\n self.clients.push(Client {\n\n client_addr,\n\n packet_tx: Some(packet_tx),\n\n join_handle: Some(join_handle),\n\n });\n\n self.num_clients.store(self.clients.len(), Ordering::SeqCst);\n\n }\n\n Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n\n break; // Nothing to do for now.\n\n }\n\n Err(e) => {\n\n anyhow::bail!(\"puffin server TCP 
error: {:?}\", e);\n\n }\n\n }\n\n }\n\n Ok(())\n", "file_path": "puffin_http/src/server.rs", "rank": 92, "score": 37071.58671371669 }, { "content": " .context(\"Couldn't spawn thread\")?;\n\n\n\n let sink_id = GlobalProfiler::lock().add_sink(Box::new(move |frame| {\n\n tx.send(frame).ok();\n\n }));\n\n\n\n Ok(Server {\n\n sink_id,\n\n join_handle: Some(join_handle),\n\n num_clients,\n\n })\n\n }\n\n\n\n /// Number of clients currently connected.\n\n pub fn num_clients(&self) -> usize {\n\n self.num_clients.load(Ordering::SeqCst)\n\n }\n\n}\n\n\n\nimpl Drop for Server {\n", "file_path": "puffin_http/src/server.rs", "rank": 93, "score": 37069.692557608105 }, { "content": " }\n\n\n\n pub fn send(&mut self, frame: &puffin::FrameData) -> anyhow::Result<()> {\n\n if self.clients.is_empty() {\n\n return Ok(());\n\n }\n\n puffin::profile_function!();\n\n\n\n let mut packet = vec![];\n\n packet\n\n .write_all(&crate::PROTOCOL_VERSION.to_le_bytes())\n\n .unwrap();\n\n frame\n\n .write_into(&mut packet)\n\n .context(\"Encode puffin frame\")?;\n\n\n\n let packet: Packet = packet.into();\n\n\n\n self.clients.retain(|client| match &client.packet_tx {\n\n None => false,\n", "file_path": "puffin_http/src/server.rs", "rank": 94, "score": 37068.69271128281 }, { "content": " Some(packet_tx) => match packet_tx.try_send(packet.clone()) {\n\n Ok(()) => true,\n\n Err(crossbeam_channel::TrySendError::Disconnected(_)) => false,\n\n Err(crossbeam_channel::TrySendError::Full(_)) => {\n\n log::info!(\n\n \"puffin client {} is not accepting data fast enough; dropping a frame\",\n\n client.client_addr\n\n );\n\n true\n\n }\n\n },\n\n });\n\n self.num_clients.store(self.clients.len(), Ordering::SeqCst);\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "puffin_http/src/server.rs", "rank": 95, "score": 37067.07091095253 }, { "content": " fn drop(&mut self) {\n\n GlobalProfiler::lock().remove_sink(self.sink_id);\n\n\n\n // Take care to send everything before we shut down:\n\n if let 
Some(join_handle) = self.join_handle.take() {\n\n join_handle.join().ok();\n\n }\n\n }\n\n}\n\n\n", "file_path": "puffin_http/src/server.rs", "rank": 96, "score": 37058.228954456776 }, { "content": " // Give us something to inspect:\n\n\n\n std::thread::Builder::new()\n\n .name(\"Other thread\".to_owned())\n\n .spawn(|| {\n\n sleep_ms(5);\n\n })\n\n .unwrap();\n\n\n\n sleep_ms(14);\n\n if frame_counter % 7 == 0 {\n\n puffin::profile_scope!(\"Spike\");\n\n std::thread::sleep(std::time::Duration::from_millis(10))\n\n }\n\n\n\n for _ in 0..1000 {\n\n puffin::profile_scope!(\"very thin\");\n\n }\n\n\n\n frame_counter += 1;\n\n }\n\n}\n\n\n", "file_path": "puffin_http/examples/server.rs", "rank": 97, "score": 37055.22574539313 }, { "content": "fn paint_timeline(info: &Info<'_>, options: &Options, start_ns: NanoSecond) {\n\n if options.canvas_width_ns <= 0.0 {\n\n return;\n\n }\n\n\n\n let alpha_multiplier = if options.filter.is_empty() { 1.0 } else { 0.3 };\n\n\n\n // We show all measurements relative to start_ns\n\n\n\n let max_lines = 300.0;\n\n let mut grid_spacing_ns = 1_000;\n\n while options.canvas_width_ns / (grid_spacing_ns as f32) > max_lines {\n\n grid_spacing_ns *= 10;\n\n }\n\n\n\n // We fade in lines as we zoom in:\n\n let num_tiny_lines = options.canvas_width_ns / (grid_spacing_ns as f32);\n\n let zoom_factor = remap_clamp(num_tiny_lines, (0.1 * max_lines)..=max_lines, 1.0..=0.0);\n\n let zoom_factor = zoom_factor * zoom_factor;\n\n let big_alpha = remap_clamp(zoom_factor, 0.0..=1.0, 0.5..=1.0);\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 98, "score": 36579.1286434028 }, { "content": "fn remap_clamp(x: f32, from: RangeInclusive<f32>, to: RangeInclusive<f32>) -> f32 {\n\n let t = if x <= *from.start() {\n\n 0.0\n\n } else if x >= *from.end() {\n\n 1.0\n\n } else {\n\n (x - from.start()) / (from.end() - from.start())\n\n };\n\n lerp(to, t)\n\n}\n\n\n", "file_path": "puffin-imgui/src/ui.rs", "rank": 99, "score": 35085.487659064725 } ]
Rust
firmware/hal/build.rs
Lotterleben/embedded2020
623fa53461c4da29d69318aa85de8b4119c50719
use std::{ env, error::Error, fs, path::{Path, PathBuf}, }; fn main() -> Result<(), Box<dyn Error>> { let out_dir = &PathBuf::from(env::var("OUT_DIR")?); let flash = env::var_os("CARGO_FEATURE_FLASH").is_some(); descs(&out_dir)?; fs::copy("interrupts.x", out_dir.join("interrupts.x"))?; let suffix = if flash { "flash" } else { "ram" }; fs::copy(format!("link-{}.x", suffix), out_dir.join("link.x"))?; println!("cargo:rustc-link-search={}", out_dir.display()); Ok(()) } fn descs(out_dir: &Path) -> Result<(), Box<dyn Error>> { use core::num::NonZeroU8; use quote::quote; use usb2::{ cdc::{self, acm, call, header, union}, configuration::{self, bmAttributes}, device::{self, bMaxPacketSize0}, endpoint, hid, ia, interface, Direction, Endpoint, }; const PACKET_SIZE: bMaxPacketSize0 = bMaxPacketSize0::B64; const CONFIG_VAL: u8 = 1; const CDC_IFACE: u8 = 0; const HID_IFACE: u8 = 2; let device_desc = device::Descriptor { bDeviceClass: 0xEF, bDeviceSubClass: 2, bDeviceProtocol: 1, bMaxPacketSize0: bMaxPacketSize0::B64, bNumConfigurations: NonZeroU8::new(1).unwrap(), bcdDevice: 0x01_00, iManufacturer: None, iProduct: None, iSerialNumber: None, idProduct: consts::PID, idVendor: consts::VID, }; fn full_config_desc() -> Vec<u8> { let hid = env::var_os("CARGO_FEATURE_HID").is_some(); let mut bytes = vec![]; let mut nifaces = 2; if hid { nifaces += 1; } let config = configuration::Descriptor { bConfigurationValue: NonZeroU8::new(CONFIG_VAL).unwrap(), bMaxPower: 250, bNumInterfaces: NonZeroU8::new(nifaces).unwrap(), bmAttributes: bmAttributes { remote_wakeup: false, self_powered: false, }, iConfiguration: None, wTotalLength: 0, }; bytes.extend_from_slice(&config.bytes()); { let comm = cdc::Class::Communications { subclass: cdc::SubClass::AbstractControlModel, protocol: cdc::Protocol::ATCommands, }; let ia = ia::Descriptor { bFirstInterface: CDC_IFACE, bFunctionClass: comm.class(), bFunctionSubClass: comm.subclass(), bFunctionProtocol: comm.protocol(), bInterfaceCount: 
NonZeroU8::new(2).unwrap(), iFunction: None, }; bytes.extend_from_slice(&ia.bytes()); let iface0 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: CDC_IFACE, bInterfaceClass: comm.class().get(), bInterfaceSubClass: comm.subclass(), bInterfaceProtocol: comm.protocol(), bNumEndpoints: 1, iInterface: None, }; bytes.extend_from_slice(&iface0.bytes()); let header = header::Descriptor { bcdCDC: 0x01_10 }; bytes.extend_from_slice(&header.bytes()); let call = call::Descriptor { bmCapabilities: call::Capabilities { call_management: true, data_class: true, }, bDataInterface: 1, }; bytes.extend_from_slice(&call.bytes()); let acm = acm::Descriptor { bmCapabilities: acm::Capabilities { comm_features: false, line_serial: true, network_connection: false, send_break: false, }, }; bytes.extend_from_slice(&acm.bytes()); let union = union::Descriptor { bControlInterface: 0, bSubordinateInterface0: 1, }; bytes.extend_from_slice(&union.bytes()); let ep1in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 1, }, bInterval: 32, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep1in.bytes()); } { let cdc_data = cdc::Class::CdcData; let iface1 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: 1, bInterfaceClass: cdc_data.class().get(), bInterfaceSubClass: cdc_data.subclass(), bInterfaceProtocol: cdc_data.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface1.bytes()); let ep2out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep2out.bytes()); let ep2in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; 
bytes.extend_from_slice(&ep2in.bytes()); } if hid { let hid = hid::Class; let iface2 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: HID_IFACE, bInterfaceClass: hid.class().get(), bInterfaceSubClass: hid.subclass(), bInterfaceProtocol: hid.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface2.bytes()); let report = hid::Descriptor { bCountryCode: hid::Country::NotSupported, wDescriptorLength: 33, }; bytes.extend_from_slice(&report.bytes()); let ep3out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3out.bytes()); let ep3in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3in.bytes()); } let total_length = bytes.len(); assert!( total_length <= usize::from(u16::max_value()), "configuration descriptor is too long" ); bytes[2] = total_length as u8; bytes[3] = (total_length >> 8) as u8; bytes } let line_coding = acm::LineCoding { bCharFormat: acm::bCharFormat::Stop1, bDataBits: acm::bDataBits::_8, bParityType: acm::bParityType::None, dwDTERate: 9_600, }; let serial_state = acm::SerialState { interface: 0, bOverRun: false, bParity: false, bFraming: false, bRingSignal: false, bBreak: false, bTxCarrier: true, bRxCarrier: true, }; let max_packet_size0 = PACKET_SIZE as u8; let lcb = line_coding.bytes(); let lcl = lcb.len(); let ssb = serial_state.bytes(); let ssl = ssb.len(); let ddb = device_desc.bytes(); let ddl = ddb.len(); let cdb = full_config_desc(); let cdl = cdb.len(); fs::write( out_dir.join("descs.rs"), quote!( const CONFIG_VAL: core::num::NonZeroU8 = unsafe { 
core::num::NonZeroU8::new_unchecked(#CONFIG_VAL) }; const MAX_PACKET_SIZE0: u8 = #max_packet_size0; #[allow(dead_code)] #[link_section = ".data.CONFIG_DESC"] static CONFIG_DESC: [u8; #cdl] = [#(#cdb,)*]; #[allow(dead_code)] #[link_section = ".data.DEVICE_DESC"] static DEVICE_DESC: [u8; #ddl] = [#(#ddb,)*]; #[allow(dead_code)] static mut LINE_CODING: [u8; #lcl] = [#(#lcb,)*]; #[allow(dead_code)] #[link_section = ".data.SERIAL_STATE"] static SERIAL_STATE: crate::util::Align4<[u8; #ssl]> = crate::util::Align4([#(#ssb,)*]); #[allow(dead_code)] const CDC_IFACE: u8 = #CDC_IFACE; #[allow(dead_code)] const HID_IFACE: u8 = #HID_IFACE; ) .to_string(), )?; Ok(()) }
use std::{ env, error::Error, fs, path::{Path, PathBuf}, }; fn main() -> Result<(), Box<dyn Error>> { let out_dir = &PathBuf::from(env::var("OUT_DIR")?); let flash = env::var_os("CARGO_FEATURE_FLASH").is_some(); descs(&out_dir)?; fs::copy("interrupts.x", out_dir.join("interrupts.x"))?; let suffix = if flash { "flash" } else { "ram" }; fs::copy(format!("link-{}.x", suffix), out_dir.join("link.x"))?; println!("cargo:rustc-link-search={}", out_dir.display()); Ok(()) } fn descs(out_dir: &Path) -> Result<(), Box<dyn Error>> { use core::num::NonZeroU8; use quote::quote; use usb2::{ cdc::{self, acm, call, header, union}, configuration::{self, bmAttributes}, device::{self, bMaxPacketSize0}, endpoint, hid, ia, interface, Direction, Endpoint, }; const PACKET_SIZE: bMaxPacketSize0 = bMaxPacketSize0::B64; const CONFIG_VAL: u8 = 1; const CDC_IFACE: u8 = 0; const HID_IFACE: u8 = 2; let device_desc = device::Descriptor { bDeviceClass: 0xEF, bDeviceSubClass: 2, bDeviceProtocol: 1, bMaxPacketSize0: bMaxPacketSize0::B64, bNumConfigurations: NonZeroU8::new(1).unwrap(), bcdDevice: 0x01_00, iManufacturer: None, iProduct: None, iSerialNumber: None, idProduct: consts::PID, idVendor: consts::VID, }; fn full_config_desc() -> Vec<u8> { let hid = env::var_os("CARGO_FEATURE_HID").is_some(); let mut bytes = vec![]; let mut nifaces = 2; if hid { nifaces += 1; } let config = configuration::Descriptor { bConfigurationValue: NonZeroU8::new(CONFIG_VAL).unwrap(), bMaxPower: 250, bNumInterfaces: NonZeroU8::new(nifaces).unwrap(), bmAttributes: bmAttributes { remote_wakeup: false, self_powered: false, }, iConfiguration: None, wTotalLength: 0, }; bytes.extend_from_slice(&config.bytes()); { let comm = cdc::Class::Communications { subclass: cdc::SubClass::AbstractControlModel, protocol: cdc::Protocol::ATCommands, }; let ia = ia::Descriptor { bFirstInterface: CDC_IFACE, bFunctionClass: comm.class(), bFunctionSubClass: comm.subclass(), bFunctionProtocol: comm.protocol(), bInterfaceCount: 
NonZeroU8::new(2).unwrap(), iFunction: None, }; bytes.extend_from_slice(&ia.bytes()); let iface0 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: CDC_IFACE, bInterfaceClass: comm.class().get(), bInterfaceSubClass: comm.subclass(), bInterfaceProtocol: comm.protocol(), bNumEndpoints: 1, iInterface: None, }; bytes.extend_from_slice(&iface0.bytes()); let header = header::Descriptor { bcdCDC: 0x01_10 }; bytes.extend_from_slice(&header.bytes()); let call = call::Descriptor { bmCapabilities: call::Capabilities { call_management: true, data_class: true, }, bDataInterface: 1, }; bytes.extend_from_slice(&call.bytes()); let acm = acm::Descripto
let line_coding = acm::LineCoding { bCharFormat: acm::bCharFormat::Stop1, bDataBits: acm::bDataBits::_8, bParityType: acm::bParityType::None, dwDTERate: 9_600, }; let serial_state = acm::SerialState { interface: 0, bOverRun: false, bParity: false, bFraming: false, bRingSignal: false, bBreak: false, bTxCarrier: true, bRxCarrier: true, }; let max_packet_size0 = PACKET_SIZE as u8; let lcb = line_coding.bytes(); let lcl = lcb.len(); let ssb = serial_state.bytes(); let ssl = ssb.len(); let ddb = device_desc.bytes(); let ddl = ddb.len(); let cdb = full_config_desc(); let cdl = cdb.len(); fs::write( out_dir.join("descs.rs"), quote!( const CONFIG_VAL: core::num::NonZeroU8 = unsafe { core::num::NonZeroU8::new_unchecked(#CONFIG_VAL) }; const MAX_PACKET_SIZE0: u8 = #max_packet_size0; #[allow(dead_code)] #[link_section = ".data.CONFIG_DESC"] static CONFIG_DESC: [u8; #cdl] = [#(#cdb,)*]; #[allow(dead_code)] #[link_section = ".data.DEVICE_DESC"] static DEVICE_DESC: [u8; #ddl] = [#(#ddb,)*]; #[allow(dead_code)] static mut LINE_CODING: [u8; #lcl] = [#(#lcb,)*]; #[allow(dead_code)] #[link_section = ".data.SERIAL_STATE"] static SERIAL_STATE: crate::util::Align4<[u8; #ssl]> = crate::util::Align4([#(#ssb,)*]); #[allow(dead_code)] const CDC_IFACE: u8 = #CDC_IFACE; #[allow(dead_code)] const HID_IFACE: u8 = #HID_IFACE; ) .to_string(), )?; Ok(()) }
r { bmCapabilities: acm::Capabilities { comm_features: false, line_serial: true, network_connection: false, send_break: false, }, }; bytes.extend_from_slice(&acm.bytes()); let union = union::Descriptor { bControlInterface: 0, bSubordinateInterface0: 1, }; bytes.extend_from_slice(&union.bytes()); let ep1in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 1, }, bInterval: 32, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep1in.bytes()); } { let cdc_data = cdc::Class::CdcData; let iface1 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: 1, bInterfaceClass: cdc_data.class().get(), bInterfaceSubClass: cdc_data.subclass(), bInterfaceProtocol: cdc_data.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface1.bytes()); let ep2out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep2out.bytes()); let ep2in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep2in.bytes()); } if hid { let hid = hid::Class; let iface2 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: HID_IFACE, bInterfaceClass: hid.class().get(), bInterfaceSubClass: hid.subclass(), bInterfaceProtocol: hid.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface2.bytes()); let report = hid::Descriptor { bCountryCode: hid::Country::NotSupported, wDescriptorLength: 33, }; bytes.extend_from_slice(&report.bytes()); let ep3out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { 
transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3out.bytes()); let ep3in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3in.bytes()); } let total_length = bytes.len(); assert!( total_length <= usize::from(u16::max_value()), "configuration descriptor is too long" ); bytes[2] = total_length as u8; bytes[3] = (total_length >> 8) as u8; bytes }
function_block-function_prefixed
[ { "content": "fn main() -> Result<(), anyhow::Error> {\n\n let dev = serialport::available_ports()?\n\n .into_iter()\n\n .filter(|info| match info.port_type {\n\n SerialPortType::UsbPort(ref port) => port.vid == consts::VID,\n\n _ => false,\n\n })\n\n .next()\n\n .ok_or_else(|| anyhow!(\"device not found\"))?;\n\n\n\n let mut port = serialport::open(&dev.port_name)?;\n\n\n\n let stdout = io::stdout();\n\n let mut stdout = stdout.lock();\n\n let mut buf = [0; 64];\n\n loop {\n\n if port.bytes_to_read()? != 0 {\n\n let n = port.read(&mut buf)?;\n\n stdout.write(&buf[..n])?;\n\n } else {\n\n thread::sleep(Duration::from_millis(1))\n\n }\n\n }\n\n}\n", "file_path": "host/acm-cat/src/main.rs", "rank": 0, "score": 291195.7999440584 }, { "content": "fn gen(mut dev: ir::Device<'_>, lib: &Path) -> Result<(), anyhow::Error> {\n\n assert!(lib.is_file());\n\n\n\n dev.verify()?;\n\n opt::device(&mut dev);\n\n let krate = codegen::device(&dev);\n\n fs::write(lib, krate)?;\n\n Ok(())\n\n}\n", "file_path": "host/regen/src/main.rs", "rank": 1, "score": 281307.1484737253 }, { "content": "fn gen_nrf52(lib: &Path) -> Result<(), anyhow::Error> {\n\n let xml = fs::read_to_string(\"nrf52840.svd\")?;\n\n let dev = svd_parser::parse(&xml)?;\n\n let mut dev = translate::svd::device(&dev, AUDITED);\n\n audit_nrf52(&mut dev);\n\n gen(dev, lib)\n\n}\n\n\n", "file_path": "host/regen/src/main.rs", "rank": 2, "score": 261018.57232718053 }, { "content": "fn gen_cm(lib: &Path) -> Result<(), anyhow::Error> {\n\n let dev = cm::device();\n\n gen(dev, lib)\n\n}\n\n\n", "file_path": "host/regen/src/main.rs", "rank": 3, "score": 261018.57232718053 }, { "content": "fn main() -> Result<(), anyhow::Error> {\n\n gen_cm(Path::new(\"../../shared/cm/src/lib.rs\"))?;\n\n gen_nrf52(Path::new(\"../../firmware/pac/src/lib.rs\"))?;\n\n\n\n Ok(())\n\n}\n\n\n\n// Audited register writes\n\nconst AUDITED: &[&str] = &[\n\n \"CLOCK\", \"FICR\", \"P0\", \"POWER\", \"RADIO\", \"RTC0\", \"TWIM0\", \"USBD\", 
\"SPIM0\",\n\n];\n\n\n", "file_path": "host/regen/src/main.rs", "rank": 4, "score": 259434.84851585492 }, { "content": "fn main() -> Result<(), anyhow::Error> {\n\n process::exit(not_main()?)\n\n}\n\n\n", "file_path": "host/semiprobe/src/main.rs", "rank": 5, "score": 259434.84851585492 }, { "content": "fn main() -> Result<(), anyhow::Error> {\n\n let args = env::args().skip(1).collect::<Vec<_>>(); // skip program name\n\n ensure!(!args.is_empty(), \"expected at least one argument\");\n\n\n\n let api = HidApi::new()?;\n\n let dev = api\n\n .device_list()\n\n .filter(|dev| dev.vendor_id() == consts::VID && dev.product_id() == consts::PID)\n\n .next()\n\n .ok_or_else(|| anyhow!(\"device not found\"))?\n\n .open_device(&api)?;\n\n\n\n dev.write(args[0].as_bytes())?;\n\n let mut buf = [0; 64];\n\n let n = dev.read(&mut buf)?;\n\n println!(\"{:?}\", str::from_utf8(&buf[..n]));\n\n\n\n Ok(())\n\n}\n", "file_path": "host/hidc/src/main.rs", "rank": 6, "score": 259434.84851585492 }, { "content": "fn main() -> Result<(), anyhow::Error> {\n\n process::exit(not_main()?)\n\n}\n\n\n", "file_path": "host/semidap/src/main.rs", "rank": 7, "score": 259434.84851585497 }, { "content": "fn prompt(dap: &mut Dap) -> Result<(), anyhow::Error> {\n\n println!(\"------------------------------------------\");\n\n\n\n let mut rl = Editor::<()>::new();\n\n while let Ok(line) = rl.readline(\"\\n> \") {\n\n let mut line = line.trim();\n\n // remove comments\n\n line = line.splitn(2, '#').next().unwrap_or(\"\");\n\n\n\n if line.is_empty() {\n\n // just a comment; nothing to do\n\n continue;\n\n } else if line == \"help\" {\n\n println!(\n\n \"\\\n\ncommands:\n\n help Displays this text\n\n show <address> <i16> Displays memory\n\n show <address> -<u16> <u16> Displays memory\n\n exit Exits the debugger\n", "file_path": "host/semidap/src/main.rs", "rank": 8, "score": 256095.65272888396 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let out_dir = 
&PathBuf::from(env::var(\"OUT_DIR\")?);\n\n let pkg_name = env::var(\"CARGO_PKG_NAME\")?;\n\n let target = env::var(\"TARGET\")?;\n\n\n\n // place the pre-compiled assembly somewhere the linker can find it\n\n fs::copy(\n\n format!(\"bin/{}.a\", target),\n\n out_dir.join(format!(\"lib{}.a\", pkg_name)),\n\n )?;\n\n println!(\"cargo:rustc-link-lib=static={}\", pkg_name);\n\n\n\n println!(\"cargo:rustc-link-search={}\", out_dir.display());\n\n\n\n Ok(())\n\n}\n", "file_path": "firmware/semidap/build.rs", "rank": 10, "score": 243479.52195308602 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let out_dir = &PathBuf::from(env::var(\"OUT_DIR\")?);\n\n let pkg_name = env::var(\"CARGO_PKG_NAME\")?;\n\n let target = env::var(\"TARGET\")?;\n\n\n\n // place the pre-compiled assembly somewhere the linker can find it\n\n if target.starts_with(\"thumb\") {\n\n fs::copy(\n\n format!(\"bin/{}.a\", target),\n\n out_dir.join(format!(\"lib{}.a\", pkg_name)),\n\n )?;\n\n println!(\"cargo:rustc-link-lib=static={}\", pkg_name);\n\n\n\n println!(\"cargo:rustc-link-search={}\", out_dir.display());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "firmware/asm/build.rs", "rank": 12, "score": 243479.52195308602 }, { "content": "fn read_word_16(core: &mut Core, addr: u32) -> Result<u16, anyhow::Error> {\n\n if addr % 4 == 0 {\n\n Ok(core.read_word_32(addr)? as u16)\n\n } else if addr % 4 == 2 {\n\n Ok((core.read_word_32(addr - 2)? 
>> 16) as u16)\n\n } else {\n\n unreachable!()\n\n }\n\n}\n", "file_path": "host/semiprobe/src/main.rs", "rank": 13, "score": 229049.7276922866 }, { "content": "fn not_main() -> Result<i32, anyhow::Error> {\n\n let beginning = Instant::now();\n\n env_logger::init();\n\n\n\n let opts = Opts::from_args();\n\n\n\n let bytes = fs::read(opts.elf)?;\n\n debug!(\"parsing ELF file\");\n\n let elf = &ElfFile::new(&bytes).map_err(anyhow::Error::msg)?;\n\n\n\n debug!(\"extracting allocatable sections from the ELF file\");\n\n let mut vectors = None;\n\n let mut footprints = BTreeMap::new();\n\n let mut sections = vec![];\n\n let mut ncursors = 0;\n\n let mut semidap_cursor = None;\n\n let mut semidap_buffer = None;\n\n let mut debug_frame = None;\n\n let mut range_names = vec![];\n\n let binfmt_shndx = elf\n", "file_path": "host/semidap/src/main.rs", "rank": 14, "score": 222966.36933163024 }, { "content": "fn not_main() -> Result<i32, anyhow::Error> {\n\n let beginning = Instant::now();\n\n env_logger::init();\n\n\n\n let opts = Opts::from_args();\n\n\n\n let bytes = fs::read(opts.elf)?;\n\n debug!(\"parsing ELF file\");\n\n let elf = &ElfFile::new(&bytes).map_err(anyhow::Error::msg)?;\n\n\n\n debug!(\"extracting allocatable sections from the ELF file\");\n\n let mut vectors = None;\n\n let mut footprints = BTreeMap::new();\n\n let mut sections = vec![];\n\n let mut ncursors = 0;\n\n let mut semidap_cursor = None;\n\n let mut semidap_buffer = None;\n\n let mut debug_frame = None;\n\n let mut range_names = vec![];\n\n let binfmt_shndx = elf\n", "file_path": "host/semiprobe/src/main.rs", "rank": 15, "score": 222966.36933163024 }, { "content": "fn ep0setup(usb_state: &mut usb2::State, ep_state: &mut Ep0State) -> Result<(), ()> {\n\n let bmrequesttype = BMREQUESTTYPE();\n\n let brequest = BREQUEST();\n\n let wvalue = WVALUE();\n\n let windex = WINDEX();\n\n let wlength = WLENGTH();\n\n\n\n let req = Request::parse(bmrequesttype, brequest, wvalue, windex, wlength).map_err(|_| 
{\n\n semidap::error!(\n\n \"EP0SETUP: unknown request (bmrequesttype={}, brequest={}, wvalue={}, windex={}, wlength={})\",\n\n bmrequesttype,\n\n brequest,\n\n wvalue,\n\n windex,\n\n wlength\n\n );\n\n })?;\n\n\n\n match req {\n\n Request::Standard(req) => std_req(usb_state, ep_state, req)?,\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 16, "score": 220233.4768133114 }, { "content": "fn acm_req(ep_state: &mut Ep0State, req: acm::Request) -> Result<(), ()> {\n\n if req.interface != CDC_IFACE {\n\n semidap::error!(\"ACM request sent to the wrong interface\");\n\n return Err(());\n\n }\n\n\n\n match req.kind {\n\n acm::Kind::GetLineCoding => {\n\n semidap::info!(\"ACM: GET_LINE_CODING\");\n\n\n\n start_epin0(unsafe { &LINE_CODING }, ep_state);\n\n }\n\n\n\n acm::Kind::SetLineCoding => {\n\n semidap::info!(\"ACM: SET_LINE_CODING\");\n\n\n\n if *ep_state != Ep0State::Idle {\n\n #[cfg(debug_assertions)]\n\n unreachable()\n\n }\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 17, "score": 218157.53951391682 }, { "content": "fn parse_hex(s: &str) -> Result<u16, anyhow::Error> {\n\n u16::from_str_radix(s, 16).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "host/semidap/src/main.rs", "rank": 18, "score": 198202.44654861212 }, { "content": "fn parse_hex(s: &str) -> Result<u16, anyhow::Error> {\n\n u16::from_str_radix(s, 16).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "host/semiprobe/src/main.rs", "rank": 19, "score": 198202.44654861212 }, { "content": "fn start_epin0(bytes: &'static [u8], ep_state: &mut Ep0State) {\n\n #[cfg(debug_assertions)]\n\n semidap::assert!(\n\n *ep_state == Ep0State::Idle,\n\n \"tried to start a control read transfer before the previous one finished\"\n\n );\n\n\n\n let len = bytes.len() as u16;\n\n\n\n let maxcnt = if len <= MAX_PACKET_SIZE0.into() {\n\n // done in a single transfer\n\n short_ep0datadone_ep0status();\n\n *ep_state = Ep0State::Write { leftover: 0 };\n\n len as u8\n\n } else {\n\n unshort_ep0datadone_ep0status();\n\n 
let maxcnt = MAX_PACKET_SIZE0;\n\n *ep_state = Ep0State::Write {\n\n leftover: len - u16::from(maxcnt),\n\n };\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 20, "score": 189945.98435211842 }, { "content": "fn leb128_decode_u32(bytes: &[u8]) -> Result<(u32, usize), EoS> {\n\n let mut val = 0;\n\n for (i, byte) in bytes.iter().enumerate() {\n\n val |= u32::from(*byte & !CONTINUE) << (7 * i);\n\n\n\n if *byte & CONTINUE == 0 {\n\n return Ok((val, i + 1));\n\n }\n\n }\n\n\n\n Err(EoS)\n\n}\n\n\n", "file_path": "host/binfmt-parser/src/lib.rs", "rank": 21, "score": 186616.06358128323 }, { "content": "fn hid_req(req: hid::Request) -> Result<(), ()> {\n\n if req.interface != HID_IFACE {\n\n semidap::error!(\"HID request sent to the wrong interface\");\n\n return Err(());\n\n }\n\n\n\n match req.kind {\n\n hid::Kind::SetIdle {\n\n duration,\n\n report_id,\n\n } => {\n\n semidap::info!(\n\n \"HID: SET_IDLE dur={} report={}\",\n\n duration.map(|nz| nz.get()).unwrap_or(0),\n\n report_id.map(|nz| nz.get()).unwrap_or(0),\n\n );\n\n\n\n ep0status()\n\n }\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 22, "score": 186278.14468014683 }, { "content": "fn split(block: Block) -> parse::Result<(Vec<Static>, Vec<Stmt>)> {\n\n let mut istmts = block.stmts.into_iter();\n\n let mut stmts = vec![];\n\n let mut statics = vec![];\n\n\n\n while let Some(stmt) = istmts.next() {\n\n if let Stmt::Item(syn::Item::Static(s)) = stmt {\n\n statics.push(verify(s)?);\n\n } else {\n\n stmts.push(stmt);\n\n break;\n\n }\n\n }\n\n\n\n stmts.extend(istmts);\n\n\n\n Ok((statics, stmts))\n\n}\n\n\n", "file_path": "host/tasks-macros/src/lib.rs", "rank": 23, "score": 172857.4343347248 }, { "content": "fn audit_nrf52(dev: &mut ir::Device<'_>) {\n\n for periph in &mut dev.peripherals {\n\n // all peripherals\n\n for reg in &mut periph.registers {\n\n match &*reg.name {\n\n // enabling interrupts can break critical sections\n\n \"INTEN\" | \"INTENSET\" => {\n\n reg.access.make_write_unsafe();\n\n 
}\n\n _ => {}\n\n }\n\n }\n\n\n\n // Fix bitfield widths to match the OPS\n\n if periph.name == \"TWIM0\" {\n\n for reg in &mut periph.registers {\n\n match &*reg.name {\n\n \"RXD_AMOUNT\" | \"TXD_AMOUNT\" => {\n\n for field in reg.r_fields.iter_mut().chain(&mut reg.w_fields) {\n\n if field.name == \"AMOUNT\" {\n", "file_path": "host/regen/src/main.rs", "rank": 24, "score": 145636.77088786953 }, { "content": "#[cfg(feature = \"hid\")]\n\npub fn hid() -> (HidOut, HidIn) {\n\n static ONCE: AtomicBool = AtomicBool::new(false);\n\n\n\n if ONCE\n\n .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed)\n\n .is_ok()\n\n {\n\n (\n\n HidOut {\n\n _not_send_or_sync: NotSendOrSync::new(),\n\n },\n\n HidIn {\n\n _not_send_or_sync: NotSendOrSync::new(),\n\n },\n\n )\n\n } else {\n\n semidap::panic!(\"`usbd::hid` interface has already been claimed\")\n\n }\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 25, "score": 136513.99907896927 }, { "content": "#[allow(non_snake_case)]\n\nfn BREQUEST() -> u8 {\n\n USBD::borrow_unchecked(|usbd| usbd.BREQUEST.read().bits())\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 26, "score": 131220.53892404048 }, { "content": "#[allow(non_snake_case)]\n\nfn CRCSTATUS() -> u8 {\n\n RADIO::borrow_unchecked(|radio| radio.CRCSTATUS.read().CRCSTATUS())\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 27, "score": 131220.53892404048 }, { "content": "#[allow(non_snake_case)]\n\nfn BMREQUESTTYPE() -> u8 {\n\n USBD::borrow_unchecked(|usbd| usbd.BMREQUESTTYPE.read().bits())\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 28, "score": 131220.53892404048 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn SIZE_EPOUT1() -> u8 {\n\n USBD::borrow_unchecked(|usbd| usbd.SIZE_EPOUT1.read().bits())\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 29, "score": 129015.1205211381 }, { "content": "fn node_next<T>(node: *mut Node<T>) -> *mut *mut Node<T> {\n\n 
node.cast()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use core::mem::MaybeUninit;\n\n\n\n use super::Node;\n\n\n\n #[test]\n\n fn empty() {\n\n pool!(pub A: [u8; 1]);\n\n\n\n assert!(A::try_alloc().is_none());\n\n }\n\n\n\n #[test]\n\n fn sanity() {\n\n static mut N: MaybeUninit<Node<[u8; 1]>> = MaybeUninit::uninit();\n", "file_path": "firmware/pool/src/lib.rs", "rank": 30, "score": 127107.7961597091 }, { "content": "fn bit_range(br: svd::BitRange) -> (u8, u8) {\n\n (\n\n br.offset.try_into().expect(\"unreachable\"),\n\n br.width.try_into().expect(\"unreachable\"),\n\n )\n\n}\n", "file_path": "host/regen/src/translate/svd.rs", "rank": 31, "score": 126593.87503704129 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn EPOUT1_MAXCNT(cnt: u8) {\n\n USBD::borrow_unchecked(|usbd| usbd.EPOUT1_MAXCNT.write(|w| w.MAXCNT(cnt)))\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 32, "score": 123618.41390986064 }, { "content": "fn node_data<T>(node: *mut Node<T>) -> *mut T {\n\n node.cast()\n\n}\n\n\n", "file_path": "firmware/pool/src/lib.rs", "rank": 33, "score": 122584.7593784846 }, { "content": "fn peripherals() -> Vec<Peripheral<'static>> {\n\n vec![\n\n Peripheral {\n\n description: Some(\"Debug Control Block\".into()),\n\n instances: Instances::Single {\n\n base_address: 0xE000_EDF0,\n\n },\n\n name: \"DCB\".into(),\n\n registers: vec![\n\n {\n\n let fields = vec![\n\n Bitfield {\n\n description: None,\n\n name: \"C_DEBUGEN\".into(),\n\n offset: 0,\n\n width: 1,\n\n },\n\n Bitfield {\n\n description: None,\n\n name: \"C_HALT\".into(),\n", "file_path": "host/regen/src/cm.rs", "rank": 34, "score": 121027.09455873983 }, { "content": "fn start_epin1(buf: &'static [u8]) {\n\n let n = cmp::min(buf.len(), 64) as u8;\n\n semidap::info!(\"EP1IN: sending {} bytes\", n);\n\n\n\n USBD::borrow_unchecked(|usbd| {\n\n usbd.EPIN1_PTR.write(|w| w.PTR(buf.as_ptr() as u32));\n\n usbd.EPIN1_MAXCNT.write(|w| w.MAXCNT(n));\n\n crate::dma_start();\n\n 
usbd.TASKS_STARTEPIN1.write(|w| w.TASKS_STARTEPIN(1));\n\n });\n\n}\n\n\n\n/// # Safety\n\n/// This hands `buf` to the DMA. Caller must manually enforce that aliasing rules are respected\n\nunsafe fn start_epin2(buf: &mut [u8; 63]) {\n\n let n = TX_BUF.read(buf) as u8;\n\n if n != 0 {\n\n semidap::info!(\"EP2IN: sending {} bytes\", n);\n\n USBD::borrow_unchecked(|usbd| {\n\n usbd.EPIN2_PTR.write(|w| w.PTR(buf.as_ptr() as u32));\n\n usbd.EPIN2_MAXCNT.write(|w| w.MAXCNT(n));\n\n crate::dma_start();\n\n usbd.TASKS_STARTEPIN2.write(|w| w.TASKS_STARTEPIN(1));\n\n });\n\n EP2IN_STATE.store(Ep2InState::InUse);\n\n } else {\n\n EP2IN_STATE.store(Ep2InState::Idle);\n\n }\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 35, "score": 118712.29719029918 }, { "content": "fn continue_epin0(leftover: &mut u16) {\n\n USBD::borrow_unchecked(|usbd| {\n\n usbd.EPIN0_PTR\n\n .rmw(|r, w| w.PTR(r.PTR() + u32::from(MAX_PACKET_SIZE0)));\n\n\n\n let max_packet_size0 = u16::from(MAX_PACKET_SIZE0);\n\n if *leftover <= max_packet_size0 {\n\n let maxcnt = *leftover as u8;\n\n semidap::info!(\"EPIN0: sending last {}B of data\", maxcnt);\n\n short_ep0datadone_ep0status();\n\n usbd.EPIN0_MAXCNT.write(|w| w.MAXCNT(maxcnt));\n\n *leftover = 0;\n\n } else {\n\n semidap::info!(\"EPIN0: sending next {}B of data\", MAX_PACKET_SIZE0);\n\n *leftover -= max_packet_size0;\n\n }\n\n\n\n usbd.TASKS_STARTEPIN0.write(|w| w.TASKS_STARTEPIN(1));\n\n })\n\n}\n\n\n\n/// CDC ACM transmit (device to host) endpoint\n\npub struct Tx {\n\n _not_send_or_sync: NotSendOrSync,\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 36, "score": 118595.38280801853 }, { "content": "pub fn device(_device: &mut Device<'_>) {}\n", "file_path": "host/regen/src/opt.rs", "rank": 37, "score": 113627.0285326776 }, { "content": "pub fn bitwidth2ty(width: u8) -> TokenStream2 {\n\n if width <= 8 {\n\n quote!(u8)\n\n } else if width <= 16 {\n\n quote!(u16)\n\n } else if width <= 32 {\n\n quote!(u32)\n\n } else if 
width <= 64 {\n\n quote!(u64)\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "host/regen/src/codegen/util.rs", "rank": 38, "score": 112461.02527674116 }, { "content": "pub fn unsuffixed(val: u8) -> LitInt {\n\n LitInt::new(&val.to_string(), Span2::call_site())\n\n}\n\n\n", "file_path": "host/regen/src/codegen/util.rs", "rank": 39, "score": 112461.02527674116 }, { "content": "fn verify(s: syn::ItemStatic) -> parse::Result<Static> {\n\n let span = s.span();\n\n let mut attrs = s.attrs;\n\n\n\n let mut uninit = false;\n\n if let Some(pos) = attrs.iter().position(|attr| {\n\n attr.path\n\n .get_ident()\n\n .map(|id| id == \"uninit\")\n\n .unwrap_or(false)\n\n }) {\n\n let attr = &attrs[pos];\n\n let span = attr.span();\n\n\n\n if attr.tokens.to_string() == \"(unsafe)\" {\n\n attrs.remove(pos);\n\n uninit = true;\n\n } else {\n\n return Err(parse::Error::new(\n\n span,\n", "file_path": "host/tasks-macros/src/lib.rs", "rank": 40, "score": 105780.63589446773 }, { "content": "#[doc(hidden)]\n\npub fn log(stdout: &mut impl binWrite, level: Level) {\n\n extern \"Rust\" {\n\n fn __semidap_timestamp() -> u32;\n\n }\n\n let ts = unsafe { __semidap_timestamp() };\n\n stdout.write_byte(level as u8);\n\n stdout.leb128_write(ts);\n\n}\n\n\n\n/// Aborts the `semidap` process running on the host\n", "file_path": "firmware/semidap/src/lib.rs", "rank": 41, "score": 102681.84284281355 }, { "content": "fn backtrace(\n\n core: &mut Core,\n\n debug_frame: &DebugFrame<EndianSlice<LittleEndian>>,\n\n range_names: &[(Range<u64>, String)],\n\n lr: u32,\n\n mut pc: u32,\n\n sp: u32,\n\n) -> Result<(), anyhow::Error> {\n\n fn gimli2cortex(reg: &gimli::Register) -> cortex_m::Register {\n\n if reg.0 == 13 {\n\n Register::SP\n\n } else if reg.0 == 14 {\n\n Register::LR\n\n } else if reg.0 == 11 {\n\n Register::R11\n\n } else if reg.0 == 10 {\n\n Register::R10\n\n } else if reg.0 == 9 {\n\n Register::R9\n\n } else if reg.0 == 8 {\n", "file_path": "host/semiprobe/src/main.rs", 
"rank": 42, "score": 102208.41972698263 }, { "content": "fn backtrace(\n\n dap: &mut Dap,\n\n debug_frame: &DebugFrame<EndianSlice<LittleEndian>>,\n\n range_names: &[(Range<u64>, String)],\n\n lr: u32,\n\n mut pc: u32,\n\n sp: u32,\n\n) -> Result<(), anyhow::Error> {\n\n fn gimli2cortex(reg: &gimli::Register) -> cortex_m::Register {\n\n if reg.0 == 13 {\n\n Register::SP\n\n } else if reg.0 == 14 {\n\n Register::LR\n\n } else if reg.0 == 11 {\n\n Register::R11\n\n } else if reg.0 == 10 {\n\n Register::R10\n\n } else if reg.0 == 9 {\n\n Register::R9\n\n } else if reg.0 == 8 {\n", "file_path": "host/semidap/src/main.rs", "rank": 43, "score": 102208.41972698263 }, { "content": "// TODO gate each peripheral family (e.g. `UARTx`) behind a Cargo feature\n\nfn peripheral(peripheral: &Peripheral<'_>, rng: &mut XorShiftRng) -> TokenStream2 {\n\n let base_addr = match peripheral.instances {\n\n Instances::Single { base_address } => util::hex(base_address),\n\n _ => unimplemented!(),\n\n };\n\n\n\n let mut items = vec![];\n\n let mut field_decls = vec![];\n\n let mut field_exprs = vec![];\n\n\n\n for reg in &peripheral.registers {\n\n items.push(codegen::register(reg, rng));\n\n\n\n let doc = reg\n\n .description\n\n .as_ref()\n\n .map(|s| Cow::from(&**s))\n\n .unwrap_or_else(|| format!(\"{} register\", reg.name).into());\n\n let name = format_ident!(\"{}\", *reg.name);\n\n field_decls.push(quote!(\n", "file_path": "host/regen/src/codegen.rs", "rank": 44, "score": 101165.20096712136 }, { "content": "fn register(register: &Register<'_>, rng: &mut XorShiftRng) -> TokenStream2 {\n\n let name = format_ident!(\"{}\", *register.name);\n\n let mod_name = util::ident(&register.name.to_snake_case());\n\n\n\n let rty = util::width2ty(register.width);\n\n let mut mod_items = vec![];\n\n\n\n let mut rmethods = vec![];\n\n if register.access.can_read() {\n\n let mut chain = vec![];\n\n let methods = register\n\n .r_fields\n\n .iter()\n\n .map(|field| {\n\n let fty = 
util::bitwidth2ty(field.width);\n\n let field_name = format_ident!(\"{}\", *field.name);\n\n let offset = util::unsuffixed(field.offset);\n\n let mask = util::hex(field.mask());\n\n let doc = util::field_docs(&field);\n\n\n", "file_path": "host/regen/src/codegen.rs", "rank": 45, "score": 101165.20096712136 }, { "content": "fn count_args(footprint: &str, span: Span2) -> parse::Result<usize> {\n\n let mut chars = footprint.chars().peekable();\n\n\n\n let mut nargs = 0;\n\n while let Some(c) = chars.next() {\n\n if c == '{' {\n\n let next = chars.peek();\n\n\n\n if next == Some(&'}') {\n\n let _ = chars.next();\n\n\n\n nargs += 1;\n\n } else if next == Some(&'{') {\n\n // escaped brace\n\n let _ = chars.next();\n\n } else {\n\n return Err(parse::Error::new(\n\n span,\n\n \"unmatched `{`; use `{{` to escape it\",\n\n ));\n", "file_path": "shared/binfmt/macros/src/lib.rs", "rank": 46, "score": 100764.26997190279 }, { "content": "fn std_req(\n\n usb_state: &mut usb2::State,\n\n ep_state: &mut Ep0State,\n\n req: StandardRequest,\n\n) -> Result<(), ()> {\n\n match req {\n\n StandardRequest::GetDescriptor { descriptor, length } => {\n\n semidap::info!(\"GET_DESCRIPTOR [{}] ..\", length as u8);\n\n\n\n match descriptor {\n\n GetDescriptor::Device => {\n\n semidap::info!(\"GET_DESCRIPTOR Device\");\n\n\n\n start_epin0(\n\n DEVICE_DESC.get(..length.into()).unwrap_or(&DEVICE_DESC),\n\n ep_state,\n\n );\n\n }\n\n\n\n GetDescriptor::DeviceQualifier => {\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 47, "score": 100743.36985607998 }, { "content": "// if the target device is halted it is because it performed a system call using\n\n// the BKPT instruction. The immediate value passed to the BKPT instruction will\n\n// tell us which system call to service. 
All system calls are 'diverging' from\n\n// the point of view of the device; system calls also terminate this `semidap`\n\n// instance\n\nfn handle_syscall(\n\n dap: &mut Dap,\n\n debug_frame: &DebugFrame<EndianSlice<LittleEndian>>,\n\n range_names: &[(Range<u64>, String)],\n\n) -> Result<i32, anyhow::Error> {\n\n const SYS_ABORT: u16 = 0xbeaa; // BKPT 0xAA\n\n const SYS_EXCEPTION: u16 = 0xbeff; // BKPT 0xFF\n\n const SYS_EXIT: u16 = 0xbeab; // BKPT 0xAB\n\n\n\n let pc = dap.read_core_register(cortex_m::Register::PC)?;\n\n let insn = dap.memory_read::<u16>(pc, 1)?[0];\n\n\n\n match insn {\n\n SYS_EXIT => {\n\n let r0 = dap.read_core_register(cortex_m::Register::R0)?;\n\n Ok(r0 as i32)\n\n }\n\n\n\n SYS_EXCEPTION => handle_exception(dap, debug_frame, range_names),\n\n\n", "file_path": "host/semidap/src/main.rs", "rank": 48, "score": 99899.59113165017 }, { "content": "fn handle_syscall(\n\n core: &mut Core,\n\n debug_frame: &DebugFrame<EndianSlice<LittleEndian>>,\n\n range_names: &[(Range<u64>, String)],\n\n) -> Result<i32, anyhow::Error> {\n\n const SYS_ABORT: u16 = 0xbeaa; // BKPT 0xAA\n\n const SYS_EXCEPTION: u16 = 0xbeff; // BKPT 0xFF\n\n const SYS_EXIT: u16 = 0xbeab; // BKPT 0xAB\n\n\n\n let pc = core.read_core_reg(cortex_m::Register::PC as u16)?;\n\n let insn = u16::from(core.read_word_8(pc)?) | u16::from(core.read_word_8(pc + 1)?) 
<< 8;\n\n\n\n match insn {\n\n SYS_EXIT => {\n\n let r0 = core.read_core_reg(cortex_m::Register::R0 as u16)?;\n\n Ok(r0 as i32)\n\n }\n\n\n\n SYS_EXCEPTION => Ok(1),\n\n\n", "file_path": "host/semiprobe/src/main.rs", "rank": 49, "score": 99891.1426489331 }, { "content": "fn handle_exception(\n\n dap: &mut Dap,\n\n debug_frame: &DebugFrame<EndianSlice<LittleEndian>>,\n\n range_names: &[(Range<u64>, String)],\n\n) -> Result<i32, anyhow::Error> {\n\n use cortex_m::Register;\n\n\n\n fn read_register(dap: &mut Dap, reg: Register) -> Result<(Register, u32), anyhow::Error> {\n\n let val = dap.read_core_register(reg)?;\n\n Ok((reg, val))\n\n }\n\n\n\n const SCB_ICSR: u32 = 0xE000_ED04;\n\n\n\n let icsr = dap.memory_read_word(SCB_ICSR)?;\n\n let vectactive = icsr as u8;\n\n\n\n if vectactive == 0 {\n\n println!(\"error: SYS_EXCEPTION called from thread mode\");\n\n return Ok(1);\n", "file_path": "host/semidap/src/main.rs", "rank": 50, "score": 99891.1426489331 }, { "content": "fn write_(input: Input, newline: bool, tag: bool) -> parse::Result<TokenStream> {\n\n let mut footprint = input.footprint.value();\n\n\n\n let span = input.footprint.span();\n\n if footprint.contains('@') {\n\n return Err(parse::Error::new(span, \"`@` character is not allowed\"));\n\n }\n\n\n\n if newline {\n\n footprint.push('\\n');\n\n }\n\n\n\n let fargs = count_args(&footprint, span)?;\n\n let iargs = input.args.len();\n\n\n\n if fargs != iargs {\n\n return Err(parse::Error::new(\n\n span,\n\n &format!(\n\n \"supplied args (n={}) don't match footprint args (n={})\",\n", "file_path": "shared/binfmt/macros/src/lib.rs", "rank": 51, "score": 94608.54869656573 }, { "content": " pub bmAttributes: bmAttributes,\n\n /// Maximum power (1 ULP = 2 mA)\n\n pub bMaxPower: u8,\n\n}\n\n\n\nimpl Desc {\n\n /// The size of this descriptor on the wire\n\n pub const SIZE: u8 = 9;\n\n\n\n /// Returns the wire representation of this descriptor\n\n pub fn bytes(&self) -> [u8; Self::SIZE as usize] {\n\n [\n\n 
Self::SIZE,\n\n DescriptorType::CONFIGURATION as u8,\n\n self.wTotalLength as u8,\n\n (self.wTotalLength >> 8) as u8,\n\n self.bNumInterfaces,\n\n self.bConfigurationValue,\n\n self.iConfiguration,\n\n (1 << 7)\n", "file_path": "shared/usb2/src/config.rs", "rank": 52, "score": 82117.5951097182 }, { "content": "//! Configuration descriptor\n\n\n\nuse crate::DescriptorType;\n\n\n\n/// Configuration Descriptor\n\n#[allow(non_snake_case)]\n\n#[derive(Clone, Copy)]\n\npub struct Desc {\n\n // pub blength: u8,\n\n // pub bDescriptorType: u8,\n\n /// The total length of this configuration descriptor plus the descriptors (interface, endpoint,\n\n /// etc.) below it\n\n pub wTotalLength: u16,\n\n /// Number of interfaces associated to this configuration\n\n pub bNumInterfaces: u8,\n\n /// Configuration value\n\n pub bConfigurationValue: u8,\n\n /// Configuration string index\n\n pub iConfiguration: u8,\n\n /// Attributes\n", "file_path": "shared/usb2/src/config.rs", "rank": 53, "score": 82116.86547394398 }, { "content": " | if self.bmAttributes.self_powered {\n\n 1 << 6\n\n } else {\n\n 0\n\n }\n\n | if self.bmAttributes.remote_wakeup {\n\n 1 << 5\n\n } else {\n\n 0\n\n },\n\n self.bMaxPower,\n\n ]\n\n }\n\n}\n\n\n\n/// Attributes\n\n#[allow(non_camel_case_types)]\n\n#[derive(Clone, Copy)]\n\npub struct bmAttributes {\n\n /// Self-powered?\n\n pub self_powered: bool,\n\n /// Remote wakeup\n\n pub remote_wakeup: bool,\n\n}\n", "file_path": "shared/usb2/src/config.rs", "rank": 54, "score": 82106.83215310273 }, { "content": " pub fn bytes(&self) -> [u8; Self::SIZE as usize] {\n\n [\n\n Self::SIZE,\n\n super::CS_INTERFACE,\n\n super::SUBTYPE_UNION,\n\n self.bControlInterface,\n\n self.bSubordinateInterface0,\n\n ]\n\n }\n\n}\n", "file_path": "shared/usb2/src/cdc/union.rs", "rank": 55, "score": 79608.20982153525 }, { "content": "\n\n /// Returns the wire representation of this device endpoint\n\n pub fn bytes(&self) -> [u8; Self::SIZE as usize] {\n\n [\n\n Self::SIZE,\n\n 
super::CS_INTERFACE,\n\n super::SUBTYPE_HEADER,\n\n self.bcdCDC as u16 as u8,\n\n (self.bcdCDC as u16 >> 8) as u8,\n\n ]\n\n }\n\n}\n", "file_path": "shared/usb2/src/cdc/header.rs", "rank": 56, "score": 79607.2080012184 }, { "content": "//! Union Interface functional descriptor\n\n\n\n/// Union Interface functional descriptor\n\n#[allow(non_snake_case)]\n\n#[derive(Clone, Copy)]\n\npub struct Desc {\n\n // bFunctionLength: u8,\n\n // bDescriptorType: u8,\n\n // bDescriptorSubtype: u8,\n\n /// Controlling interface\n\n pub bControlInterface: u8,\n\n /// Subordinate interface\n\n pub bSubordinateInterface0: u8,\n\n}\n\n\n\nimpl Desc {\n\n /// Size of this descriptor on the wire\n\n pub const SIZE: u8 = 5;\n\n\n\n /// Returns the wire representation of this device endpoint\n", "file_path": "shared/usb2/src/cdc/union.rs", "rank": 57, "score": 79605.5241780465 }, { "content": " /// Device can send/receive call management information over a Data Class interface\n\n pub data_class: bool,\n\n}\n\n\n\nimpl Capabilities {\n\n fn byte(&self) -> u8 {\n\n let mut byte = 0;\n\n if self.call_management {\n\n byte |= 1 << 0;\n\n }\n\n if self.data_class {\n\n byte |= 1 << 1;\n\n }\n\n byte\n\n }\n\n}\n\n\n\nimpl Desc {\n\n /// Size of this descriptor on the wire\n\n pub const SIZE: u8 = 5;\n", "file_path": "shared/usb2/src/cdc/call.rs", "rank": 58, "score": 79600.23596652766 }, { "content": "\n\n /// Returns the wire representation of this device endpoint\n\n pub fn bytes(&self) -> [u8; Self::SIZE as usize] {\n\n [\n\n Self::SIZE,\n\n super::CS_INTERFACE,\n\n super::SUBTYPE_CALL,\n\n self.bmCapabilities.byte(),\n\n self.bDataInterface,\n\n ]\n\n }\n\n}\n", "file_path": "shared/usb2/src/cdc/call.rs", "rank": 59, "score": 79599.65808710513 }, { "content": "//! 
Header functional descriptor\n\n\n\n/// Header functional descriptor\n\n#[allow(non_snake_case)]\n\npub struct Desc {\n\n /// Communications Devices Specification release number\n\n pub bcdCDC: bcdCDC,\n\n}\n\n\n\n/// Communications Devices specification release number\n\n#[allow(non_camel_case_types)]\n\n#[derive(Clone, Copy)]\n\npub enum bcdCDC {\n\n /// 1.10\n\n V11 = 0x01_10,\n\n}\n\n\n\nimpl Desc {\n\n /// The size of this descriptor on the wire\n\n pub const SIZE: u8 = 5;\n", "file_path": "shared/usb2/src/cdc/header.rs", "rank": 60, "score": 79598.67277765261 }, { "content": "//! Call Management functional descriptor\n\n\n\n/// Call Management functional descriptor\n\n#[allow(non_snake_case)]\n\n#[derive(Clone, Copy)]\n\npub struct Desc {\n\n // bFunctionLength: u8,\n\n // bDescriptorType: u8,\n\n // bDescriptorSubtype: u8,\n\n /// Capabilities\n\n pub bmCapabilities: Capabilities,\n\n /// Interface of the Data Class interface\n\n pub bDataInterface: u8,\n\n}\n\n\n\n/// Capabilities\n\n#[derive(Clone, Copy)]\n\npub struct Capabilities {\n\n /// Device handles call management itself\n\n pub call_management: bool,\n", "file_path": "shared/usb2/src/cdc/call.rs", "rank": 61, "score": 79596.74151691694 }, { "content": " }\n\n byte\n\n }\n\n}\n\n\n\nimpl Desc {\n\n /// Size of this descriptor on the wire\n\n pub const SIZE: u8 = 4;\n\n\n\n /// Returns the wire representation of this device endpoint\n\n pub fn bytes(&self) -> [u8; Self::SIZE as usize] {\n\n [\n\n Self::SIZE,\n\n super::CS_INTERFACE,\n\n super::SUBTYPE_ACM,\n\n self.bmCapabilities.byte(),\n\n ]\n\n }\n\n}\n", "file_path": "shared/usb2/src/cdc/acm.rs", "rank": 62, "score": 79574.30500846136 }, { "content": " /// Device supports `Send_Break`\n\n pub send_break: bool,\n\n /// Device supports `Network_Connection`\n\n pub network_connection: bool,\n\n}\n\n\n\nimpl Capabilities {\n\n fn byte(&self) -> u8 {\n\n let mut byte = 0;\n\n if self.comm_features {\n\n byte |= 1 << 0;\n\n }\n\n if self.line_serial 
{\n\n byte |= 1 << 1;\n\n }\n\n if self.send_break {\n\n byte |= 1 << 2;\n\n }\n\n if self.network_connection {\n\n byte |= 1 << 3;\n", "file_path": "shared/usb2/src/cdc/acm.rs", "rank": 63, "score": 79573.42548153595 }, { "content": "//! Abstract Control Management functional descriptor\n\n\n\n/// Abstract Control Management functional descriptor\n\n#[allow(non_snake_case)]\n\n#[derive(Clone, Copy)]\n\npub struct Desc {\n\n // bFunctionLength: u8,\n\n // bDescriptorType: u8,\n\n // bDescriptorSubtype: u8,\n\n /// Capabilities\n\n pub bmCapabilities: Capabilities,\n\n}\n\n\n\n/// Capabilities\n\n#[derive(Clone, Copy)]\n\npub struct Capabilities {\n\n /// Device supports `{Set,Clear,Get}_Comm_Feature`\n\n pub comm_features: bool,\n\n /// Device supports `{Set,Get}_Line_Coding`, `Set_Control_Line_State` and `Serial_State`\n\n pub line_serial: bool,\n", "file_path": "shared/usb2/src/cdc/acm.rs", "rank": 64, "score": 79566.15851075122 }, { "content": "use core::time::Duration;\n\nuse std::{\n\n io::{self, Read as _, Write as _},\n\n thread,\n\n};\n\n\n\nuse anyhow::anyhow;\n\nuse serialport::SerialPortType;\n\n\n", "file_path": "host/acm-cat/src/main.rs", "rank": 65, "score": 78998.3019916416 }, { "content": "fn suspend() {\n\n semidap::info!(\"entering low power mode\");\n\n USBD::borrow_unchecked(|usbd| usbd.LOWPOWER.write(|w| w.LOWPOWER(1)))\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 66, "score": 63131.829806860536 }, { "content": "#[allow(dead_code)]\n\nfn unreachable() -> ! 
{\n\n disconnect();\n\n semidap::panic!(\"unreachable\")\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 67, "score": 63131.829806860536 }, { "content": "fn resume() {\n\n semidap::info!(\"leaving low power mode\");\n\n USBD::borrow_unchecked(|usbd| usbd.LOWPOWER.zero())\n\n}\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 68, "score": 63131.829806860536 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn STARTEPOUT1() {\n\n USBD::borrow_unchecked(|usbd| usbd.TASKS_STARTEPOUT1.write(|w| w.TASKS_STARTEPOUT(1)));\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 69, "score": 63131.829806860536 }, { "content": "// NOTE(borrow_unchecked) all these are either single instruction reads w/o side effects or single\n\n// instruction writes to registers that won't be RMW-ed\n\nfn connect() {\n\n USBD::borrow_unchecked(|usbd| usbd.USBPULLUP.write(|w| w.CONNECT(1)));\n\n semidap::info!(\"pulled D+ up\");\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 70, "score": 63131.829806860536 }, { "content": "// simulate a disconnect so the host doesn't retry enumeration while the device is halted\n\nfn disconnect() {\n\n USBD::borrow_unchecked(|usbd| usbd.USBPULLUP.zero());\n\n semidap::info!(\"detached from the bus\");\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 71, "score": 63131.829806860536 }, { "content": "fn ep0status() {\n\n USBD::borrow_unchecked(|usbd| {\n\n usbd.TASKS_EP0STATUS.write(|w| w.TASKS_EP0STATUS(1));\n\n });\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 72, "score": 63131.829806860536 }, { "content": "fn todo() -> ! {\n\n semidap::panic!(\"unimplemented\")\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 73, "score": 63131.829806860536 }, { "content": "fn unreachable() -> ! 
{\n\n semidap::panic!(\"unreachable\")\n\n}\n", "file_path": "firmware/hal/src/radio.rs", "rank": 74, "score": 63131.829806860536 }, { "content": "#[allow(non_snake_case)]\n\nfn EP0STALL() {\n\n USBD::borrow_unchecked(|usbd| usbd.TASKS_EP0STALL.write(|w| w.TASKS_EP0STALL(1)));\n\n semidap::info!(\"EP0: stalled\");\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 75, "score": 63131.829806860536 }, { "content": "fn todo() -> ! {\n\n disconnect();\n\n semidap::panic!(\"unimplemented\")\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 76, "score": 63131.829806860536 }, { "content": "#[allow(non_snake_case)]\n\nfn TASKS_CCASTART() {\n\n RADIO::borrow_unchecked(|radio| radio.TASKS_CCASTART.write(|w| w.TASKS_CCASTART(1)))\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 77, "score": 61979.680012959194 }, { "content": "#[allow(non_snake_case)]\n\nfn INTENCLR_FRAMESTART() {\n\n RADIO::borrow_unchecked(|radio| radio.INTENCLR.write(|w| w.FRAMESTART(1)));\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 78, "score": 61979.680012959194 }, { "content": "// NOTE must be followed by a volatile STORE operation\n\nfn dma_start() {\n\n sync::atomic::compiler_fence(Ordering::Release)\n\n}\n\n\n", "file_path": "firmware/hal/src/lib.rs", "rank": 79, "score": 61979.680012959194 }, { "content": "#[allow(non_snake_case)]\n\nfn TASKS_STOP() {\n\n RADIO::borrow_unchecked(|radio| radio.TASKS_STOP.write(|w| w.TASKS_STOP(1)))\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 80, "score": 61979.680012959194 }, { "content": "#[allow(non_snake_case)]\n\nfn TASKS_START() {\n\n RADIO::borrow_unchecked(|radio| radio.TASKS_START.write(|w| w.TASKS_START(1)))\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 81, "score": 61979.680012959194 }, { "content": "#[allow(non_snake_case)]\n\nfn TASKS_RXEN() {\n\n RADIO::borrow_unchecked(|radio| radio.TASKS_RXEN.write(|w| w.TASKS_RXEN(1)))\n\n}\n\n\n", "file_path": 
"firmware/hal/src/radio.rs", "rank": 82, "score": 61979.680012959194 }, { "content": "struct Fn {\n\n locals: Vec<Static>,\n\n name: Ident2,\n\n output: ReturnType,\n\n stmts: Vec<Stmt>,\n\n}\n\n\n", "file_path": "host/tasks-macros/src/lib.rs", "rank": 83, "score": 61979.680012959194 }, { "content": "// NOTE must be preced by a volatile LOAD operation\n\nfn dma_end() {\n\n sync::atomic::compiler_fence(Ordering::Acquire)\n\n}\n\n\n\n#[allow(dead_code)]\n\nasync fn poll_fn<T, F>(f: F) -> T\n\nwhere\n\n F: FnMut() -> Poll<T> + Unpin,\n\n{\n\n struct PollFn<F> {\n\n f: F,\n\n }\n\n\n\n impl<T, F> Future for PollFn<F>\n\n where\n\n F: FnMut() -> Poll<T> + Unpin,\n\n {\n\n type Output = T;\n\n\n\n fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<T> {\n", "file_path": "firmware/hal/src/lib.rs", "rank": 84, "score": 61979.680012959194 }, { "content": "fn unshort_ep0datadone_ep0status() {\n\n USBD::borrow_unchecked(|usbd| {\n\n usbd.SHORTS.rmw(|_, w| w.EP0DATADONE_EP0STATUS(0));\n\n });\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 85, "score": 60894.24711155456 }, { "content": "fn short_ep0datadone_ep0status() {\n\n USBD::borrow_unchecked(|usbd| {\n\n usbd.SHORTS.rmw(|_, w| w.EP0DATADONE_EP0STATUS(1));\n\n });\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 86, "score": 60894.24711155456 }, { "content": "#[allow(non_snake_case)]\n\nfn RXCRC() -> u32 {\n\n RADIO::borrow_unchecked(|radio| radio.RXCRC.read().RXCRC())\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 87, "score": 60410.173790191926 }, { "content": "#[allow(non_snake_case)]\n\nfn WLENGTH() -> u16 {\n\n USBD::borrow_unchecked(|usbd| {\n\n u16::from(usbd.WLENGTHL.read().bits()) | (u16::from(usbd.WLENGTHH.read().bits()) << 8)\n\n })\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 88, "score": 60410.173790191926 }, { "content": "#[allow(non_snake_case)]\n\nfn WINDEX() -> u16 {\n\n USBD::borrow_unchecked(|usbd| {\n\n 
u16::from(usbd.WINDEXL.read().bits()) | (u16::from(usbd.WINDEXH.read().bits()) << 8)\n\n })\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 89, "score": 60410.173790191926 }, { "content": "/// Wait For Event\n\npub fn wfe() {\n\n extern \"C\" {\n\n fn __wfe();\n\n }\n\n unsafe { __wfe() }\n\n}\n\n\n", "file_path": "firmware/asm/src/lib.rs", "rank": 90, "score": 60410.173790191926 }, { "content": "#[allow(non_snake_case)]\n\nfn STATE() -> State {\n\n RADIO::borrow_unchecked(|radio| {\n\n let bits = radio.STATE.read().bits();\n\n let state = unsafe { mem::transmute(bits) };\n\n semidap::debug!(\"State::{}\", state);\n\n state\n\n })\n\n}\n\n\n", "file_path": "firmware/hal/src/radio.rs", "rank": 91, "score": 60410.173790191926 }, { "content": "fn register_<'a>(\n\n r: &'a svd::Register,\n\n ci: Option<&svd::ClusterInfo>,\n\n defaults: &[&svd::RegisterProperties],\n\n ir_regs: &mut Vec<ir::Register<'a>>,\n\n) {\n\n match r {\n\n svd::Register::Single(ri) => {\n\n ir_regs.push(translate::register(ri, ci, defaults));\n\n }\n\n\n\n svd::Register::Array(ri, dim) => {\n\n assert!(dim.dim_index.is_none(), \"unimplemented\");\n\n assert!(ri.name.contains(\"[%s]\"), \"unimplemented\");\n\n\n\n let template = &ri.name;\n\n let offset = ri.address_offset;\n\n\n\n for i in 0..dim.dim {\n\n // FIXME too lazy to do ownership correctly right now\n", "file_path": "host/regen/src/translate/svd.rs", "rank": 92, "score": 60410.173790191926 }, { "content": "/// Send EVent\n\npub fn sev() {\n\n #[cfg(target_arch = \"arm\")]\n\n extern \"C\" {\n\n fn __sev();\n\n }\n\n #[cfg(target_arch = \"arm\")]\n\n unsafe {\n\n __sev()\n\n }\n\n}\n\n\n", "file_path": "firmware/asm/src/lib.rs", "rank": 93, "score": 60410.173790191926 }, { "content": "/// Wait For Interrupt\n\npub fn wfi() {\n\n extern \"C\" {\n\n fn __wfi();\n\n }\n\n unsafe { __wfi() }\n\n}\n", "file_path": "firmware/asm/src/lib.rs", "rank": 94, "score": 60410.173790191926 }, { "content": "#[inline(always)]\n\npub fn 
abort() -> ! {\n\n extern \"C\" {\n\n fn __abort() -> !;\n\n }\n\n unsafe { __abort() }\n\n}\n\n\n\n/// Exits the `semidap` process running on the host with the specified exit code\n", "file_path": "firmware/semidap/src/lib.rs", "rank": 95, "score": 60410.173790191926 }, { "content": "#[allow(non_snake_case)]\n\nfn WVALUE() -> u16 {\n\n USBD::borrow_unchecked(|usbd| {\n\n u16::from(usbd.WVALUEL.read().bits()) | (u16::from(usbd.WVALUEH.read().bits()) << 8)\n\n })\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 96, "score": 60410.173790191926 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn EPIN1_PTR() -> u32 {\n\n USBD::borrow_unchecked(|usbd| usbd.EPIN1_PTR.read().bits())\n\n}\n\n\n", "file_path": "firmware/hal/src/usbd.rs", "rank": 97, "score": 59324.740888787295 }, { "content": "/// Masks interrupts\n\npub fn disable_irq() {\n\n extern \"C\" {\n\n fn __cpsidi();\n\n }\n\n unsafe { __cpsidi() }\n\n}\n\n\n", "file_path": "firmware/asm/src/lib.rs", "rank": 98, "score": 59324.740888787295 }, { "content": "/// Unmasks interrupts\n\npub fn enable_irq() {\n\n extern \"C\" {\n\n fn __cpsiei();\n\n }\n\n unsafe { __cpsiei() }\n\n}\n\n\n", "file_path": "firmware/asm/src/lib.rs", "rank": 99, "score": 59324.740888787295 } ]
Rust
examples/main.rs
grafana/grafana-plugin-sdk-rust
707d2b65b0c0be0d43d80a7b7778384d12f9d3c7
use std::{ sync::{ atomic::{AtomicUsize, Ordering}, Arc, }, time::Duration, }; use bytes::Bytes; use chrono::prelude::*; use futures_util::stream::FuturesOrdered; use http::Response; use thiserror::Error; use tokio_stream::StreamExt; use tracing::{debug, info}; use grafana_plugin_sdk::{backend, data, prelude::*}; #[derive(Clone, Debug, Default)] struct MyPluginService(Arc<AtomicUsize>); impl MyPluginService { fn new() -> Self { Self(Arc::new(AtomicUsize::new(0))) } } #[derive(Debug, Error)] #[error("Error querying backend for query {ref_id}: {source}")] struct QueryError { source: data::Error, ref_id: String, } impl backend::DataQueryError for QueryError { fn ref_id(self) -> String { self.ref_id } } #[tonic::async_trait] impl backend::DataService for MyPluginService { type QueryError = QueryError; type Stream = backend::BoxDataResponseStream<Self::QueryError>; async fn query_data(&self, request: backend::QueryDataRequest) -> Self::Stream { Box::pin( request .queries .into_iter() .map(|x| async { Ok(backend::DataResponse::new( x.ref_id.clone(), vec![[ [ Utc.ymd(2021, 1, 1).and_hms(12, 0, 0), Utc.ymd(2021, 1, 1).and_hms(12, 0, 1), Utc.ymd(2021, 1, 1).and_hms(12, 0, 2), ] .into_field("time"), [1_u32, 2, 3].into_field("x"), ["a", "b", "c"].into_field("y"), ] .into_frame("foo") .check() .map_err(|source| QueryError { ref_id: x.ref_id, source, })?], )) }) .collect::<FuturesOrdered<_>>(), ) } } #[derive(Debug, Error)] #[error("Error streaming data")] enum StreamError { #[error("Error converting frame: {0}")] Conversion(#[from] backend::ConvertToError), #[error("Invalid frame returned: {0}")] InvalidFrame(#[from] data::Error), } #[tonic::async_trait] impl backend::StreamService for MyPluginService { type JsonValue = (); async fn subscribe_stream( &self, request: backend::SubscribeStreamRequest, ) -> Result<backend::SubscribeStreamResponse, Self::Error> { let response = if request.path.as_str() == "stream" { backend::SubscribeStreamResponse::ok(None) } else { 
backend::SubscribeStreamResponse::not_found() }; info!(path = %request.path, "Subscribing to stream"); Ok(response) } type Error = StreamError; type Stream = backend::BoxRunStream<Self::Error>; async fn run_stream( &self, _request: backend::RunStreamRequest, ) -> Result<Self::Stream, Self::Error> { info!("Running stream"); let mut x = 0u32; let n = 3; let mut frame = data::Frame::new("foo").with_field((x..x + n).into_field("x")); Ok(Box::pin( async_stream::try_stream! { loop { frame.fields_mut()[0].set_values( (x..x+n) )?; let packet = backend::StreamPacket::from_frame(frame.check()?)?; debug!("Yielding frame from {} to {}", x, x+n); yield packet; x += n; } } .throttle(Duration::from_secs(1)), )) } async fn publish_stream( &self, _request: backend::PublishStreamRequest, ) -> Result<backend::PublishStreamResponse, Self::Error> { info!("Publishing to stream"); todo!() } } #[derive(Debug, Error)] enum ResourceError { #[error("HTTP error: {0}")] Http(#[from] http::Error), #[error("Not found")] NotFound, } impl backend::ErrIntoHttpResponse for ResourceError { fn into_http_response(self) -> Result<http::Response<Bytes>, Box<dyn std::error::Error>> { let status = match &self { Self::Http(_) => http::StatusCode::INTERNAL_SERVER_ERROR, Self::NotFound => http::StatusCode::NOT_FOUND, }; Ok(Response::builder() .status(status) .header(http::header::CONTENT_TYPE, "application/json") .body(Bytes::from(serde_json::to_vec( &serde_json::json!({"error": self.to_string()}), )?))?) 
} } #[tonic::async_trait] impl backend::ResourceService for MyPluginService { type Error = ResourceError; type InitialResponse = http::Response<Bytes>; type Stream = backend::BoxResourceStream<Self::Error>; async fn call_resource( &self, r: backend::CallResourceRequest, ) -> Result<(Self::InitialResponse, Self::Stream), Self::Error> { let count = Arc::clone(&self.0); let response_and_stream = match r.request.uri().path() { "/echo" => Ok(( Response::new(r.request.into_body()), Box::pin(futures::stream::empty()) as Self::Stream, )), "/count" => Ok(( Response::new( count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(), ), Box::pin(async_stream::try_stream! { loop { let body = count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(); yield body; } }) as Self::Stream, )), _ => return Err(ResourceError::NotFound), }; response_and_stream } } #[grafana_plugin_sdk::main( services(data, resource, stream), init_subscriber = true, shutdown_handler = "0.0.0.0:10001" )] async fn plugin() -> MyPluginService { MyPluginService::new() }
use std::{ sync::{ atomic::{AtomicUsize, Ordering}, Arc, }, time::Duration, }; use bytes::Bytes; use chrono::prelude::*; use futures_util::stream::FuturesOrdered; use http::Response; use thiserror::Error; use tokio_stream::StreamExt; use tracing::{debug, info}; use grafana_plugin_sdk::{backend, data, prelude::*}; #[derive(Clone, Debug, Default)] struct MyPluginService(Arc<AtomicUsize>); impl MyPluginService { fn new() -> Self { Self(Arc::new(AtomicUsize::new(0))) } } #[derive(Debug, Error)] #[error("Error querying backend for query {ref_id}: {source}")] struct QueryError { source: data::Error, ref_id: String, } impl backend::DataQueryError for QueryError { fn ref_id(self) -> String { self.ref_id } } #[tonic::async_trait] impl backend::DataService for MyPluginService { type QueryError = QueryError; type Stream = backend::BoxDataResponseStream<Self::QueryError>; async fn query_data(&self, request: backend::QueryDataRequest) -> Self::Stream { Box::pin( request .queries .into_iter() .map(|x| async { Ok(backend::DataResponse::new( x.ref_id.clone(), vec![[ [ Utc.ymd(2021, 1, 1).and_hms(12, 0, 0), Utc.ymd(2021, 1, 1).and_hms(12, 0, 1), Utc.ymd(2021, 1, 1).and_hms(12, 0, 2), ] .into_field("time"), [1_u32, 2, 3].into_field("x"), ["a", "b", "c"].into_field("y"), ] .into_frame("foo") .check() .map_err(|source| QueryError { ref_id: x.ref_id, source, })?], )) }) .collect::<FuturesOrdered<_>>(), ) } } #[derive(Debug, Error)] #[error("Error streaming data")] enum StreamError { #[error("Error converting frame: {0}")] Conversion(#[from] backend::ConvertToError), #[error("Invalid frame returned: {0}")] InvalidFrame(#[from] data::Error), } #[tonic::async_trait] impl backend::StreamService for MyPluginService { type JsonValue = (); async fn subscribe_stream( &self, request: backend::SubscribeStreamRequest, ) -> Result<backend::SubscribeStreamResponse, Self::Error> { let response = if request.path.as_str() == "stream" { backend::SubscribeStreamResponse::ok(None) } else { 
backend::SubscribeStreamResponse::not_found() }; info!(path = %request.path, "Subscribing to stream"); Ok(response) } type Error = StreamError; type Stream = backend::BoxRunStream<Self::Error>; async fn run_stream( &self, _request: backend::RunStreamRequest, ) -> Result<Self::Stream, Self::Error> { info!("Running stream"); let mut x = 0u32; let n = 3; let mut frame = da
async fn publish_stream( &self, _request: backend::PublishStreamRequest, ) -> Result<backend::PublishStreamResponse, Self::Error> { info!("Publishing to stream"); todo!() } } #[derive(Debug, Error)] enum ResourceError { #[error("HTTP error: {0}")] Http(#[from] http::Error), #[error("Not found")] NotFound, } impl backend::ErrIntoHttpResponse for ResourceError { fn into_http_response(self) -> Result<http::Response<Bytes>, Box<dyn std::error::Error>> { let status = match &self { Self::Http(_) => http::StatusCode::INTERNAL_SERVER_ERROR, Self::NotFound => http::StatusCode::NOT_FOUND, }; Ok(Response::builder() .status(status) .header(http::header::CONTENT_TYPE, "application/json") .body(Bytes::from(serde_json::to_vec( &serde_json::json!({"error": self.to_string()}), )?))?) } } #[tonic::async_trait] impl backend::ResourceService for MyPluginService { type Error = ResourceError; type InitialResponse = http::Response<Bytes>; type Stream = backend::BoxResourceStream<Self::Error>; async fn call_resource( &self, r: backend::CallResourceRequest, ) -> Result<(Self::InitialResponse, Self::Stream), Self::Error> { let count = Arc::clone(&self.0); let response_and_stream = match r.request.uri().path() { "/echo" => Ok(( Response::new(r.request.into_body()), Box::pin(futures::stream::empty()) as Self::Stream, )), "/count" => Ok(( Response::new( count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(), ), Box::pin(async_stream::try_stream! { loop { let body = count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(); yield body; } }) as Self::Stream, )), _ => return Err(ResourceError::NotFound), }; response_and_stream } } #[grafana_plugin_sdk::main( services(data, resource, stream), init_subscriber = true, shutdown_handler = "0.0.0.0:10001" )] async fn plugin() -> MyPluginService { MyPluginService::new() }
ta::Frame::new("foo").with_field((x..x + n).into_field("x")); Ok(Box::pin( async_stream::try_stream! { loop { frame.fields_mut()[0].set_values( (x..x+n) )?; let packet = backend::StreamPacket::from_frame(frame.check()?)?; debug!("Yielding frame from {} to {}", x, x+n); yield packet; x += n; } } .throttle(Duration::from_secs(1)), )) }
function_block-function_prefixed
[ { "content": "/// Error supertrait used in [`DataService::query_data`].\n\npub trait DataQueryError: std::error::Error {\n\n /// Return the `ref_id` of the incoming query to which this error corresponds.\n\n ///\n\n /// This allows the SDK to align queries up with any failed requests.\n\n fn ref_id(self) -> String;\n\n}\n\n\n\n/// Used to respond for requests for data from datasources and app plugins.\n\n///\n\n/// Datasource plugins will usually want to implement this trait to perform the\n\n/// bulk of their processing.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use futures_util::stream::FuturesOrdered;\n\n/// use grafana_plugin_sdk::{backend, data, prelude::*};\n\n/// use thiserror::Error;\n\n///\n\n/// struct MyPlugin;\n", "file_path": "src/backend/data.rs", "rank": 0, "score": 190401.9789481971 }, { "content": "#[derive(Debug, Error)]\n\n#[error(\"Error querying backend for query {ref_id}: {source}\")]\n\nstruct QueryError {\n\n source: data::Error,\n\n ref_id: String,\n\n}\n\n\n\nimpl backend::DataQueryError for QueryError {\n\n fn ref_id(self) -> String {\n\n self.ref_id\n\n }\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl backend::DataService for MyPlugin {\n\n\n\n /// The type of error that could be returned by an individual query.\n\n type QueryError = QueryError;\n\n\n\n /// The type of iterator we're returning.\n\n ///\n\n /// In general the concrete type will be impossible to name in advance,\n", "file_path": "src/backend/mod.rs", "rank": 1, "score": 175173.18570870493 }, { "content": "type ConvertFromResult<T> = std::result::Result<T, ConvertFromError>;\n\n\n\npub(self) fn read_json(jdoc: &[u8]) -> ConvertFromResult<Value> {\n\n // Grafana sometimes sends an empty string instead of an empty map, probably\n\n // because of some zero value Golang stuff?\n\n (!jdoc.is_empty())\n\n .then(|| {\n\n serde_json::from_slice(jdoc).map_err(|err| ConvertFromError::InvalidJson {\n\n err,\n\n json: String::from_utf8(jdoc.to_vec()).unwrap_or_else(|_| {\n\n 
format!(\"non-utf8 string: {}\", String::from_utf8_lossy(jdoc))\n\n }),\n\n })\n\n })\n\n .unwrap_or_else(|| Ok(serde_json::json!({})))\n\n}\n\n\n\n/// The time range for a query.\n\n#[derive(Clone, Debug)]\n\n#[non_exhaustive]\n", "file_path": "src/backend/mod.rs", "rank": 2, "score": 156583.1454670506 }, { "content": "fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream {\n\n tokens.extend(TokenStream::from(error.into_compile_error()));\n\n tokens\n\n}\n\n\n", "file_path": "grafana-plugin-sdk-macros/src/lib.rs", "rank": 3, "score": 154982.4391764912 }, { "content": "#[derive(Debug)]\n\nstruct Data {\n\n values: Vec<Arc<dyn Array>>,\n\n}\n\n\n\nimpl TryFrom<(&'_ Schema, RawData<'_>)> for Data {\n\n type Error = serde_json::Error;\n\n\n\n /// Create `Data` from the schema and data objects found in the JSON representation.\n\n ///\n\n /// This handles deserializing each of the arrays in `values` with the correct datatypes,\n\n /// replacing any 'entities' (`NaN`, `Inf`, `-Inf`) in those arrays, and converting any\n\n /// which require it (e.g. 
timestamps).\n\n fn try_from((schema, data): (&'_ Schema, RawData<'_>)) -> Result<Self, Self::Error> {\n\n // Handle entity replacement, return values.\n\n let fields = schema.fields.iter();\n\n let values = data.values.into_iter();\n\n let entities = data\n\n .entities\n\n .unwrap_or_else(|| vec![None; fields.len()])\n\n .into_iter();\n", "file_path": "src/data/frame/de.rs", "rank": 4, "score": 150719.60814677368 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut config = prost_build::Config::new();\n\n config.bytes(&[\n\n \".pluginv2.CallResourceRequest\",\n\n \".pluginv2.CallResourceResponse\",\n\n \".pluginv2.RunStreamRequest\",\n\n \".pluginv2.SubscribeStreamRequest\",\n\n ]);\n\n Ok(tonic_build::configure().compile_with_config(\n\n config,\n\n &[\"./vendor/proto/backend.proto\"],\n\n &[\"./vendor/proto\"],\n\n )?)\n\n}\n", "file_path": "build.rs", "rank": 7, "score": 139518.81164385538 }, { "content": "struct SerializableFrameDataValues<'a, 'b> {\n\n fields: &'a [Field],\n\n entities: &'b [RefCell<Option<Entities>>],\n\n}\n\n\n\nimpl<'a, 'b> Serialize for SerializableFrameDataValues<'a, 'b> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut seq = serializer.serialize_seq(Some(self.fields.len()))?;\n\n for (arr, e) in self.fields.iter().zip(self.entities.iter()) {\n\n seq.serialize_element(&SerializableArray(&*arr.values, e))?;\n\n }\n\n seq.end()\n\n }\n\n}\n\n\n", "file_path": "src/data/frame/ser.rs", "rank": 8, "score": 138005.55512096934 }, { "content": "fn parse_services(list: &AttributeArgs) -> Result<Vec<String>, syn::Error> {\n\n list.iter()\n\n .map(|item| match item {\n\n syn::NestedMeta::Meta(syn::Meta::Path(path)) => {\n\n let svc = path\n\n .get_ident()\n\n .ok_or_else(|| syn::Error::new_spanned(&list, \"Must have specified ident\"))?\n\n .to_string()\n\n .to_lowercase();\n\n if ![\"data\", \"diagnostics\", \"resource\", 
\"stream\"].contains(&svc.as_str()) {\n\n let msg = format!(\n\n \"invalid service {}; must be one of `data`, `diagnostics`, `resource`, `stream`\",\n\n svc,\n\n );\n\n return Err(syn::Error::new_spanned(path, msg))\n\n }\n\n Ok(svc)\n\n },\n\n other => {\n\n let msg = \"invalid service specification: must contain one or more of `data`, `diagnostics`, `resource`, `stream`\";\n\n Err(syn::Error::new_spanned(other, msg))\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "grafana-plugin-sdk-macros/src/lib.rs", "rank": 9, "score": 133160.58973879775 }, { "content": "struct SerializableSpan<'a, 'b, Span, N>(\n\n &'b tracing_subscriber::registry::SpanRef<'a, Span>,\n\n std::marker::PhantomData<N>,\n\n)\n\nwhere\n\n Span: for<'lookup> tracing_subscriber::registry::LookupSpan<'lookup>,\n\n N: for<'writer> FormatFields<'writer> + 'static;\n\n\n\nimpl<'a, 'b, Span, N> serde::ser::Serialize for SerializableSpan<'a, 'b, Span, N>\n\nwhere\n\n Span: for<'lookup> tracing_subscriber::registry::LookupSpan<'lookup>,\n\n N: for<'writer> FormatFields<'writer> + 'static,\n\n{\n\n fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>\n\n where\n\n Ser: serde::ser::Serializer,\n\n {\n\n let mut serializer = serializer.serialize_map(None)?;\n\n\n\n let ext = self.0.extensions();\n", "file_path": "src/backend/tracing_fmt.rs", "rank": 10, "score": 128342.47218197273 }, { "content": "# use std::sync::Arc;\n\n#\n\n# use grafana_plugin_sdk::backend;\n\n#\n\n# #[derive(Clone)]\n\n# struct Plugin;\n\n#\n\n# #[backend::async_trait]\n\n# impl backend::ResourceService for Plugin {\n\n# type Error = Arc<dyn std::error::Error + Send + Sync>;\n\n# type InitialResponse = Vec<u8>;\n\n# type Stream = backend::BoxResourceStream<Self::Error>;\n\n# async fn call_resource(&self, r: backend::CallResourceRequest) -> (Result<Self::InitialResponse, Self::Error>, Self::Stream) {\n\n# todo!()\n\n# }\n\n# }\n\n#\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let fut = async 
{\n\n let listener = ::grafana_plugin_sdk::backend::initialize().await?;\n\n let service = Plugin;\n\n ::grafana_plugin_sdk::backend::Plugin::new()\n\n .resource_service(service.clone())\n\n .init_subscriber(true)\n\n .shutdown_handler(\"127.0.0.1:10001\".parse().expect(\"could not parse shutdown handler as SocketAddr\"))\n\n .start(listener)\n\n .await?;\n\n Ok::<_, Box<dyn std::error::Error>>(())\n\n };\n\n # if false {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .thread_name(\"grafana-plugin-worker-thread\")\n\n .enable_all()\n\n .build()\n\n .expect(\"create tokio runtime\")\n\n .block_on(fut)?;\n\n # }\n\n Ok(())\n\n}\n\n```\n\n*/\n", "file_path": "grafana-plugin-sdk-macros/src/lib.rs", "rank": 11, "score": 126103.87357970884 }, { "content": "/// Trait describing how an error should be converted into a `http::Response<Bytes>`.\n\npub trait ErrIntoHttpResponse: std::error::Error + Sized {\n\n /// Convert this error into a HTTP response.\n\n ///\n\n /// The default implementation returns a response with status code 500 (Internal Server Error)\n\n /// and the `Display` implementation of `Self` inside the `\"error\"` field of a JSON object\n\n /// in the body.\n\n ///\n\n /// Implementors may wish to override this if they wish to provide an alternative status code\n\n /// depending on, for example, the type of error returned from a resource call.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// use bytes::Bytes;\n\n /// use grafana_plugin_sdk::backend;\n\n /// use thiserror::Error;\n\n ///\n\n /// #[derive(Debug, Error)]\n\n /// enum ResourceError {\n\n /// #[error(\"HTTP error: {0}\")]\n", "file_path": "src/backend/resource.rs", "rank": 12, "score": 119567.16326987864 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct RawData<'a> {\n\n #[serde(borrow, default)]\n\n values: Vec<&'a serde_json::value::RawValue>,\n\n #[serde(default)]\n\n entities: Option<Vec<Option<Entities>>>,\n\n}\n\n\n", "file_path": "src/data/frame/de.rs", "rank": 13, 
"score": 118314.02978287739 }, { "content": "type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// The scope of a channel.\n\n///\n\n/// This determines the purpose of a channel in Grafana Live.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Scope {\n\n /// Built-in real-time features of Grafana core.\n\n Grafana,\n\n /// Passes control to a plugin.\n\n Plugin,\n\n /// Passes control to a datasource plugin.\n\n Datasource,\n\n /// A managed data frame stream.\n\n Stream,\n\n}\n\n\n\nimpl FromStr for Scope {\n\n type Err = Error;\n\n fn from_str(s: &str) -> Result<Self> {\n", "file_path": "src/live/channel.rs", "rank": 14, "score": 116756.18345987718 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Schema {\n\n name: String,\n\n ref_id: String,\n\n #[serde(default)]\n\n fields: Vec<Field>,\n\n #[serde(default)]\n\n meta: Option<Metadata>,\n\n}\n\n\n", "file_path": "src/data/frame/de.rs", "rank": 15, "score": 116519.68812627898 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Field {\n\n #[serde(default)]\n\n name: String,\n\n #[serde(default)]\n\n labels: BTreeMap<String, String>,\n\n #[serde(default)]\n\n config: Option<FieldConfig>,\n\n #[serde(rename = \"type\")]\n\n _type: SimpleType,\n\n type_info: TypeInfo,\n\n}\n\n\n", "file_path": "src/data/frame/de.rs", "rank": 16, "score": 116519.68812627898 }, { "content": "#[doc(hidden)]\n\npub fn async_main<R>(fut: impl std::future::Future<Output = R> + Send) -> R {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .thread_name(\"grafana-plugin-worker-thread\")\n\n .enable_all()\n\n .build()\n\n .expect(\"create tokio runtime\")\n\n .block_on(fut)\n\n}\n", "file_path": "src/lib.rs", "rank": 17, "score": 115979.1027690189 }, { "content": "struct TimestampProcessor;\n\nimpl ElementProcessor<i64> for TimestampProcessor {\n\n fn process_element(el: i64) -> i64 {\n\n el * 1_000_000\n\n }\n\n}\n\n\n\n/// Helper 
struct used to deserialize a sequence into an Arrow `Array`.\n", "file_path": "src/data/frame/de.rs", "rank": 18, "score": 112696.78076358153 }, { "content": "fn primitive_array_iter<T>(array: &dyn Array) -> impl Iterator<Item = Option<&T>>\n\nwhere\n\n T: NativeType + Clone,\n\n{\n\n array\n\n .as_any()\n\n .downcast_ref::<PrimitiveArray<T>>()\n\n .unwrap()\n\n .iter()\n\n}\n\n\n", "file_path": "src/data/frame/ser.rs", "rank": 19, "score": 104075.95143732782 }, { "content": "/// Parse a JSON array containing elements of `U` into a mutable Arrow array `T`.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an error if the string is invalid JSON, if the elements of\n\n/// the array are not of type `U`, or if the Arrow buffer could not be\n\n/// created.\n\nfn parse_array<'de, T, U, V>(s: &'de str) -> Result<T, serde_json::Error>\n\nwhere\n\n T: Default + MutableArray + TryPush<Option<U>> + WithCapacity,\n\n U: Deserialize<'de>,\n\n V: ElementProcessor<U>,\n\n{\n\n Ok(from_str::<DeArray<T, U, V>>(s)?.array)\n\n}\n\n\n", "file_path": "src/data/frame/de.rs", "rank": 20, "score": 99063.69305892668 }, { "content": "fn parse_string(val: syn::Lit, span: Span, field: &str) -> Result<String, syn::Error> {\n\n match val {\n\n syn::Lit::Str(s) => Ok(s.value()),\n\n syn::Lit::Verbatim(s) => Ok(s.to_string()),\n\n _ => Err(syn::Error::new(\n\n span,\n\n format!(\"Failed to parse value of `{}` as string.\", field),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "grafana-plugin-sdk-macros/src/lib.rs", "rank": 22, "score": 96195.17888301889 }, { "content": "#[derive(Debug)]\n\nstruct DeArray<T, U, V = ()> {\n\n array: T,\n\n u: PhantomData<U>,\n\n v: PhantomData<V>,\n\n}\n\n\n\n// Deserialization for mutable Arrow arrays.\n\n//\n\n// Constructs a mutable array from a sequence of valid values.\n\n// All of the `Mutable` variants of Arrow arrays implement `TryPush<Option<U>>`\n\n// for some relevant `U`, and here we just impose that the `U` is `Deserialize`\n\n// and gradually build up the 
array.\n\nimpl<'de, T, U, V> Deserialize<'de> for DeArray<T, U, V>\n\nwhere\n\n T: Default + TryPush<Option<U>> + WithCapacity,\n\n U: Deserialize<'de>,\n\n V: ElementProcessor<U>,\n\n{\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n", "file_path": "src/data/frame/de.rs", "rank": 23, "score": 94906.64090321153 }, { "content": "fn serialize_floats_and_collect_entities<S, T>(\n\n serializer: S,\n\n array: &dyn Array,\n\n entities_ref: &RefCell<Option<Entities>>,\n\n) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n T: NativeType + Float + Serialize,\n\n{\n\n let array = array.as_any().downcast_ref::<PrimitiveArray<T>>().unwrap();\n\n let mut seq = serializer.serialize_seq(Some(array.len()))?;\n\n let mut entities = Entities::default();\n\n for (i, el) in array.iter().enumerate() {\n\n seq.serialize_element(&el)?;\n\n match el {\n\n Some(x) if x.is_nan() => entities.nan.push(i),\n\n Some(x) if x.is_infinite() && x.is_sign_positive() => entities.inf.push(i),\n\n Some(x) if x.is_infinite() && x.is_sign_negative() => entities.neg_inf.push(i),\n\n _ => {}\n\n }\n", "file_path": "src/data/frame/ser.rs", "rank": 24, "score": 93844.34083205441 }, { "content": "/// Parse a JSON array containing elements of `U` into a mutable Arrow array `T`,\n\n/// then substitutes any sentinel `entities`.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an error if the string is invalid JSON, if the elements of\n\n/// the array are not of type `U`, or if the Arrow buffer could not be\n\n/// created.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if any of the indexes in `entities` are greater than the length\n\n/// of the parsed array.\n\nfn parse_array_with_entities<'de, T, U>(\n\n s: &'de str,\n\n entities: Option<Entities>,\n\n) -> Result<T, serde_json::Error>\n\nwhere\n\n T: Default + MutableArray + SetArray<Option<U>> + TryPush<Option<U>> + WithCapacity,\n\n U: Deserialize<'de> + Float,\n\n{\n\n let mut arr = from_str::<DeArray<T, U>>(s)?.array;\n\n if 
let Some(e) = entities {\n\n e.nan.iter().for_each(|idx| arr.set(*idx, Some(U::nan())));\n\n e.inf\n\n .iter()\n\n .for_each(|idx| arr.set(*idx, Some(U::infinity())));\n\n e.neg_inf\n\n .iter()\n\n .for_each(|idx| arr.set(*idx, Some(U::neg_infinity())));\n\n }\n\n Ok(arr)\n\n}\n\n\n", "file_path": "src/data/frame/de.rs", "rank": 26, "score": 91860.37133735274 }, { "content": "#[derive(Clone, Debug)]\n\nstruct MyPlugin;\n\n\n\n/// An error that may occur during a query.\n\n///\n\n/// This must store the `ref_id` of the query so that Grafana can line it up.\n", "file_path": "src/backend/mod.rs", "rank": 27, "score": 88770.61608026629 }, { "content": "/// Create a `tracing` [`Layer`][tracing_subscriber::Layer] configured to log events in a format understood by Grafana.\n\n///\n\n/// The returned layer should be installed into the tracing subscriber registry, with an optional env filter.\n\n///\n\n/// # Example\n\n///\n\n/// Installing the layer with the default `EnvFilter` (using the `RUST_LOG` environment variable):\n\n///\n\n/// ```rust\n\n/// use grafana_plugin_sdk::backend;\n\n/// use tracing_subscriber::{prelude::*, EnvFilter};\n\n///\n\n/// tracing_subscriber::registry()\n\n/// .with(backend::layer())\n\n/// .with(EnvFilter::from_default_env())\n\n/// .init();\n\n/// ```\n\npub fn layer<S: tracing::Subscriber + for<'a> LookupSpan<'a>>(\n\n) -> tracing_subscriber::fmt::Layer<S, JsonFields, tracing_fmt::HCLogJson, fn() -> io::Stderr> {\n\n tracing_subscriber::fmt::layer()\n\n .with_timer(UtcTime::new(HCLOG_TIME_FORMAT))\n\n .with_writer(io::stderr as fn() -> std::io::Stderr)\n\n .event_format(tracing_fmt::HCLogJson::default())\n\n .fmt_fields(JsonFields::new())\n\n}\n\n\n\n/// Errors returned by plugin backends.\n\n#[derive(Debug, Error)]\n\n#[non_exhaustive]\n\npub enum Error {\n\n /// An error occurred converting data from Grafana.\n\n #[error(\"error converting from Grafana: {0}\")]\n\n ConvertFrom(#[from] ConvertFromError),\n\n /// An error occurred converting 
data to Grafana.\n\n #[error(\"error converting to Grafana: {0}\")]\n\n ConvertTo(#[from] ConvertToError),\n\n /// An error occurred while starting the plugin.\n", "file_path": "src/backend/mod.rs", "rank": 28, "score": 87320.36045257938 }, { "content": "struct ShutdownHandler {\n\n address: SocketAddr,\n\n}\n\n\n\nimpl ShutdownHandler {\n\n #[must_use]\n\n fn new(address: SocketAddr) -> Self {\n\n Self { address }\n\n }\n\n\n\n fn spawn(self) -> impl std::future::Future<Output = ()> {\n\n tokio::spawn(async move {\n\n let listener = TcpListener::bind(&self.address).await.map_err(|e| {\n\n tracing::warn!(\"Error creating shutdown handler: {}\", e);\n\n e\n\n })?;\n\n tracing::debug!(address = %self.address, \"Shutdown handler started on {}\", &self.address);\n\n Ok::<_, std::io::Error>(listener.accept().await.map(|_| ()))\n\n })\n\n .map(|_| ())\n", "file_path": "src/backend/mod.rs", "rank": 29, "score": 85983.77254180558 }, { "content": "struct SerializableArray<'a>(&'a dyn Array, &'a RefCell<Option<Entities>>);\n\n\n\nimpl<'a> Serialize for SerializableArray<'a> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let array = self.0;\n\n let len = array.len();\n\n match array.data_type() {\n\n DataType::Null => {\n\n serializer.collect_seq(std::iter::repeat::<Option<()>>(None).take(len))\n\n }\n\n DataType::Boolean => serializer.collect_seq(\n\n array\n\n .as_any()\n\n .downcast_ref::<BooleanArray>()\n\n .unwrap()\n\n .iter(),\n\n ),\n", "file_path": "src/data/frame/ser.rs", "rank": 30, "score": 81264.37537409685 }, { "content": "/// A bridge between `fmt::Write` and `io::Write`.\n\n///\n\n/// This is needed because tracing-subscriber's FormatEvent expects a fmt::Write\n\n/// while serde_json's Serializer expects an io::Write.\n\nstruct WriteAdaptor<'a> {\n\n fmt_write: &'a mut dyn fmt::Write,\n\n}\n\n\n\nimpl<'a> WriteAdaptor<'a> {\n\n fn new(fmt_write: &'a mut dyn fmt::Write) -> Self {\n\n Self { 
fmt_write }\n\n }\n\n}\n\n\n\nimpl<'a> io::Write for WriteAdaptor<'a> {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n let s =\n\n std::str::from_utf8(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;\n\n\n\n self.fmt_write\n\n .write_str(s)\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;\n\n\n\n Ok(s.as_bytes().len())\n", "file_path": "src/backend/tracing_fmt.rs", "rank": 31, "score": 80011.3883269194 }, { "content": "fn serialize_conf_float64<S: Serializer>(val: &Option<f64>, s: S) -> Result<S::Ok, S::Error> {\n\n if let Some(f) = val {\n\n if f.is_nan() || f.is_infinite() {\n\n s.serialize_none()\n\n } else {\n\n s.serialize_f64(*f)\n\n }\n\n } else {\n\n s.serialize_none()\n\n }\n\n}\n", "file_path": "src/data/mod.rs", "rank": 32, "score": 77787.36146628794 }, { "content": "fn main() {}\n\n\n", "file_path": "grafana-plugin-sdk-macros/tests/ui-pass/init_subscriber.rs", "rank": 33, "score": 76604.143374898 }, { "content": "fn main() {}\n\n\n\n\n", "file_path": "grafana-plugin-sdk-macros/tests/ui-fail/non_impl_service.rs", "rank": 34, "score": 74836.56791982913 }, { "content": "fn main() {}\n", "file_path": "grafana-plugin-sdk-macros/tests/ui-fail/invalid_init_subscriber.rs", "rank": 35, "score": 74803.57102842497 }, { "content": "#[proc_macro_attribute]\n\npub fn main(args: TokenStream, item: TokenStream) -> TokenStream {\n\n // If any of the steps for this macro fail, we still want to expand to an item that is as close\n\n // to the expected output as possible. 
This helps out IDEs such that completions and other\n\n // related features keep working.\n\n let input: syn::ItemFn = match syn::parse(item.clone()) {\n\n Ok(it) => it,\n\n Err(e) => return token_stream_with_error(item, e),\n\n };\n\n\n\n let config = if input.sig.ident != \"plugin\" {\n\n let msg = \"the plugin function must be named 'plugin'\";\n\n Err(syn::Error::new_spanned(&input.sig.ident, msg))\n\n } else if !input.sig.inputs.is_empty() {\n\n let msg = \"the plugin function cannot accept arguments\";\n\n Err(syn::Error::new_spanned(&input.sig.inputs, msg))\n\n } else {\n\n AttributeArgs::parse_terminated\n\n .parse(args)\n\n .and_then(|args| build_config(input.clone(), args))\n\n };\n\n\n\n match config {\n\n Ok(c) => parse_knobs(input, c),\n\n Err(e) => token_stream_with_error(parse_knobs(input, DEFAULT_ERROR_CONFIG), e),\n\n }\n\n}\n", "file_path": "grafana-plugin-sdk-macros/src/lib.rs", "rank": 36, "score": 70989.00578054553 }, { "content": "#[tonic::async_trait]\n\npub trait StreamService {\n\n /// Handle requests to begin a subscription to a plugin or datasource managed channel path.\n\n ///\n\n ///\n\n /// This function is called for _every_ subscriber to a stream. Implementations should\n\n /// check the subscribe permissions of the incoming request, and can choose to return some\n\n /// initial data to prepopulate the stream.\n\n ///\n\n /// `run_stream` will generally be called shortly after returning a response with\n\n /// [`SubscribeStreamStatus::Ok`]; this is responsible for streaming any data after\n\n /// the [`initial_data`][SubscribeStreamResponse::initial_data].\n\n async fn subscribe_stream(\n\n &self,\n\n request: SubscribeStreamRequest,\n\n ) -> Result<SubscribeStreamResponse, Self::Error>;\n\n\n\n /// The type of JSON values returned by this stream service.\n\n ///\n\n /// Each [`StreamPacket`] can return either a [`data::Frame`] or some arbitary JSON. 
This\n\n /// associated type allows the JSON value to be statically typed, if desired.\n", "file_path": "src/backend/stream.rs", "rank": 37, "score": 68485.59160944431 }, { "content": "fn build_config(input: syn::ItemFn, args: AttributeArgs) -> Result<FinalConfig, syn::Error> {\n\n if input.sig.asyncness.is_none() {\n\n let msg = \"the `async` keyword is missing from the function declaration\";\n\n return Err(syn::Error::new_spanned(input.sig.fn_token, msg));\n\n }\n\n let mut config = Configuration::new();\n\n for arg in &args {\n\n match arg {\n\n syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue)) => {\n\n let ident = namevalue\n\n .path\n\n .get_ident()\n\n .ok_or_else(|| {\n\n syn::Error::new_spanned(&namevalue, \"Must have specified ident\")\n\n })?\n\n .to_string()\n\n .to_lowercase();\n\n match ident.as_str() {\n\n \"init_subscriber\" => config.set_init_subscriber(\n\n namevalue.lit.clone(),\n", "file_path": "grafana-plugin-sdk-macros/src/lib.rs", "rank": 38, "score": 68269.2293333682 }, { "content": "#[cfg_attr(docsrs, doc(notable_trait))]\n\npub trait IntoFrame {\n\n /// Create a [`Frame`] with the given name from `self`.\n\n fn into_frame(self, name: impl Into<String>) -> Frame;\n\n}\n\n\n\nimpl<T> IntoFrame for T\n\nwhere\n\n T: IntoIterator<Item = Field>,\n\n{\n\n fn into_frame(self, name: impl Into<String>) -> Frame {\n\n Frame {\n\n name: name.into(),\n\n fields: self.into_iter().collect(),\n\n meta: None,\n\n ref_id: None,\n\n }\n\n }\n\n}\n\n\n\n/// Convenience trait for creating a [`Frame`] from an iterator of [`Field`]s.\n\n///\n\n/// This is the inverse of [`IntoFrame`] and is defined for all implementors of that trait.\n", "file_path": "src/data/frame/mod.rs", "rank": 39, "score": 67188.6428278448 }, { "content": "///\n\n/// type Error = StreamError;\n\n/// type Stream = backend::BoxRunStream<Self::Error>;\n\n///\n\n/// /// Begin streaming data for a request.\n\n/// ///\n\n/// /// This example just creates an in-memory `Frame` in each loop 
iteration,\n\n/// /// sends an updated version of the frame once per second, and updates a loop variable\n\n/// /// so that each frame is different.\n\n/// async fn run_stream(&self, _request: backend::RunStreamRequest) -> Result<Self::Stream, Self::Error> {\n\n/// info!(\"Running stream\");\n\n/// let mut x = 0u32;\n\n/// let n = 3;\n\n/// let mut frame = data::Frame::new(\"foo\").with_field((x..x + n).into_field(\"x\"));\n\n/// Ok(Box::pin(\n\n/// async_stream::try_stream! {\n\n/// loop {\n\n/// frame.fields_mut()[0].set_values(x..x + n);\n\n/// let packet = backend::StreamPacket::from_frame(frame.check()?)?;\n\n/// debug!(\"Yielding frame from {} to {}\", x, x + n);\n", "file_path": "src/backend/stream.rs", "rank": 40, "score": 67142.36684742499 }, { "content": "\n\n/// A packet of data to be streamed back to the subscribed client.\n\n///\n\n/// Such data can be:\n\n/// - a [`Frame`][data::Frame], which will be serialized to JSON before being sent back to the client\n\n/// - arbitrary JSON\n\n/// - arbitrary bytes.\n\n///\n\n/// The `J` type parameter on this enum is only relevant when JSON data\n\n/// is being streamed back,\n\n#[derive(Debug)]\n\npub struct StreamPacket<J = ()> {\n\n data: Vec<u8>,\n\n _p: std::marker::PhantomData<J>,\n\n}\n\n\n\nimpl<J> StreamPacket<J> {\n\n /// Create a `StreamPacket` representing a `Frame`.\n\n pub fn from_frame(frame: data::CheckedFrame<'_>) -> Result<Self, ConvertToError> {\n\n Ok(Self {\n", "file_path": "src/backend/stream.rs", "rank": 41, "score": 67135.52448911524 }, { "content": " /// If the request originates from a datasource instance, this will\n\n /// include details about the datasource instance in the\n\n /// `data_source_instance_settings` field.\n\n pub plugin_context: PluginContext,\n\n\n\n /// The subscription channel path that the request wishes to subscribe to.\n\n pub path: Path,\n\n\n\n /// Optional raw data.\n\n ///\n\n /// This may be used as an extra payload supplied upon subscription;\n\n /// for 
example, this may contain a JSON query object. This will be\n\n /// empty if not supplied in the query.\n\n pub data: Bytes,\n\n}\n\n\n\nimpl TryFrom<pluginv2::SubscribeStreamRequest> for SubscribeStreamRequest {\n\n type Error = ConvertFromError;\n\n fn try_from(other: pluginv2::SubscribeStreamRequest) -> Result<Self, Self::Error> {\n\n Ok(Self {\n", "file_path": "src/backend/stream.rs", "rank": 42, "score": 67135.07811644916 }, { "content": " };\n\n response.set_status(other.status.into());\n\n Ok(response)\n\n }\n\n}\n\n\n\n/// Trait for plugins that wish to provide uni- or bi-directional streaming.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use std::{sync::Arc, time::Duration};\n\n///\n\n/// use grafana_plugin_sdk::{backend, data, prelude::*};\n\n/// use thiserror::Error;\n\n/// use tokio::sync::RwLock;\n\n/// use tokio_stream::StreamExt;\n\n/// use tracing::{debug, info};\n\n///\n\n/// struct MyPlugin;\n", "file_path": "src/backend/stream.rs", "rank": 43, "score": 67134.81951865499 }, { "content": "//! 
SDK types and traits relevant to plugins that stream data.\n\nuse std::pin::Pin;\n\n\n\nuse futures_util::{Stream, StreamExt, TryStreamExt};\n\nuse prost::bytes::Bytes;\n\nuse serde::Serialize;\n\n\n\nuse crate::{\n\n backend::{ConvertFromError, ConvertToError, PluginContext},\n\n data,\n\n live::Path,\n\n pluginv2,\n\n};\n\n\n\n/// A request to subscribe to a stream.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub struct SubscribeStreamRequest {\n\n /// Details of the plugin instance from which the request originated.\n\n ///\n", "file_path": "src/backend/stream.rs", "rank": 44, "score": 67134.3909960657 }, { "content": " pub fn from_frame(\n\n frame: data::CheckedFrame<'_>,\n\n include: data::FrameInclude,\n\n ) -> Result<Self, ConvertToError> {\n\n Ok(Self {\n\n data: frame\n\n .to_json(include)\n\n .map_err(|source| ConvertToError::InvalidFrame { source })?,\n\n })\n\n }\n\n\n\n /// Create some initial data representing some JSON.\n\n pub fn from_json(json: &serde_json::Value) -> Result<Self, ConvertToError> {\n\n Ok(Self {\n\n data: serde_json::to_vec(json).map_err(|err| ConvertToError::InvalidJson { err })?,\n\n })\n\n }\n\n}\n\n\n\n/// The response to a stream subscription request.\n", "file_path": "src/backend/stream.rs", "rank": 45, "score": 67134.38480306676 }, { "content": " ///\n\n /// This may be used as an extra payload supplied upon subscription;\n\n /// for example, this may contain a JSON query object. 
This will be\n\n /// empty if not supplied in the query.\n\n pub data: Bytes,\n\n}\n\n\n\nimpl TryFrom<pluginv2::RunStreamRequest> for RunStreamRequest {\n\n type Error = ConvertFromError;\n\n fn try_from(other: pluginv2::RunStreamRequest) -> Result<Self, Self::Error> {\n\n Ok(Self {\n\n plugin_context: other\n\n .plugin_context\n\n .ok_or(ConvertFromError::MissingPluginContext)\n\n .and_then(TryInto::try_into)?,\n\n path: Path::new(other.path)?,\n\n data: other.data,\n\n })\n\n }\n\n}\n", "file_path": "src/backend/stream.rs", "rank": 46, "score": 67133.77267792325 }, { "content": "/// ///\n\n/// /// If we're not returning JSON we can just use `()`.\n\n/// type JsonValue = ();\n\n///\n\n/// /// Handle a request to subscribe to a stream.\n\n/// ///\n\n/// /// Here we just check that the path matches some fixed value\n\n/// /// and return `NotFound` if not.\n\n/// async fn subscribe_stream(\n\n/// &self,\n\n/// request: backend::SubscribeStreamRequest,\n\n/// ) -> Result<backend::SubscribeStreamResponse, Self::Error> {\n\n/// let response = if request.path.as_str() == \"stream\" {\n\n/// backend::SubscribeStreamResponse::ok(None)\n\n/// } else {\n\n/// backend::SubscribeStreamResponse::not_found()\n\n/// };\n\n/// info!(path = %request.path, \"Subscribing to stream\");\n\n/// Ok(response)\n\n/// }\n", "file_path": "src/backend/stream.rs", "rank": 47, "score": 67133.27935535811 }, { "content": " data: frame\n\n .to_json(data::FrameInclude::All)\n\n .map_err(|source| ConvertToError::InvalidFrame { source })?,\n\n _p: std::marker::PhantomData,\n\n })\n\n }\n\n\n\n /// Create a `StreamPacket` representing some JSON.\n\n pub fn from_json(json: &J) -> Result<Self, ConvertToError>\n\n where\n\n J: Serialize,\n\n {\n\n Ok(Self {\n\n data: serde_json::to_vec(json).map_err(|err| ConvertToError::InvalidJson { err })?,\n\n _p: std::marker::PhantomData,\n\n })\n\n }\n\n\n\n /// Create a `StreamPacket` from arbitrary bytes.\n\n pub fn from_bytes(data: Vec<u8>) -> Self {\n", 
"file_path": "src/backend/stream.rs", "rank": 48, "score": 67133.23370633654 }, { "content": "///\n\n/// #[derive(Debug, Error)]\n\n/// #[error(\"Error streaming data\")]\n\n/// struct StreamError;\n\n///\n\n/// impl From<data::Error> for StreamError {\n\n/// fn from(_other: data::Error) -> StreamError {\n\n/// StreamError\n\n/// }\n\n/// }\n\n///\n\n/// impl From<backend::ConvertToError> for StreamError {\n\n/// fn from(_other: backend::ConvertToError) -> StreamError {\n\n/// StreamError\n\n/// }\n\n/// }\n\n///\n\n/// #[backend::async_trait]\n\n/// impl backend::StreamService for MyPlugin {\n\n/// /// The type of JSON value we might return in our `initial_data`.\n", "file_path": "src/backend/stream.rs", "rank": 49, "score": 67132.05740716233 }, { "content": "}\n\n\n\nimpl From<SubscribeStreamStatus> for pluginv2::subscribe_stream_response::Status {\n\n fn from(other: SubscribeStreamStatus) -> Self {\n\n match other {\n\n SubscribeStreamStatus::Ok => Self::Ok,\n\n SubscribeStreamStatus::NotFound => Self::NotFound,\n\n SubscribeStreamStatus::PermissionDenied => Self::PermissionDenied,\n\n }\n\n }\n\n}\n\n\n\n/// Data returned from an initial request to subscribe to a stream.\n\n#[derive(Debug)]\n\npub struct InitialData {\n\n data: Vec<u8>,\n\n}\n\n\n\nimpl InitialData {\n\n /// Create some initial data representing a `Frame`.\n", "file_path": "src/backend/stream.rs", "rank": 50, "score": 67131.73348141623 }, { "content": " /// include details about the datasource instance in the\n\n /// `data_source_instance_settings` field.\n\n pub plugin_context: PluginContext,\n\n /// The subscription path; see module level comments for details.\n\n pub path: Path,\n\n /// Data to be published to the stream.\n\n pub data: serde_json::Value,\n\n}\n\n\n\nimpl TryFrom<pluginv2::PublishStreamRequest> for PublishStreamRequest {\n\n type Error = ConvertFromError;\n\n fn try_from(other: pluginv2::PublishStreamRequest) -> Result<Self, Self::Error> {\n\n Ok(Self {\n\n plugin_context: 
other\n\n .plugin_context\n\n .ok_or(ConvertFromError::MissingPluginContext)\n\n .and_then(TryInto::try_into)?,\n\n path: Path::new(other.path)?,\n\n data: super::read_json(&other.data)?,\n\n })\n", "file_path": "src/backend/stream.rs", "rank": 51, "score": 67129.28835518716 }, { "content": " T: Send + Sync + StreamService + 'static,\n\n{\n\n #[tracing::instrument(skip(self), level = \"debug\")]\n\n async fn subscribe_stream(\n\n &self,\n\n request: tonic::Request<pluginv2::SubscribeStreamRequest>,\n\n ) -> Result<tonic::Response<pluginv2::SubscribeStreamResponse>, tonic::Status> {\n\n let request = request\n\n .into_inner()\n\n .try_into()\n\n .map_err(ConvertFromError::into_tonic_status)?;\n\n let response = StreamService::subscribe_stream(self, request)\n\n .await\n\n .map_err(|e| tonic::Status::internal(e.to_string()))?\n\n .into();\n\n Ok(tonic::Response::new(response))\n\n }\n\n\n\n type RunStreamStream = Pin<\n\n Box<dyn futures_core::Stream<Item = Result<pluginv2::StreamPacket, tonic::Status>> + Send>,\n", "file_path": "src/backend/stream.rs", "rank": 52, "score": 67128.90844265553 }, { "content": "/// yield packet;\n\n/// x += n;\n\n/// }\n\n/// }\n\n/// .throttle(Duration::from_secs(1)),\n\n/// ))\n\n/// }\n\n///\n\n/// /// Handle a request to publish data to a stream.\n\n/// ///\n\n/// /// Currently unimplemented in this example, but the functionality _should_ work.\n\n/// async fn publish_stream(\n\n/// &self,\n\n/// _request: backend::PublishStreamRequest,\n\n/// ) -> Result<backend::PublishStreamResponse, Self::Error> {\n\n/// info!(\"Publishing to stream\");\n\n/// todo!()\n\n/// }\n\n/// }\n\n/// ```\n\n#[tonic::async_trait]\n", "file_path": "src/backend/stream.rs", "rank": 53, "score": 67128.37624818954 }, { "content": " initial_data: None,\n\n }\n\n }\n\n\n\n /// Create a `SubscribeStreamResponse` with status [`SubscribeStreamStatus::PermissionDenied`].\n\n ///\n\n /// This should be returned when the caller is not permitted to access the 
requested path.\n\n pub fn permission_denied() -> Self {\n\n Self {\n\n status: SubscribeStreamStatus::PermissionDenied,\n\n initial_data: None,\n\n }\n\n }\n\n}\n\n\n\nimpl From<SubscribeStreamResponse> for pluginv2::SubscribeStreamResponse {\n\n fn from(other: SubscribeStreamResponse) -> Self {\n\n let mut response = pluginv2::SubscribeStreamResponse {\n\n status: 0,\n\n data: other.initial_data.map(|x| x.data).unwrap_or_default(),\n", "file_path": "src/backend/stream.rs", "rank": 54, "score": 67127.71361686893 }, { "content": "///\n\n/// This includes a status and some optional initial data for the stream.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub struct SubscribeStreamResponse {\n\n /// The status of the response.\n\n pub status: SubscribeStreamStatus,\n\n /// Optional initial data to return to the client, used to pre-populate the stream.\n\n pub initial_data: Option<InitialData>,\n\n}\n\n\n\nimpl SubscribeStreamResponse {\n\n /// Create a new `SubscribeStreamResponse`.\n\n #[deprecated(\n\n since = \"1.3.0\",\n\n note = \"use ok/not_found/permission_denied constructors instead\"\n\n )]\n\n pub fn new(status: SubscribeStreamStatus, initial_data: Option<InitialData>) -> Self {\n\n Self {\n\n status,\n", "file_path": "src/backend/stream.rs", "rank": 55, "score": 67127.70958355858 }, { "content": " /// [`SubscribeStreamStatus::Ok`] for a specific [`Channel`][crate::live::Channel]).\n\n /// Grafana will then multiplex the returned stream to any future subscribers.\n\n ///\n\n /// When Grafana detects that there are no longer any subscribers to a channel, the stream\n\n /// will be terminated until the next active subscriber appears. 
Stream termination can\n\n /// may be slightly delayed, generally by a few seconds.\n\n async fn run_stream(&self, request: RunStreamRequest) -> Result<Self::Stream, Self::Error>;\n\n\n\n /// Handle requests to publish to a plugin or datasource managed channel path (currently unimplemented).\n\n ///\n\n /// Implementations should check the publish permissions of the incoming request.\n\n async fn publish_stream(\n\n &self,\n\n request: PublishStreamRequest,\n\n ) -> Result<PublishStreamResponse, Self::Error>;\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl<T> pluginv2::stream_server::Stream for T\n\nwhere\n", "file_path": "src/backend/stream.rs", "rank": 56, "score": 67127.31140152423 }, { "content": " plugin_context: other\n\n .plugin_context\n\n .ok_or(ConvertFromError::MissingPluginContext)\n\n .and_then(TryInto::try_into)?,\n\n path: Path::new(other.path)?,\n\n data: other.data,\n\n })\n\n }\n\n}\n\n\n\n/// The status of a subscribe stream response.\n\n#[derive(Clone, Copy, Debug)]\n\n#[non_exhaustive]\n\npub enum SubscribeStreamStatus {\n\n /// The request to subscribe was accepted.\n\n Ok,\n\n /// The requested path was not found.\n\n NotFound,\n\n /// The user did not have permission to subscribe to the requested stream.\n\n PermissionDenied,\n", "file_path": "src/backend/stream.rs", "rank": 57, "score": 67127.15651319084 }, { "content": " }\n\n }\n\n\n\n /// Create a `PublishStreamResponse` with status [`PublishStreamStatus::PermissionDenied`].\n\n ///\n\n /// This should be returned when the caller is not permitted to access the requested path.\n\n pub fn permission_denied(details: serde_json::Value) -> Self {\n\n Self {\n\n status: PublishStreamStatus::PermissionDenied,\n\n data: details,\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<PublishStreamResponse> for pluginv2::PublishStreamResponse {\n\n type Error = serde_json::Error;\n\n fn try_from(other: PublishStreamResponse) -> Result<Self, Self::Error> {\n\n let mut response = pluginv2::PublishStreamResponse {\n\n 
status: 0,\n\n data: serde_json::to_vec(&other.data)?,\n", "file_path": "src/backend/stream.rs", "rank": 58, "score": 67125.52633246413 }, { "content": " }\n\n\n\n #[tracing::instrument(skip(self), level = \"debug\")]\n\n async fn publish_stream(\n\n &self,\n\n request: tonic::Request<pluginv2::PublishStreamRequest>,\n\n ) -> Result<tonic::Response<pluginv2::PublishStreamResponse>, tonic::Status> {\n\n let request = request\n\n .into_inner()\n\n .try_into()\n\n .map_err(ConvertFromError::into_tonic_status)?;\n\n let response = StreamService::publish_stream(self, request)\n\n .await\n\n .map_err(|e| tonic::Status::internal(e.to_string()))?\n\n .try_into()\n\n .map_err(|e: serde_json::Error| tonic::Status::internal(e.to_string()))?;\n\n Ok(tonic::Response::new(response))\n\n }\n\n}\n", "file_path": "src/backend/stream.rs", "rank": 59, "score": 67125.49906609737 }, { "content": " >;\n\n\n\n #[tracing::instrument(skip(self), level = \"debug\")]\n\n async fn run_stream(\n\n &self,\n\n request: tonic::Request<pluginv2::RunStreamRequest>,\n\n ) -> Result<tonic::Response<Self::RunStreamStream>, tonic::Status> {\n\n let request = request\n\n .into_inner()\n\n .try_into()\n\n .map_err(ConvertFromError::into_tonic_status)?;\n\n let stream = StreamService::run_stream(self, request)\n\n .await\n\n .map_err(|e| tonic::Status::internal(e.to_string()))?\n\n .map_ok(|packet: StreamPacket<T::JsonValue>| packet.into_plugin_packet())\n\n .map(|res| match res {\n\n Ok(x) => Ok(x),\n\n Err(e) => Err(tonic::Status::internal(e.to_string())),\n\n });\n\n Ok(tonic::Response::new(Box::pin(stream)))\n", "file_path": "src/backend/stream.rs", "rank": 60, "score": 67125.28963932439 }, { "content": " initial_data,\n\n }\n\n }\n\n\n\n /// Create a `SubscribeStreamResponse` with status [`SubscribeStreamStatus::Ok`].\n\n ///\n\n /// This is the happy path to be used when a subscription request succeeded.\n\n pub fn ok(initial_data: Option<InitialData>) -> Self {\n\n Self {\n\n status: 
SubscribeStreamStatus::Ok,\n\n initial_data,\n\n }\n\n }\n\n\n\n /// Create a `SubscribeStreamResponse` with status [`SubscribeStreamStatus::NotFound`].\n\n ///\n\n /// This should be returned when the caller requested an unknown path.\n\n pub fn not_found() -> Self {\n\n Self {\n\n status: SubscribeStreamStatus::NotFound,\n", "file_path": "src/backend/stream.rs", "rank": 61, "score": 67123.8974076026 }, { "content": " Self {\n\n data,\n\n _p: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n fn into_plugin_packet(self) -> pluginv2::StreamPacket {\n\n pluginv2::StreamPacket { data: self.data }\n\n }\n\n}\n\n\n\n/// Type alias for a pinned, boxed stream of stream packets with a custom error type.\n\npub type BoxRunStream<E, T = ()> = Pin<Box<dyn Stream<Item = Result<StreamPacket<T>, E>> + Send>>;\n\n\n\n/// A request to publish data to a stream.\n\n#[non_exhaustive]\n\npub struct PublishStreamRequest {\n\n /// Details of the plugin instance from which the request originated.\n\n ///\n\n /// If the request originates from a datasource instance, this will\n", "file_path": "src/backend/stream.rs", "rank": 62, "score": 67122.93266755174 }, { "content": " }\n\n }\n\n}\n\n\n\n/// The response to a stream publish request.\n\n#[non_exhaustive]\n\npub struct PublishStreamResponse {\n\n /// The status of the response.\n\n pub status: PublishStreamStatus,\n\n /// Data returned in response to publishing.\n\n pub data: serde_json::Value,\n\n}\n\n\n\nimpl PublishStreamResponse {\n\n /// Create a new `PublishStreamResponse`.\n\n #[deprecated(\n\n since = \"1.3.0\",\n\n note = \"use ok/not_found/permission_denied constructors instead\"\n\n )]\n\n pub fn new(status: PublishStreamStatus, data: serde_json::Value) -> Self {\n", "file_path": "src/backend/stream.rs", "rank": 63, "score": 67121.88940998375 }, { "content": " Self { status, data }\n\n }\n\n\n\n /// Create a `PublishStreamResponse` with status [`PublishStreamStatus::Ok`].\n\n ///\n\n /// This is the happy path to be used when a 
publish request succeeded.\n\n pub fn ok(data: serde_json::Value) -> Self {\n\n Self {\n\n status: PublishStreamStatus::Ok,\n\n data,\n\n }\n\n }\n\n\n\n /// Create a `PublishStreamResponse` with status [`PublishStreamStatus::NotFound`].\n\n ///\n\n /// This should be returned when the caller requested an unknown path.\n\n pub fn not_found(details: serde_json::Value) -> Self {\n\n Self {\n\n status: PublishStreamStatus::NotFound,\n\n data: details,\n", "file_path": "src/backend/stream.rs", "rank": 64, "score": 67118.98579420739 }, { "content": " ///\n\n /// If the implementation does not intend to return JSON variants, this\n\n /// can be set to `()`. If the structure of the returned JSON is not statically known, this\n\n /// should be set to [`serde_json::Value`].\n\n type JsonValue: Serialize;\n\n\n\n /// The type of error that can occur while fetching a stream packet.\n\n type Error: std::error::Error;\n\n\n\n /// The type of stream returned by `run_stream`.\n\n ///\n\n /// This will generally be impossible to name directly, so returning the\n\n /// [`BoxRunStream`] type alias will probably be more convenient.\n\n type Stream: futures_core::Stream<Item = Result<StreamPacket<Self::JsonValue>, Self::Error>>\n\n + Send;\n\n\n\n /// Begin sending stream packets to a client.\n\n ///\n\n /// This will only be called once per channel, shortly after the first successful subscription\n\n /// to that channel by the first client (after `subscribe_stream` returns a response with\n", "file_path": "src/backend/stream.rs", "rank": 65, "score": 67116.40179916333 }, { "content": " }\n\n}\n\n\n\n/// The status of a publish stream response.\n\n#[non_exhaustive]\n\npub enum PublishStreamStatus {\n\n /// The request to publish was accepted.\n\n Ok,\n\n /// The requested path was not found.\n\n NotFound,\n\n /// The user did not have permission to publish to the requested stream.\n\n PermissionDenied,\n\n}\n\n\n\nimpl From<PublishStreamStatus> for 
pluginv2::publish_stream_response::Status {\n\n fn from(other: PublishStreamStatus) -> Self {\n\n match other {\n\n PublishStreamStatus::Ok => Self::Ok,\n\n PublishStreamStatus::NotFound => Self::NotFound,\n\n PublishStreamStatus::PermissionDenied => Self::PermissionDenied,\n", "file_path": "src/backend/stream.rs", "rank": 66, "score": 67115.79890862766 }, { "content": " };\n\n response.set_status(other.status.into());\n\n response\n\n }\n\n}\n\n\n\n/// A request to 'run' a stream, i.e. begin streaming data.\n\n///\n\n/// This is made by Grafana _after_ a stream subscription request has been accepted,\n\n/// and will include the same `path` as the subscription request.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub struct RunStreamRequest {\n\n /// Metadata about the plugin from which the request originated.\n\n pub plugin_context: PluginContext,\n\n\n\n /// The subscription path; see module level comments for details.\n\n pub path: Path,\n\n\n\n /// Optional raw data.\n", "file_path": "src/backend/stream.rs", "rank": 67, "score": 67115.26279651935 }, { "content": " \"Data type mismatch in field {} (existing: {existing:?}, new: {new:?})\",\n\n field\n\n )]\n\n DataTypeMismatch {\n\n /// The existing datatype of the field.\n\n existing: DataType,\n\n /// The datatype of the new data.\n\n new: DataType,\n\n /// The name of the field.\n\n field: String,\n\n },\n\n\n\n /// Occurs when a frame had mismatched field lengths while checking.\n\n #[error(\n\n \"Frame field length mismatch: {}\",\n\n .lengths.iter().map(|x| format!(\"{} ({})\", x.0, x.1)).join(\", \")\n\n )]\n\n FieldLengthMismatch {\n\n /// The names and lengths of the fields in the `Frame`.\n\n lengths: Vec<(String, usize)>,\n\n },\n\n\n\n /// A field was created using an Arrow array with a datatype unsupported by Grafana.\n\n #[error(\"Unsupported Arrow data type: {0:?}\")]\n\n UnsupportedArrowDataType(DataType),\n\n}\n", "file_path": "src/data/error.rs", "rank": 68, "score": 67016.57199671636 }, { 
"content": "//! Error types returned by the SDK.\n\nuse arrow2::datatypes::DataType;\n\nuse itertools::Itertools;\n\nuse thiserror::Error;\n\n\n\nuse super::frame::to_arrow;\n\n\n\n/// Errors that can occur when interacting with the Grafana plugin SDK.\n\n#[derive(Debug, Error)]\n\n#[non_exhaustive]\n\npub enum Error {\n\n /// An error has occurred when serializing to Arrow IPC format.\n\n #[error(\"Arrow serialization error: {0}\")]\n\n ArrowSerialization(#[from] to_arrow::Error),\n\n\n\n /// There is a datatype mismatch in a field.\n\n ///\n\n /// This can happen when calling [`Field::set_values`][crate::data::Field::set_values] with an array whose datatype\n\n /// does not match the existing array.\n\n #[error(\n", "file_path": "src/data/error.rs", "rank": 69, "score": 67005.87814050629 }, { "content": "/// Indicates that a type can be converted to one that is [`FieldType`], and holds associated metadata.\n\n///\n\n/// For example, [`DateTime<T>`]s are valid `Field` values, but must first be converted\n\n/// to nanoseconds-since-the-epoch in i64 values; the original type and corresponding\n\n/// [`TypeInfoType`] is stored here.\n\n///\n\n/// This trait mainly exists to enable smoother APIs when creating [`Field`]s.\n\n///\n\n/// [`Field`]: crate::data::Field\n\npub trait IntoFieldType {\n\n /// The type to which `Self` will be converted when storing values in a `Field`.\n\n type ElementType;\n\n /// The corresponding [`TypeInfoType`] for this original data type.\n\n const TYPE_INFO_TYPE: TypeInfoType;\n\n /// Convert this type into an (optional) field type.\n\n fn into_field_type(self) -> Option<Self::ElementType>;\n\n}\n\n\n\n// Optional impl - a no-op.\n\nimpl<T> IntoFieldType for Option<T>\n\nwhere\n\n T: IntoFieldType,\n\n{\n\n type ElementType = T;\n\n const TYPE_INFO_TYPE: TypeInfoType = T::TYPE_INFO_TYPE;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n self\n\n }\n\n}\n", "file_path": "src/data/field_type.rs", "rank": 70, "score": 
66330.7097357407 }, { "content": "/// Indicates that a type is can be stored in an Arrow array.\n\npub trait FieldType {\n\n /// The type of arrow array this field type is stored in.\n\n type Array;\n\n /// The logical arrow data type that an arrow array of this data should have.\n\n const ARROW_DATA_TYPE: DataType;\n\n\n\n /// Convert the logical type of `Self::Array`, if needed.\n\n ///\n\n /// The default implementation is a no-op, but some field types may need to\n\n /// implement this to ensure the underlying boxed Arrow array can be downcast correctly.\n\n fn convert_arrow_array(array: Self::Array, _data_type: DataType) -> Self::Array {\n\n array\n\n }\n\n}\n\n\n", "file_path": "src/data/field_type.rs", "rank": 71, "score": 66321.0654098946 }, { "content": "#[derive(Debug)]\n\nstruct HCLogSerdeMapVisitor<S: SerializeMap> {\n\n serializer: S,\n\n state: Result<(), S::Error>,\n\n}\n\n\n\nimpl<S> HCLogSerdeMapVisitor<S>\n\nwhere\n\n S: SerializeMap,\n\n{\n\n /// Create a new map visitor.\n\n pub fn new(serializer: S) -> Self {\n\n Self {\n\n serializer,\n\n state: Ok(()),\n\n }\n\n }\n\n\n\n /// Completes serializing the visited object, returning ownership of the underlying serializer\n\n /// if all fields were serialized correctly, or `Err(S::Error)` if a field could not be\n\n /// serialized.\n", "file_path": "src/backend/tracing_fmt.rs", "rank": 72, "score": 65678.67600728074 }, { "content": "#[tonic::async_trait]\n\npub trait DataService {\n\n /// The error type that can be returned by individual queries.\n\n ///\n\n /// This must implement [`DataQueryError`], which allows the SDK to\n\n /// align queries up with any failed requests.\n\n type QueryError: DataQueryError;\n\n\n\n /// The type of stream returned by the `query_data` method.\n\n ///\n\n /// This will generally be impossible to name directly, so returning the\n\n /// [`BoxDataResponseStream`] type alias will probably be more convenient.\n\n type Stream: Stream<Item = Result<DataResponse, 
Self::QueryError>> + Send;\n\n\n\n /// Query data for an input request.\n\n ///\n\n /// The request will contain zero or more queries, as well as information about the\n\n /// origin of the queries (such as the datasource instance) in the `plugin_context` field.\n\n async fn query_data(&self, request: QueryDataRequest) -> Self::Stream;\n\n}\n\n\n", "file_path": "src/backend/data.rs", "rank": 73, "score": 65611.0195394446 }, { "content": "\n\n/// The results from a [`DataQuery`].\n\n#[derive(Debug)]\n\npub struct DataResponse {\n\n /// The unique identifier of the query, set by the frontend call.\n\n ///\n\n /// This is used to align queries in the request to data in the response,\n\n /// and can be obtained from the [`DataQuery`].\n\n ref_id: String,\n\n\n\n /// The data returned from the query.\n\n frames: Result<Vec<Vec<u8>>, data::Error>,\n\n}\n\n\n\nimpl DataResponse {\n\n /// Create a new [`DataResponse`] with the given `ref_id` and `frames`.\n\n #[must_use]\n\n pub fn new(ref_id: String, frames: Vec<data::CheckedFrame<'_>>) -> Self {\n\n Self {\n\n ref_id: ref_id.clone(),\n\n frames: to_arrow(frames, &Some(ref_id)),\n\n }\n\n }\n\n}\n\n\n\n/// Error supertrait used in [`DataService::query_data`].\n", "file_path": "src/backend/data.rs", "rank": 74, "score": 64656.03839600456 }, { "content": "/// .queries\n\n/// .into_iter()\n\n/// .map(|x| async {\n\n/// // Here we create a single response Frame for each query.\n\n/// // Frames can be created from iterators of fields using [`IntoFrame`].\n\n/// Ok(backend::DataResponse::new(\n\n/// x.ref_id.clone(),\n\n/// // Return zero or more frames.\n\n/// // A real implementation would fetch this data from a database\n\n/// // or something.\n\n/// vec![[\n\n/// [1_u32, 2, 3].into_field(\"x\"),\n\n/// [\"a\", \"b\", \"c\"].into_field(\"y\"),\n\n/// ]\n\n/// .into_frame(\"foo\")\n\n/// .check()\n\n/// .map_err(|source| QueryError {\n\n/// ref_id: x.ref_id,\n\n/// source,\n\n/// })?],\n\n/// ))\n\n/// })\n\n/// 
.collect::<FuturesOrdered<_>>(),\n\n/// )\n\n/// }\n\n/// }\n\n/// ```\n", "file_path": "src/backend/data.rs", "rank": 75, "score": 64653.44926384675 }, { "content": "/// /// The type of error that could be returned by an individual query.\n\n/// type QueryError = QueryError;\n\n///\n\n/// /// The type of iterator we're returning.\n\n/// ///\n\n/// /// In general the concrete type will be impossible to name in advance,\n\n/// /// so the `backend::BoxDataResponseStream` type alias will be useful.\n\n/// type Stream = backend::BoxDataResponseStream<Self::QueryError>;\n\n///\n\n/// /// Respond to a request for data from Grafana.\n\n/// ///\n\n/// /// This request will contain zero or more queries, as well as information\n\n/// /// about the datasource instance on behalf of which this request is made,\n\n/// /// such as address, credentials, etc.\n\n/// ///\n\n/// /// Our plugin must respond to each query and return an iterator of `DataResponse`s,\n\n/// /// which themselves can contain zero or more `Frame`s.\n\n/// async fn query_data(&self, request: backend::QueryDataRequest) -> Self::Stream {\n\n/// Box::pin(\n\n/// request\n", "file_path": "src/backend/data.rs", "rank": 76, "score": 64649.23721774324 }, { "content": " ///\n\n /// If the request originates from a datasource instance, this will\n\n /// include details about the datasource instance in the\n\n /// `data_source_instance_settings` field.\n\n pub plugin_context: backend::PluginContext,\n\n /// Headers included along with the request by Grafana.\n\n pub headers: HashMap<String, String>,\n\n /// The queries requested by a user or alert.\n\n ///\n\n /// Each [`DataQuery`] contains a unique `ref_id` field which identifies\n\n /// the query to the frontend; this should be included in the corresponding\n\n /// `DataResponse` for each query.\n\n pub queries: Vec<DataQuery>,\n\n}\n\n\n\nimpl TryFrom<pluginv2::QueryDataRequest> for QueryDataRequest {\n\n type Error = ConvertFromError;\n\n fn try_from(other: 
pluginv2::QueryDataRequest) -> Result<Self, Self::Error> {\n\n Ok(Self {\n\n plugin_context: other\n", "file_path": "src/backend/data.rs", "rank": 77, "score": 64649.12373059916 }, { "content": "/// Type alias for a boxed iterator of query responses, useful for returning from [`DataService::query_data`].\n\npub type BoxDataResponseStream<E> =\n\n Pin<Box<dyn Stream<Item = Result<backend::DataResponse, E>> + Send>>;\n\n\n\n/// Serialize a slice of frames to Arrow IPC format.\n\n///\n\n/// If `ref_id` is provided, it is passed down to the various conversion\n\n/// function and takes precedence over any `ref_id`s set on the individual frames.\n\npub(crate) fn to_arrow<'a>(\n\n frames: impl IntoIterator<Item = data::CheckedFrame<'a>>,\n\n ref_id: &Option<String>,\n\n) -> Result<Vec<Vec<u8>>, data::Error> {\n\n frames\n\n .into_iter()\n\n .map(|frame| Ok(frame.to_arrow(ref_id.clone())?))\n\n .collect()\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl<T> pluginv2::data_server::Data for T\n", "file_path": "src/backend/data.rs", "rank": 78, "score": 64648.87435625484 }, { "content": "//! 
SDK types and traits relevant to plugins that query data.\n\nuse std::{collections::HashMap, pin::Pin, time::Duration};\n\n\n\nuse futures_core::Stream;\n\nuse futures_util::StreamExt;\n\nuse serde_json::Value;\n\n\n\nuse crate::{\n\n backend::{self, ConvertFromError, TimeRange},\n\n data, pluginv2,\n\n};\n\n\n\n/// A request for data made by Grafana.\n\n///\n\n/// Details of the request source can be found in `plugin_context`,\n\n/// while the actual plugins themselves are in `queries`.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub struct QueryDataRequest {\n\n /// Details of the plugin instance from which the request originated.\n", "file_path": "src/backend/data.rs", "rank": 79, "score": 64648.573749532006 }, { "content": "///\n\n/// /// An error that may occur during a query.\n\n/// ///\n\n/// /// This must store the `ref_id` of the query so that Grafana can line it up.\n\n/// #[derive(Debug, Error)]\n\n/// #[error(\"Error querying backend for query {ref_id}: {source}\")]\n\n/// struct QueryError {\n\n/// source: data::Error,\n\n/// ref_id: String,\n\n/// }\n\n///\n\n/// impl backend::DataQueryError for QueryError {\n\n/// fn ref_id(self) -> String {\n\n/// self.ref_id\n\n/// }\n\n/// }\n\n///\n\n/// #[backend::async_trait]\n\n/// impl backend::DataService for MyPlugin {\n\n///\n", "file_path": "src/backend/data.rs", "rank": 80, "score": 64648.551003874316 }, { "content": "where\n\n T: DataService + Send + Sync + 'static,\n\n{\n\n #[tracing::instrument(skip(self), level = \"debug\")]\n\n async fn query_data(\n\n &self,\n\n request: tonic::Request<pluginv2::QueryDataRequest>,\n\n ) -> Result<tonic::Response<pluginv2::QueryDataResponse>, tonic::Status> {\n\n let responses = DataService::query_data(\n\n self,\n\n request\n\n .into_inner()\n\n .try_into()\n\n .map_err(ConvertFromError::into_tonic_status)?,\n\n )\n\n .await\n\n .map(|resp| match resp {\n\n Ok(x) => {\n\n let ref_id = x.ref_id;\n\n x.frames.map_or_else(\n", "file_path": "src/backend/data.rs", 
"rank": 81, "score": 64642.980901072646 }, { "content": " /// This contains all of the other properties, as well as custom properties.\n\n pub json: Value,\n\n}\n\n\n\nimpl TryFrom<pluginv2::DataQuery> for DataQuery {\n\n type Error = ConvertFromError;\n\n fn try_from(other: pluginv2::DataQuery) -> Result<Self, Self::Error> {\n\n Ok(Self {\n\n ref_id: other.ref_id,\n\n query_type: other.query_type,\n\n max_data_points: other.max_data_points,\n\n interval: Duration::from_millis(other.interval_ms as u64),\n\n time_range: other\n\n .time_range\n\n .map(TimeRange::from)\n\n .ok_or(ConvertFromError::MissingTimeRange)?,\n\n json: backend::read_json(&other.json)?,\n\n })\n\n }\n\n}\n", "file_path": "src/backend/data.rs", "rank": 82, "score": 64642.59547303367 }, { "content": " )\n\n }\n\n Err(e) => {\n\n let err_string = e.to_string();\n\n (\n\n e.ref_id(),\n\n pluginv2::DataResponse {\n\n frames: vec![],\n\n error: err_string,\n\n json_meta: vec![],\n\n },\n\n )\n\n }\n\n })\n\n .collect()\n\n .await;\n\n Ok(tonic::Response::new(pluginv2::QueryDataResponse {\n\n responses,\n\n }))\n\n }\n\n}\n", "file_path": "src/backend/data.rs", "rank": 83, "score": 64639.96952303355 }, { "content": " .plugin_context\n\n .ok_or(ConvertFromError::MissingPluginContext)\n\n .and_then(TryInto::try_into)?,\n\n headers: other.headers,\n\n queries: other\n\n .queries\n\n .into_iter()\n\n .map(DataQuery::try_from)\n\n .collect::<Result<Vec<_>, _>>()?,\n\n })\n\n }\n\n}\n\n\n\n/// A query made by Grafana to the plugin as part of a [`QueryDataRequest`].\n\n///\n\n/// The `json` field contains any fields set by the plugin's UI.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub struct DataQuery {\n\n /// The unique identifier of the query, set by the frontend call.\n", "file_path": "src/backend/data.rs", "rank": 84, "score": 64637.077667631325 }, { "content": " |e| {\n\n (\n\n ref_id.clone(),\n\n pluginv2::DataResponse {\n\n frames: vec![],\n\n error: e.to_string(),\n\n json_meta: vec![],\n\n },\n\n 
)\n\n },\n\n |frames| {\n\n (\n\n ref_id.clone(),\n\n pluginv2::DataResponse {\n\n frames,\n\n error: \"\".to_string(),\n\n json_meta: vec![],\n\n },\n\n )\n\n },\n", "file_path": "src/backend/data.rs", "rank": 85, "score": 64636.0765207694 }, { "content": " ///\n\n /// This should be included in the corresponding [`DataResponse`].\n\n pub ref_id: String,\n\n\n\n /// An identifier for the type of query.\n\n ///\n\n /// This can be used to distinguish different types of queries.\n\n pub query_type: String,\n\n\n\n /// The maximum number of datapoints that should be returned from a time series query.\n\n pub max_data_points: i64,\n\n\n\n /// The suggested duration between time points in a time series query.\n\n pub interval: Duration,\n\n\n\n /// The start and end of the query requested by the frontend.\n\n pub time_range: TimeRange,\n\n\n\n /// The raw JSON query.\n\n ///\n", "file_path": "src/backend/data.rs", "rank": 86, "score": 64634.33750497753 }, { "content": "\n\nimpl FieldType for NaiveDateTime {\n\n type Array = PrimitiveArray<i64>;\n\n const ARROW_DATA_TYPE: DataType = DataType::Timestamp(TimeUnit::Nanosecond, None);\n\n\n\n /// Convert the logical type of `Self::Array` to `DataType::Timestamp`.\n\n fn convert_arrow_array(array: Self::Array, data_type: DataType) -> Self::Array {\n\n array.to(data_type)\n\n }\n\n}\n\n\n\nimpl IntoFieldType for NaiveDateTime {\n\n type ElementType = i64;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::Time;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n Some(self.timestamp_nanos())\n\n }\n\n}\n\n\n\n// String impls.\n", "file_path": "src/data/field_type.rs", "rank": 87, "score": 64040.442107128925 }, { "content": "\n\nimpl FieldType for &str {\n\n type Array = Utf8Array<i32>;\n\n const ARROW_DATA_TYPE: DataType = DataType::Utf8;\n\n}\n\n\n\nimpl<'a> IntoFieldType for &'a str {\n\n type ElementType = &'a str;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::String;\n\n fn into_field_type(self) -> 
Option<Self::ElementType> {\n\n Some(self)\n\n }\n\n}\n\n\n\nimpl FieldType for String {\n\n type Array = Utf8Array<i32>;\n\n const ARROW_DATA_TYPE: DataType = DataType::Utf8;\n\n}\n\n\n\nimpl IntoFieldType for String {\n\n type ElementType = String;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::String;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n Some(self)\n\n }\n\n}\n", "file_path": "src/data/field_type.rs", "rank": 88, "score": 64039.298734393335 }, { "content": " }\n\n}\n\n\n\nimpl IntoFieldType for bool {\n\n type ElementType = bool;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::Bool;\n\n\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n Some(self)\n\n }\n\n}\n\n\n\n// DateTime impls.\n\n\n\nimpl FieldType for SystemTime {\n\n type Array = PrimitiveArray<i64>;\n\n const ARROW_DATA_TYPE: DataType = DataType::Timestamp(TimeUnit::Nanosecond, None);\n\n\n\n /// Convert the logical type of `Self::Array` to `DataType::Timestamp`.\n\n fn convert_arrow_array(array: Self::Array, data_type: DataType) -> Self::Array {\n", "file_path": "src/data/field_type.rs", "rank": 89, "score": 64038.705488007356 }, { "content": " }\n\n}\n\n\n\nimpl FieldType for NaiveDate {\n\n type Array = PrimitiveArray<i64>;\n\n const ARROW_DATA_TYPE: DataType = DataType::Timestamp(TimeUnit::Nanosecond, None);\n\n\n\n /// Convert the logical type of `Self::Array` to `DataType::Timestamp`.\n\n fn convert_arrow_array(array: Self::Array, data_type: DataType) -> Self::Array {\n\n array.to(data_type)\n\n }\n\n}\n\n\n\nimpl IntoFieldType for NaiveDate {\n\n type ElementType = i64;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::Time;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n Some(self.and_hms(0, 0, 0).timestamp_nanos())\n\n }\n\n}\n", "file_path": "src/data/field_type.rs", "rank": 90, "score": 64038.02609655463 }, { "content": " const ARROW_DATA_TYPE: DataType = DataType::Timestamp(TimeUnit::Nanosecond, None);\n\n\n\n /// 
Convert the logical type of `Self::Array` to `DataType::Timestamp`.\n\n fn convert_arrow_array(array: Self::Array, data_type: DataType) -> Self::Array {\n\n array.to(data_type)\n\n }\n\n}\n\n\n\nimpl<T> IntoFieldType for DateTime<T>\n\nwhere\n\n T: Offset + TimeZone,\n\n{\n\n type ElementType = i64;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::Time;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n Some(self.timestamp_nanos())\n\n }\n\n}\n\n\n\nimpl<T> FieldType for Date<T>\n", "file_path": "src/data/field_type.rs", "rank": 91, "score": 64037.87894504896 }, { "content": "\n\nmacro_rules! impl_fieldtype_for_primitive {\n\n ($ty: ty, $arrow_ty: expr, $type_info: expr) => {\n\n impl FieldType for $ty {\n\n type Array = PrimitiveArray<$ty>;\n\n const ARROW_DATA_TYPE: DataType = $arrow_ty;\n\n fn convert_arrow_array(array: Self::Array, data_type: DataType) -> Self::Array {\n\n array.to(data_type)\n\n }\n\n }\n\n\n\n impl IntoFieldType for $ty {\n\n type ElementType = $ty;\n\n const TYPE_INFO_TYPE: TypeInfoType = $type_info;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n Some(self)\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/data/field_type.rs", "rank": 92, "score": 64037.79729319514 }, { "content": "where\n\n T: Offset + TimeZone,\n\n{\n\n type Array = PrimitiveArray<i64>;\n\n const ARROW_DATA_TYPE: DataType = DataType::Timestamp(TimeUnit::Nanosecond, None);\n\n\n\n /// Convert the logical type of `Self::Array` to `DataType::Timestamp`.\n\n fn convert_arrow_array(array: Self::Array, data_type: DataType) -> Self::Array {\n\n array.to(data_type)\n\n }\n\n}\n\n\n\nimpl<T> IntoFieldType for Date<T>\n\nwhere\n\n T: Offset + TimeZone,\n\n{\n\n type ElementType = i64;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::Time;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n Some(self.and_hms(0, 0, 0).timestamp_nanos())\n", "file_path": "src/data/field_type.rs", "rank": 93, "score": 64036.76407256681 }, { "content": 
"\n\nimpl_fieldtype_for_primitive!(i8, DataType::Int8, TypeInfoType::Int8);\n\nimpl_fieldtype_for_primitive!(i16, DataType::Int16, TypeInfoType::Int16);\n\nimpl_fieldtype_for_primitive!(i32, DataType::Int32, TypeInfoType::Int32);\n\nimpl_fieldtype_for_primitive!(i64, DataType::Int64, TypeInfoType::Int64);\n\nimpl_fieldtype_for_primitive!(u8, DataType::UInt8, TypeInfoType::UInt8);\n\nimpl_fieldtype_for_primitive!(u16, DataType::UInt16, TypeInfoType::UInt16);\n\nimpl_fieldtype_for_primitive!(u32, DataType::UInt32, TypeInfoType::UInt32);\n\nimpl_fieldtype_for_primitive!(u64, DataType::UInt64, TypeInfoType::UInt64);\n\nimpl_fieldtype_for_primitive!(f32, DataType::Float32, TypeInfoType::Float32);\n\nimpl_fieldtype_for_primitive!(f64, DataType::Float64, TypeInfoType::Float64);\n\n\n\n// Boolean impl.\n\n\n\nimpl FieldType for bool {\n\n type Array = BooleanArray;\n\n const ARROW_DATA_TYPE: DataType = DataType::Boolean;\n\n\n\n fn convert_arrow_array(array: Self::Array, _data_type: DataType) -> Self::Array {\n\n array\n", "file_path": "src/data/field_type.rs", "rank": 94, "score": 64036.59432054892 }, { "content": "//! 
Types of field understood by the Grafana plugin SDK.\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n\nuse arrow2::{\n\n array::{BooleanArray, PrimitiveArray, Utf8Array},\n\n datatypes::{DataType, TimeUnit},\n\n};\n\nuse chrono::prelude::*;\n\n\n\nuse crate::data::TypeInfoType;\n\n\n\n/// Indicates that a type is can be stored in an Arrow array.\n", "file_path": "src/data/field_type.rs", "rank": 95, "score": 64035.203079076564 }, { "content": " array.to(data_type)\n\n }\n\n}\n\n\n\nimpl IntoFieldType for SystemTime {\n\n type ElementType = i64;\n\n const TYPE_INFO_TYPE: TypeInfoType = TypeInfoType::Time;\n\n fn into_field_type(self) -> Option<Self::ElementType> {\n\n // This won't overflow for about 300 years so we're probably fine.\n\n self.duration_since(UNIX_EPOCH)\n\n .ok()\n\n .and_then(|x| x.as_nanos().try_into().ok())\n\n }\n\n}\n\n\n\nimpl<T> FieldType for DateTime<T>\n\nwhere\n\n T: Offset + TimeZone,\n\n{\n\n type Array = PrimitiveArray<i64>;\n", "file_path": "src/data/field_type.rs", "rank": 96, "score": 64031.603452696916 }, { "content": "fn parse_knobs(input: syn::ItemFn, config: FinalConfig) -> TokenStream {\n\n // If type mismatch occurs, the current rustc points to the last statement.\n\n let (last_stmt_start_span, _) = {\n\n let mut last_stmt = input\n\n .block\n\n .stmts\n\n .last()\n\n .map(ToTokens::into_token_stream)\n\n .unwrap_or_default()\n\n .into_iter();\n\n // `Span` on stable Rust has a limitation that only points to the first\n\n // token, not the whole tokens. 
We can work around this limitation by\n\n // using the first/last span of the tokens like\n\n // `syn::Error::new_spanned` does.\n\n let start = last_stmt.next().map_or_else(Span::call_site, |t| t.span());\n\n let end = last_stmt.last().map_or(start, |t| t.span());\n\n (start, end)\n\n };\n\n\n\n let body = input.block;\n", "file_path": "grafana-plugin-sdk-macros/src/lib.rs", "rank": 97, "score": 63947.886637805306 }, { "content": "fn body_to_response(body: Bytes) -> pluginv2::CallResourceResponse {\n\n pluginv2::CallResourceResponse {\n\n code: 200,\n\n headers: std::collections::HashMap::new(),\n\n body,\n\n }\n\n}\n\n\n\n/// Type alias for a pinned, boxed future with a fallible HTTP response as output, with a custom error type.\n\npub type BoxResourceFuture<E> =\n\n Pin<Box<dyn std::future::Future<Output = Result<Response<Bytes>, E>>>>;\n\n\n\n/// Type alias for a pinned, boxed stream of HTTP responses with a custom error type.\n\npub type BoxResourceStream<E> = Pin<Box<dyn futures_core::Stream<Item = Result<Bytes, E>> + Send>>;\n\n\n\n/// Trait for plugins that can handle arbitrary resource requests.\n\n///\n\n/// Implementing this trait allows plugins to handle a wide variety of use cases beyond\n\n/// 'just' responding to requests for data and returning dataframes.\n\n///\n", "file_path": "src/backend/resource.rs", "rank": 98, "score": 63411.996111250395 }, { "content": "#[cfg_attr(docsrs, doc(notable_trait))]\n\npub trait FromFields<T: IntoFrame> {\n\n /// Create a [`Frame`] with the given name from `fields`.\n\n fn from_fields(name: impl Into<String>, fields: T) -> Frame;\n\n}\n\n\n\nimpl<T: IntoFrame> FromFields<T> for Frame {\n\n fn from_fields(name: impl Into<String>, fields: T) -> Frame {\n\n fields.into_frame(name)\n\n }\n\n}\n\n\n\n/// Options for customizing the way a [`Frame`] is serialized.\n\n#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[non_exhaustive]\n\npub enum FrameInclude {\n\n 
/// Serialize both the schema and data.\n\n All,\n\n /// Serialize just the data.\n\n DataOnly,\n", "file_path": "src/data/frame/mod.rs", "rank": 99, "score": 63001.75115604622 } ]
Rust
src/command/last.rs
wojexe/baca-cli
c024bd3d95a2e54dfc25bdc0dec352551a3b4e7c
use crate::api::baca_api::BacaApi; use crate::command::details::Details; use crate::command::Command; use crate::error::{Error, Result}; use crate::model::Submit; use crate::workspace::{ConfigObject, ConnectionConfig, Workspace}; use clap::ArgMatches; pub struct Last { task_id: Option<String>, } impl Last { pub fn new() -> Self { Self { task_id: None } } pub fn with_filter(task_id: &str) -> Self { Self { task_id: Some(task_id.to_string()), } } fn get_last_submit<A>(&self, connection_config: &ConnectionConfig, api: &A) -> Result<Submit> where A: BacaApi, { let results = if let Some(task_id) = &self.task_id { api.get_results_by_task(connection_config, task_id)? } else { api.get_results(connection_config)? }; Ok(results.submits.first().ok_or(Error::NoSubmitsYet)?.clone()) } } impl Command for Last { fn execute<W, A>(self, workspace: &W, api: &A) -> Result<()> where W: Workspace, A: BacaApi, { let connection_config = ConnectionConfig::read_config(workspace)?; let last = self.get_last_submit(&connection_config, api)?; Details::new(&last.id).execute(workspace, api) } } impl From<&ArgMatches<'_>> for Last { fn from(args: &ArgMatches) -> Self { if let Some(task_id) = args.value_of("task") { return Last::with_filter(task_id); } Last::new() } } #[cfg(test)] mod tests { use super::*; use crate::api::baca_api::MockBacaApi; use crate::model::SubmitStatus; use crate::model::{Results, Submit}; use crate::workspace::{ConnectionConfig, MockWorkspace}; #[test] fn no_submits() { let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(|_| Ok(Results { submits: vec![] })); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_err()); assert!(matches!(result.err().unwrap(), Error::NoSubmitsYet)); } #[test] fn one_submit() { let expected = 
Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: vec![expected.clone()], }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = expected; mock_api .expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "3") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } #[test] fn three_submits() { let submit1 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "1".to_string(), max_points: None, problem_name: "Test Problem 1".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit2 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "2".to_string(), max_points: None, problem_name: "Test Problem 2".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit3 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem 3".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let all_submits = vec![submit1.clone(), submit2, submit3]; let mut mock_workspace = 
MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: all_submits, }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = submit1; mock_api .expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "1") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } }
use crate::api::baca_api::BacaApi; use crate::command::details::Details; use crate::command::Command; use crate::error::{Error, Result}; use crate::model::Submit; use crate::workspace::{ConfigObject, ConnectionConfig, Workspace}; use clap::ArgMatches; pub struct Last { task_id: Option<String>, } impl Last { pub fn new() -> Self { Self { task_id: None } } pub fn with_filter(task_id: &str) -> Self { Self { task_id: Some(task_id.to_string()), } } fn get_last_submit<A>(&self, connection_config: &ConnectionConfig, api: &A) -> Result<Submit> where A: BacaApi, { let results =
; Ok(results.submits.first().ok_or(Error::NoSubmitsYet)?.clone()) } } impl Command for Last { fn execute<W, A>(self, workspace: &W, api: &A) -> Result<()> where W: Workspace, A: BacaApi, { let connection_config = ConnectionConfig::read_config(workspace)?; let last = self.get_last_submit(&connection_config, api)?; Details::new(&last.id).execute(workspace, api) } } impl From<&ArgMatches<'_>> for Last { fn from(args: &ArgMatches) -> Self { if let Some(task_id) = args.value_of("task") { return Last::with_filter(task_id); } Last::new() } } #[cfg(test)] mod tests { use super::*; use crate::api::baca_api::MockBacaApi; use crate::model::SubmitStatus; use crate::model::{Results, Submit}; use crate::workspace::{ConnectionConfig, MockWorkspace}; #[test] fn no_submits() { let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(|_| Ok(Results { submits: vec![] })); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_err()); assert!(matches!(result.err().unwrap(), Error::NoSubmitsYet)); } #[test] fn one_submit() { let expected = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: vec![expected.clone()], }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = expected; mock_api 
.expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "3") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } #[test] fn three_submits() { let submit1 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "1".to_string(), max_points: None, problem_name: "Test Problem 1".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit2 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "2".to_string(), max_points: None, problem_name: "Test Problem 2".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit3 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem 3".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let all_submits = vec![submit1.clone(), submit2, submit3]; let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: all_submits, }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = submit1; mock_api .expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "1") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } }
if let Some(task_id) = &self.task_id { api.get_results_by_task(connection_config, task_id)? } else { api.get_results(connection_config)? }
if_condition
[ { "content": "pub fn assert_contains_pattern(command: &[&str], pattern: &str) -> Result<(), Box<dyn Error>> {\n\n let (dir, mut cmd) = set_up_with_dir()?;\n\n\n\n cmd.args(command);\n\n cmd.assert()\n\n // .failure() // todo: exit codes\n\n .stdout(predicate::str::contains(pattern));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 0, "score": 188666.06201985216 }, { "content": "pub fn assert_fails_if_not_initialized(command: &[&str]) -> Result<(), Box<dyn Error>> {\n\n let pattern = \"not initialized\";\n\n assert_contains_pattern(command, pattern)\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 1, "score": 181697.46762187552 }, { "content": "pub fn execute<W, Api>(\n\n workspace: &W,\n\n api: &Api,\n\n command: &str,\n\n matches: &ArgMatches,\n\n) -> error::Result<()>\n\nwhere\n\n W: Workspace,\n\n Api: BacaApi,\n\n{\n\n match command {\n\n \"init\" => Init::from(matches).execute(workspace, api),\n\n \"details\" => Details::from(matches).execute(workspace, api),\n\n \"refresh\" => Refresh::new().execute(workspace, api),\n\n \"log\" => Log::from(matches).execute(workspace, api),\n\n \"tasks\" => Tasks::new().execute(workspace, api),\n\n \"submit\" => Submit::try_from(matches)?.execute(workspace, api),\n\n \"last\" => Last::from(matches).execute(workspace, api),\n\n \"config\" => {\n\n ConfigEditor::new().edit::<W, ConnectionConfig>(workspace)?;\n\n Ok(())\n\n }\n\n \"clear\" => workspace.remove_workspace(),\n\n _ => panic!(\"error!\"),\n\n }\n\n}\n", "file_path": "src/command/mod.rs", "rank": 2, "score": 163899.2908632916 }, { "content": "pub fn zip_file(path: &Path) -> Result<&Path, error::Error> {\n\n _zip_file(path).map_err(|e| error::Error::Zipping(e.into()))\n\n}\n\n\n", "file_path": "src/workspace/zip.rs", "rank": 3, "score": 161435.24912512116 }, { "content": "fn to_int(n: &str) -> Result<usize> {\n\n n.parse().map_err(|_| Error::InvalidArgument)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n 
use crate::api::baca_api::MockBacaApi;\n\n use crate::model::Results;\n\n use crate::workspace::{ConnectionConfig, MockWorkspace};\n\n\n\n #[test]\n\n fn no_submits() {\n\n let mut mock_workspace = MockWorkspace::new();\n\n mock_workspace\n\n .expect_read_config_object()\n\n .returning(|| Ok(ConnectionConfig::default()));\n\n\n\n let mut mock_api = MockBacaApi::new();\n\n mock_api\n\n .expect_get_results()\n\n .withf(|x| *x == ConnectionConfig::default())\n\n .returning(|_| Ok(Results::default()));\n\n\n\n let log = Log::new(\"10\");\n\n let result = log.execute(&mock_workspace, &mock_api);\n\n assert!(result.is_ok())\n\n }\n\n}\n", "file_path": "src/command/log.rs", "rank": 4, "score": 155651.21835482772 }, { "content": "pub fn initialize_correct_workspace() -> Result<TempDir, Box<dyn std::error::Error>> {\n\n let (login, pass, host) = get_baca_credentials();\n\n let (dir, mut cmd) = set_up_with_dir()?;\n\n\n\n cmd.arg(\"init\")\n\n .args(&[\"-h\", &host, \"-p\", &pass, \"-l\", &login]);\n\n cmd.assert();\n\n Ok(dir)\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 5, "score": 150947.05478393787 }, { "content": "fn submit<W, A>(workspace: &W, api: &A, mut submit_config: SubmitConfig) -> error::Result<()>\n\nwhere\n\n W: Workspace,\n\n A: BacaApi,\n\n{\n\n let connection_config = ConnectionConfig::read_config(workspace)?;\n\n let tasks = api.get_tasks(&connection_config)?;\n\n let task_id = submit_config.id().unwrap();\n\n let mut task = tasks.get_by_id(task_id)?.clone();\n\n task.language = submit_config.language.unwrap();\n\n\n\n let original_filename = submit_config\n\n .file()\n\n .unwrap()\n\n .file_name()\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n .to_string();\n\n\n", "file_path": "src/command/submit.rs", "rank": 6, "score": 145994.68507502496 }, { "content": "pub fn permutation() -> String {\n\n PERMUTATION.to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn permutation_function_should_return_same_string() 
{\n\n assert_eq!(permutation(), PERMUTATION);\n\n }\n\n}\n", "file_path": "src/api/details.rs", "rank": 7, "score": 141855.9859768273 }, { "content": "#[cfg_attr(test, automock)]\n\npub trait BacaApi {\n\n fn get_cookie(&self, connection_config: &ConnectionConfig) -> Result<String>;\n\n fn get_submit_details(\n\n &self,\n\n connection_config: &ConnectionConfig,\n\n submit_id: &str,\n\n ) -> Result<Submit>;\n\n fn get_results(&self, connection_config: &ConnectionConfig) -> Result<Results>;\n\n fn get_results_by_task(\n\n &self,\n\n connection_config: &ConnectionConfig,\n\n task_id: &str,\n\n ) -> Result<Results>;\n\n fn get_tasks(&self, connection_config: &ConnectionConfig) -> Result<Tasks>;\n\n fn submit(\n\n &self,\n\n connection_config: &ConnectionConfig,\n\n task: &Task,\n\n file_path: &str,\n\n ) -> Result<()>;\n\n fn get_allowed_language(\n\n &self,\n\n connection_config: &ConnectionConfig,\n\n task_id: &str,\n\n ) -> Result<Option<Language>>;\n\n}\n", "file_path": "src/api/baca_api.rs", "rank": 8, "score": 138282.13786142526 }, { "content": "pub fn set_up_with_dir() -> Result<(TempDir, Command), Box<dyn Error>> {\n\n let dir = assert_fs::TempDir::new()?;\n\n let cmd = set_up_command(&dir)?;\n\n Ok((dir, cmd))\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 9, "score": 131923.07094518398 }, { "content": "pub fn set_up_command(dir: &TempDir) -> Result<Command, Box<dyn Error>> {\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n cmd.current_dir(&dir);\n\n cmd.arg(\"-u\");\n\n Ok(cmd)\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 10, "score": 127715.38163593921 }, { "content": "fn save_version<W: Workspace>(workspace: &W) -> error::Result<()> {\n\n let version = BacaRelease::new(env!(\"CARGO_PKG_VERSION\"), \"\");\n\n version.save_config(workspace)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::api;\n\n use crate::api::baca_api::MockBacaApi;\n\n use crate::command::prompt::MockPrompt;\n\n use 
crate::workspace::{ConnectionConfig, MockWorkspace};\n\n\n\n // todo: tests::utils\n\n fn make_mock_connection_config() -> ConnectionConfig {\n\n ConnectionConfig {\n\n host: \"host\".to_string(),\n\n login: \"login\".to_string(),\n\n password: \"pass\".to_string(),\n\n permutation: api::details::permutation(),\n", "file_path": "src/command/init.rs", "rank": 11, "score": 125042.51847949788 }, { "content": "fn check_response_status(response: &Response) -> Result<()> {\n\n if response.status().as_str() == \"404\" {\n\n return Err(Error::InvalidHost);\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/api/baca_service.rs", "rank": 12, "score": 122888.13590580408 }, { "content": "fn extract_cookie(response: &Response) -> Result<String> {\n\n let cookie = response\n\n .cookies()\n\n .next()\n\n .ok_or(Error::InvalidLoginOrPassword)?;\n\n\n\n debug!(\"Cookie: {} = {}\", cookie.name(), cookie.value());\n\n Ok(cookie.value().to_string())\n\n}\n\n\n", "file_path": "src/api/baca_service.rs", "rank": 13, "score": 121116.6386053047 }, { "content": "#[test]\n\nfn on_correct_repo_should_print_last_submit() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"last\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[G] Funkcje sklejane\"))\n\n .stdout(predicate::str::contains(\"C++\"))\n\n .stdout(predicate::str::contains(\"2020-05-17 18:53:09\"))\n\n .stdout(predicate::str::contains(\"4334\"))\n\n .stdout(predicate::str::contains(\"100%\"))\n\n .stdout(predicate::str::contains(\"4/4\"))\n\n .stdout(predicate::str::contains(\"Ok\"))\n\n .stdout(predicate::str::contains(\"test0/0\"))\n\n .stdout(predicate::str::contains(\"test1/0\"))\n\n .stdout(predicate::str::contains(\"test2/0\"))\n\n .stdout(predicate::str::contains(\"test3/0\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/last.rs", "rank": 14, "score": 119318.18927481274 }, { "content": "fn 
_zip_file(path: &Path) -> Result<&Path, Error> {\n\n let filename = path.file_name().unwrap().to_str().ok_or(ErrorKind::Other)?;\n\n let path = path.to_str().ok_or(ErrorKind::Other)?;\n\n\n\n println!(\"Zipping {}.\", filename);\n\n tracing::debug!(\"Relative path: {}.\", path);\n\n\n\n let source = read(path)?;\n\n let buf = File::create(\"source.zip\")?;\n\n let mut zip = zip::ZipWriter::new(buf);\n\n\n\n let options =\n\n zip::write::FileOptions::default().compression_method(zip::CompressionMethod::DEFLATE);\n\n zip.start_file(filename, options)?;\n\n zip.write_all(source.as_ref())?;\n\n zip.finish()?;\n\n\n\n Ok(Path::new(\"source.zip\"))\n\n}\n", "file_path": "src/workspace/zip.rs", "rank": 15, "score": 119175.929707696 }, { "content": "fn check_for_empty_response(resp: String) -> Result<String> {\n\n if resp == EMPTY_RESPONSE {\n\n Err(Error::LoggedOut)\n\n } else {\n\n Ok(resp)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::api;\n\n use crate::model::Language::Unsupported;\n\n use std::fmt::Debug;\n\n\n\n fn make_correct_baca_invalid_session() -> ConnectionConfig {\n\n ConnectionConfig {\n\n host: \"mn2020\".to_string(),\n\n login: \"login\".to_string(),\n\n password: \"pass\".to_string(),\n", "file_path": "src/api/baca_service.rs", "rank": 16, "score": 118659.25911839481 }, { "content": "#[test]\n\nfn filter() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"last\").arg(\"-t\").arg(\"1\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[A] Zera funkcji\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/last.rs", "rank": 17, "score": 118270.73976686597 }, { "content": "#[test]\n\nfn tasks_not_initialized() -> Result<(), Box<dyn std::error::Error>> {\n\n assert_fails_if_not_initialized(&[\"last\"])\n\n}\n\n\n", "file_path": "tests/commands/last.rs", "rank": 18, "score": 115806.73928404166 }, { 
"content": "pub fn make_input_file_cpp(dir: &TempDir) -> Result<ChildPath, Box<dyn std::error::Error>> {\n\n let input_file = dir.child(\"source.cpp\");\n\n input_file.touch()?;\n\n input_file.write_str(\n\n r#\"\n\n \\\\ Hubert Jaremko\n\n #include <iostream>\n\n int main() {\n\n std::cout << \"Hello world\" << std::endl;\n\n return 0;\n\n }\n\n \"#,\n\n )?;\n\n Ok(input_file)\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 19, "score": 114974.63260346532 }, { "content": "pub fn make_input_file_dummy(dir: &TempDir) -> Result<ChildPath, Box<dyn std::error::Error>> {\n\n let input_file = dir.child(\"dummy.txt\");\n\n input_file.touch()?;\n\n input_file.write_str(\n\n r#\"\n\n \\\\ Hubert Jaremko\n\n Dummy text file\n\n \"#,\n\n )?;\n\n Ok(input_file)\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 20, "score": 114974.63260346532 }, { "content": "#[test]\n\nfn saved_task_should_be_used() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n make_input_file_dummy(&dir).unwrap();\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-f\",\n\n \"dummy.txt\",\n\n \"-t\",\n\n \"2\",\n\n \"-l\",\n\n \"Java\",\n\n \"--save\",\n\n ]);\n\n cmd.assert();\n\n\n\n let mut cmd = set_up_command(&dir)?;\n\n cmd.arg(\"submit\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Submitting dummy.txt\"))\n\n .stdout(predicate::str::contains(\"Java\"))\n\n .stdout(predicate::str::contains(\"[B] Metoda Newtona\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 21, "score": 113704.26749504838 }, { "content": "#[test]\n\nfn with_given_1_should_print_last_1() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"log\").arg(\"1\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[G] Funkcje sklejane\"))\n\n .stdout(predicate::str::contains(\"[A] Zera 
funkcji\").not())\n\n .stdout(predicate::str::contains(\"[B] Metoda Newtona\").not())\n\n .stdout(\n\n predicate::str::contains(r#\"[C] FAD\\x3Csup\\x3E2\\x3C/sup\\x3E - Pochodne mieszane\"#)\n\n .not(),\n\n )\n\n .stdout(predicate::str::contains(\"[D] Skalowany Gauss\").not())\n\n .stdout(predicate::str::contains(\"[E] Metoda SOR\").not())\n\n .stdout(predicate::str::contains(\"4334\"))\n\n .stdout(predicate::str::contains(\"4328\").not())\n\n .stdout(predicate::str::contains(\"4326\").not())\n\n .stdout(predicate::str::contains(\"4325\").not());\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 22, "score": 113479.6762350115 }, { "content": "fn add_emoji(str: &str, status: &SubmitStatus) -> String {\n\n match status {\n\n SubmitStatus::Ok => format!(\" ✔️{}\", str),\n\n _ => format!(\" ❌{}\", str),\n\n }\n\n}\n\n\n", "file_path": "src/model/submit.rs", "rank": 23, "score": 111901.8568883361 }, { "content": "#[test]\n\nfn on_corrupted_repo_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n fs::remove_file(dir.baca_config_file_path())?;\n\n\n\n cmd.arg(\"last\");\n\n cmd.assert().stdout(predicate::str::contains(\"corrupted\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/last.rs", "rank": 24, "score": 111278.44375570613 }, { "content": "#[test]\n\nfn no_argument_should_print_last_three() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"log\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[G] Funkcje sklejane\"))\n\n .stdout(predicate::str::contains(\"[A] Zera funkcji\").not())\n\n .stdout(predicate::str::contains(\"[B] Metoda Newtona\").not())\n\n .stdout(\n\n predicate::str::contains(r#\"[C] FAD\\x3Csup\\x3E2\\x3C/sup\\x3E - Pochodne mieszane\"#)\n\n .not(),\n\n )\n\n 
.stdout(predicate::str::contains(\"[D] Skalowany Gauss\").not())\n\n .stdout(predicate::str::contains(\"[E] Metoda SOR\").not())\n\n .stdout(predicate::str::contains(\"4334\"))\n\n .stdout(predicate::str::contains(\"4328\"))\n\n .stdout(predicate::str::contains(\"4326\"))\n\n .stdout(predicate::str::contains(\"4325\").not());\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 25, "score": 111278.44375570613 }, { "content": "fn from_partial_str(input: &str) -> SubmitStatus {\n\n const POSSIBLE_STATUSES: [(&str, SubmitStatus); 11] = [\n\n (\"zaakceptowany\", SubmitStatus::Ok),\n\n (\"przetworzenie\", SubmitStatus::Processing),\n\n (\"odpowiedz\", SubmitStatus::WrongAnswer),\n\n (\"czas\", SubmitStatus::TimeExceeded),\n\n (\"real time\", SubmitStatus::RealTimeExceeded),\n\n (\"brak\", SubmitStatus::NoHeader),\n\n (\"kompilacji\", SubmitStatus::CompileError),\n\n (\"wykonania\", SubmitStatus::RuntimeError),\n\n (\"odrzucone\", SubmitStatus::ManuallyRejected),\n\n (\"testerki\", SubmitStatus::InternalError),\n\n (\"wyjscia\", SubmitStatus::OutputSizeExceeded),\n\n ];\n\n\n\n for (status_str, status) in POSSIBLE_STATUSES {\n\n if input.contains(status_str) {\n\n return status;\n\n }\n\n }\n", "file_path": "src/model/submit_status.rs", "rank": 26, "score": 109585.40305335048 }, { "content": "fn apply_color_according_to_status(str: &str, status: &SubmitStatus) -> ColoredString {\n\n match status {\n\n SubmitStatus::Ok => str.green().bold(),\n\n SubmitStatus::Processing => str.bright_yellow().bold(),\n\n SubmitStatus::InQueue => str.bright_yellow().bold(),\n\n SubmitStatus::WrongAnswer => str.yellow().bold(),\n\n SubmitStatus::TimeExceeded => str.yellow().bold(),\n\n SubmitStatus::CompileError => str.yellow().bold(),\n\n SubmitStatus::NoHeader => str.blue().bold(),\n\n SubmitStatus::RealTimeExceeded => str.yellow().bold(),\n\n SubmitStatus::ManuallyRejected => str.magenta().bold(),\n\n SubmitStatus::RuntimeError => 
str.yellow().bold(),\n\n SubmitStatus::InternalError => str.red().bold(),\n\n SubmitStatus::OutputSizeExceeded => str.yellow().bold(),\n\n }\n\n}\n", "file_path": "src/model/submit.rs", "rank": 27, "score": 107620.94215244352 }, { "content": "#[test]\n\nfn filter_given_invalid_argument_should_print_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"last\").arg(\"-t\").arg(\"asd\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"asd does not exist\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n", "file_path": "tests/commands/last.rs", "rank": 28, "score": 107214.74074117222 }, { "content": "#[cfg_attr(test, automock)]\n\npub trait Workspace {\n\n fn initialize(&self) -> Result<()>;\n\n fn check_if_initialized(&self) -> Result<()>;\n\n fn remove_workspace(&self) -> Result<()>;\n\n fn save_config_object<T: ConfigObject + 'static>(&self, object: &T) -> Result<()>;\n\n fn read_config_object<T: ConfigObject + 'static>(&self) -> Result<T>;\n\n fn remove_config_object<T: ConfigObject + 'static>(&self) -> Result<()>;\n\n fn get_paths(&self) -> WorkspacePaths;\n\n}\n", "file_path": "src/workspace/mod.rs", "rank": 29, "score": 106780.10852728438 }, { "content": "#[test]\n\nfn filter_given_invalid_task_id_should_print_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"last\").arg(\"-t\").arg(\"1123\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"1123 does not exist\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/last.rs", "rank": 30, "score": 105335.32670583802 }, { "content": "pub fn init_logging(level: Level) {\n\n let subscriber = FmtSubscriber::builder()\n\n .without_time()\n\n .with_max_level(level)\n\n .finish();\n\n tracing::subscriber::set_global_default(subscriber).expect(\"setting default subscriber failed\");\n\n 
tracing::debug!(\"Log level: {}\", level);\n\n}\n", "file_path": "src/log.rs", "rank": 31, "score": 104409.3257927208 }, { "content": "fn remove_outer_layer(data: &str) -> String {\n\n data.chars().skip(5).take(data.len() - 13).collect()\n\n}\n\n\n", "file_path": "src/parse/mod.rs", "rank": 32, "score": 101399.5203502595 }, { "content": "fn deserialize(data: &str) -> Vec<String> {\n\n if data.len() < 18 {\n\n return Vec::new();\n\n }\n\n\n\n let data = remove_outer_layer(data);\n\n let data = split_raw(data);\n\n let keys = get_keys(&data);\n\n let values = get_values(&data, keys.len());\n\n map_serialized(&keys, &values)\n\n}\n\n\n", "file_path": "src/parse/mod.rs", "rank": 33, "score": 101252.04477220346 }, { "content": "fn fetch_updates() -> error::Result<UpdateStatus> {\n\n let owner = env::var(\"GITHUB_USER\").unwrap_or_else(|_| \"hjaremko\".to_string());\n\n let repo = env::var(\"GITHUB_REPO\").unwrap_or_else(|_| \"baca-cli\".to_string());\n\n\n\n let gh_service = GithubReleases::new(&owner, &repo);\n\n let checker = UpdateChecker::new(gh_service, update::CURRENT_VERSION);\n\n checker.check_for_updates()\n\n}\n", "file_path": "src/main.rs", "rank": 34, "score": 97185.08885352282 }, { "content": "pub fn get_baca_credentials() -> (String, String, String) {\n\n let login = env::var(\"TEST_BACA_LOGIN\").expect(\"No TEST_BACA_LOGIN provided\");\n\n let pass = env::var(\"TEST_BACA_PASSWORD\").expect(\"No TEST_BACA_PASSWORD provided\");\n\n let host = env::var(\"TEST_BACA_HOST\").expect(\"No TEST_BACA_HOST provided\");\n\n (login, pass, host)\n\n}\n", "file_path": "tests/util/mod.rs", "rank": 35, "score": 96852.52094262866 }, { "content": "#[cfg_attr(test, automock)]\n\npub trait EditorSpawner {\n\n fn default_editor() -> OsString;\n\n fn name(&self) -> &String;\n\n fn spawn_and_wait(&self, path: &Path) -> io::Result<ExitStatus>;\n\n}\n\n\n\npub struct Spawner {\n\n name: String,\n\n}\n\n\n\nimpl Spawner {\n\n pub fn new() -> Self {\n\n Self {\n\n name: 
Self::default_editor().into_string().unwrap(),\n\n }\n\n }\n\n}\n\n\n\nimpl EditorSpawner for Spawner {\n\n fn default_editor() -> OsString {\n", "file_path": "src/workspace/config_editor.rs", "rank": 36, "score": 93442.65033118712 }, { "content": "#[test]\n\nfn filter() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"log\").arg(\"100\").arg(\"-t\").arg(\"2\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[A] Zera funkcji\").not())\n\n .stdout(predicate::str::contains(\"[B] Metoda Newtona\"))\n\n .stdout(\n\n predicate::str::contains(r#\"[C] FAD\\x3Csup\\x3E2\\x3C/sup\\x3E - Pochodne mieszane\"#)\n\n .not(),\n\n )\n\n .stdout(predicate::str::contains(\"[D] Skalowany Gauss\").not())\n\n .stdout(predicate::str::contains(\"[E] Metoda SOR\").not())\n\n .stdout(predicate::str::contains(\"[G] Funkcje sklejane\").not());\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 37, "score": 88789.18871475155 }, { "content": "#[test]\n\nfn success() -> Result<(), Box<dyn std::error::Error>> {\n\n let (login, pass, host) = get_baca_credentials();\n\n let temp = assert_fs::TempDir::new()?;\n\n\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.current_dir(&temp);\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"init\")\n\n .args(&[\"--host\", &host, \"-p\", &pass, \"-l\", &login]);\n\n cmd.assert().code(0);\n\n\n\n assert!(baca_dir_exists(&temp));\n\n assert!(config_exists(&temp));\n\n temp.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/init.rs", "rank": 38, "score": 88789.18871475155 }, { "content": "#[test]\n\nfn tasks_not_initialized() -> Result<(), Box<dyn std::error::Error>> {\n\n assert_fails_if_not_initialized(&[\"tasks\"])\n\n}\n\n\n", "file_path": "tests/commands/tasks.rs", "rank": 39, "score": 87167.82534548384 }, { "content": "#[test]\n\nfn invalid_password() -> Result<(), Box<dyn std::error::Error>> {\n\n let 
temp = assert_fs::TempDir::new()?;\n\n\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.current_dir(&temp);\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"init\")\n\n .args(&[\"--host\", \"mn2020\", \"--login\", \"jaremko\", \"-p\", \"invalid\"]);\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Invalid login or password\"));\n\n\n\n assert!(!baca_dir_exists(&temp));\n\n temp.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/init.rs", "rank": 40, "score": 87167.82534548384 }, { "content": "#[test]\n\nfn tasks_not_initialized() -> Result<(), Box<dyn std::error::Error>> {\n\n assert_fails_if_not_initialized(&[\"log\"])\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 41, "score": 87167.82534548384 }, { "content": "#[test]\n\nfn invalid_host() -> Result<(), Box<dyn std::error::Error>> {\n\n let (login, pass, _) = get_baca_credentials();\n\n let temp = assert_fs::TempDir::new()?;\n\n\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.current_dir(&temp);\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"init\")\n\n .args(&[\"--host\", \"invalid\", \"--login\", &login, \"-p\", &pass]);\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Invalid host\"));\n\n\n\n assert!(!baca_dir_exists(&temp));\n\n temp.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/init.rs", "rank": 42, "score": 87167.82534548384 }, { "content": "#[test]\n\nfn zip_should_zip() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n input_file.path().to_str().unwrap(),\n\n \"--zip\",\n\n \"--no-save\",\n\n ]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Zipping source.cpp\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 43, "score": 87167.82534548384 }, { "content": 
"#[test]\n\nfn tasks_not_initialized() -> Result<(), Box<dyn std::error::Error>> {\n\n assert_fails_if_not_initialized(&[\"details\", \"123\"])\n\n}\n\n\n", "file_path": "tests/commands/details.rs", "rank": 44, "score": 87167.82534548384 }, { "content": "#[test]\n\nfn should_save_version() -> Result<(), Box<dyn std::error::Error>> {\n\n let (login, pass, host) = get_baca_credentials();\n\n let temp = assert_fs::TempDir::new()?;\n\n\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.current_dir(&temp);\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"init\")\n\n .args(&[\"--host\", &host, \"-p\", &pass, \"-l\", &login]);\n\n cmd.assert().code(0);\n\n\n\n let version_path = temp.path().join(\".baca/version\");\n\n assert!(predicate::path::exists().eval(&version_path));\n\n let saved_version = read_to_string(version_path).unwrap();\n\n assert!(predicate::str::contains(env!(\"CARGO_PKG_VERSION\")).eval(&saved_version));\n\n temp.close()?;\n\n Ok(())\n\n}\n", "file_path": "tests/commands/init.rs", "rank": 45, "score": 87167.82534548384 }, { "content": "#[test]\n\nfn no_file_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.args(&[\"submit\", \"-l\", \"C++\", \"-t\", \"2\"]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"provide file\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 46, "score": 85636.56964485624 }, { "content": "#[test]\n\nfn no_argument_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"details\");\n\n cmd.assert().stderr(predicate::str::contains(\n\n \"required arguments were not provided\",\n\n ));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/details.rs", "rank": 47, "score": 85636.56964485624 }, { "content": "#[test]\n\nfn 
no_verbose_should_not_enable_logs() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"tasks\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"INFO\").not())\n\n .stdout(predicate::str::contains(\"DEBUG\").not())\n\n .stdout(predicate::str::contains(\"TRACE\").not())\n\n .stdout(predicate::str::contains(\"ERROR\").not());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/verbose.rs", "rank": 48, "score": 85636.56964485624 }, { "content": "#[test]\n\nfn with_given_more_than_available_should_print_all() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"log\").arg(\"1000000\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[G] Funkcje sklejane\"))\n\n .stdout(predicate::str::contains(\"[A] Zera funkcji\"))\n\n .stdout(predicate::str::contains(\"[B] Metoda Newtona\"))\n\n .stdout(predicate::str::contains(\n\n r#\"[C] FAD\\x3Csup\\x3E2\\x3C/sup\\x3E - Pochodne mieszane\"#,\n\n ))\n\n .stdout(predicate::str::contains(\"[D] Skalowany Gauss\"))\n\n .stdout(predicate::str::contains(\"[E] Metoda SOR\"))\n\n .stdout(predicate::str::contains(\"4334\"))\n\n .stdout(predicate::str::contains(\"4328\"))\n\n .stdout(predicate::str::contains(\"4326\"))\n\n .stdout(predicate::str::contains(\"532\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 49, "score": 85636.56964485624 }, { "content": "#[test]\n\nfn when_rename_as_same_name_then_do_not_rename() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n input_file.path().to_str().unwrap(),\n\n \"--rename\",\n\n \"source.cpp\",\n\n \"--no-save\",\n\n ]);\n\n\n\n 
cmd.assert()\n\n .stdout(predicate::str::contains(\"Submitting source.cpp to task\"))\n\n .stdout(predicate::str::contains(\"Is the task still active?\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 50, "score": 85636.56964485624 }, { "content": "#[test]\n\nfn on_not_initialized_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n assert_fails_if_not_initialized(&[\"refresh\"])\n\n}\n\n\n", "file_path": "tests/commands/refresh.rs", "rank": 51, "score": 85636.56964485624 }, { "content": "#[test]\n\nfn with_invalid_argument_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"log\").arg(\"nan\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Invalid argument\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 52, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn on_correct_repo_should_refresh_cookie() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"refresh\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"New session obtained\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/refresh.rs", "rank": 53, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn one_verbose_should_enable_info() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"-v\").arg(\"tasks\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"INFO\"))\n\n .stdout(predicate::str::contains(\"DEBUG\").not())\n\n .stdout(predicate::str::contains(\"TRACE\").not());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/verbose.rs", "rank": 54, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn 
clear_on_already_clear_should_do_nothing() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"submit\").arg(\"clear\");\n\n cmd.assert();\n\n\n\n assert_submit_config_file_does_not_exist(&dir);\n\n assert_config_file_exists(&dir);\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 55, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn given_absolute_path_should_be_saved() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n input_file.path().to_str().unwrap(),\n\n \"--save\",\n\n ]);\n\n\n\n cmd.assert();\n\n assert_submit_config_file_exists(&dir);\n\n\n\n let submit_config_contents = fs::read_to_string(dir.baca_submit_config_file_path()).unwrap();\n", "file_path": "tests/commands/submit.rs", "rank": 56, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn invalid_filename_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-f\",\n\n \"dummy.txt\",\n\n \"-t\",\n\n \"2\",\n\n \"-l\",\n\n \"C++\",\n\n \"--no-save\",\n\n ]);\n\n\n\n cmd.assert().stdout(predicate::str::contains(\"Error\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 57, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn two_verbose_should_enable_debug() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"-vv\").arg(\"tasks\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"INFO\"))\n\n .stdout(predicate::str::contains(\"DEBUG\"))\n\n 
.stdout(predicate::str::contains(\"TRACE\").not());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/verbose.rs", "rank": 58, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn on_correct_argument_should_print_task() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"details\").arg(\"2796\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[D] Skalowany Gauss\"))\n\n .stdout(predicate::str::contains(\"C++\"))\n\n .stdout(predicate::str::contains(\"2020-04-20 15:39:42\"))\n\n .stdout(predicate::str::contains(\"2796\"))\n\n .stdout(predicate::str::contains(\"74\"))\n\n .stdout(predicate::str::contains(\"2.95\"))\n\n .stdout(predicate::str::contains(\"WrongAnswer\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/details.rs", "rank": 59, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn clear_should_remove_saved_config() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n make_input_file_dummy(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-f\",\n\n \"dummy.txt\",\n\n \"-t\",\n\n \"2\",\n\n \"-l\",\n\n \"Java\",\n\n \"--save\",\n\n ]);\n\n cmd.assert();\n\n\n\n assert_submit_config_file_exists(&dir);\n\n\n\n let mut cmd = set_up_command(&dir)?;\n\n cmd.arg(\"submit\").arg(\"clear\");\n\n cmd.assert();\n\n\n\n assert_submit_config_file_does_not_exist(&dir);\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 60, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn inactive_task_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n 
input_file.path().to_str().unwrap(),\n\n \"--no-save\",\n\n ]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"source.cpp\"))\n\n .stdout(predicate::str::contains(\"[A] Zera funkcji\"))\n\n .stdout(predicate::str::contains(\"C++\"))\n\n .stdout(predicate::str::contains(\"Error sending submit\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 61, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn three_verbose_should_enable_trace() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n\n\n cmd.arg(\"-u\");\n\n cmd.arg(\"-vvv\").arg(\"tasks\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"INFO\"))\n\n .stdout(predicate::str::contains(\"DEBUG\"))\n\n .stdout(predicate::str::contains(\"TRACE\"));\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/commands/verbose.rs", "rank": 62, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn when_zipping_renamed_then_zip_renamed() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n input_file.path().to_str().unwrap(),\n\n \"--rename\",\n\n \"rename.haa\",\n\n \"--zip\",\n\n \"--no-save\",\n\n ]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\n\n \"Submitting source.cpp as rename.haa to task\",\n\n ))\n\n .stdout(predicate::str::contains(\"Zipping rename.haa\"))\n\n .stdout(predicate::str::contains(\"Is the task still active?\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 63, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn on_corrupted_repo_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n 
fs::remove_file(dir.baca_config_file_path())?;\n\n\n\n cmd.arg(\"details\").arg(\"123\");\n\n cmd.assert().stdout(predicate::str::contains(\"corrupted\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n", "file_path": "tests/commands/details.rs", "rank": 64, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn on_corrupted_repo_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n fs::remove_file(dir.baca_config_file_path())?;\n\n\n\n cmd.arg(\"tasks\");\n\n cmd.assert().stdout(predicate::str::contains(\"corrupted\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n", "file_path": "tests/commands/tasks.rs", "rank": 65, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn invalid_task_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n make_input_file_dummy(&dir).unwrap();\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-f\",\n\n \"dummy.txt\",\n\n \"-t\",\n\n \"2123123\",\n\n \"-l\",\n\n \"C++\",\n\n \"--no-save\",\n\n ]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Task no. 
2123123 does not exist\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 66, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn on_corrupted_repo_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n fs::remove_file(dir.baca_config_file_path())?;\n\n\n\n cmd.arg(\"refresh\");\n\n cmd.assert().stdout(predicate::str::contains(\"corrupted\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n", "file_path": "tests/commands/refresh.rs", "rank": 67, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn default_option_should_save_config() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n make_input_file_dummy(&dir).unwrap();\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-f\",\n\n \"dummy.txt\",\n\n \"-t\",\n\n \"2\",\n\n \"-l\",\n\n \"Java\",\n\n \"--save\",\n\n ]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Submit config has been saved.\"))\n\n .stdout(predicate::str::contains(\"Submitting dummy.txt\"))\n\n .stdout(predicate::str::contains(\"Java\"))\n", "file_path": "tests/commands/submit.rs", "rank": 68, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn on_correct_repo_should_print_tasks() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"tasks\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"[A] Zera funkcji\"))\n\n .stdout(predicate::str::contains(\"[B] Metoda Newtona\"))\n\n .stdout(predicate::str::contains(\n\n r#\"[C] FAD\\x3Csup\\x3E2\\x3C/sup\\x3E - Pochodne mieszane\"#,\n\n ))\n\n .stdout(predicate::str::contains(\"[D] Skalowany Gauss\"))\n\n .stdout(predicate::str::contains(\"[E] Metoda SOR\"))\n\n .stdout(predicate::str::contains(\"[F] Interpolacja\"))\n\n .stdout(predicate::str::contains(\"[G] 
Funkcje sklejane\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/tasks.rs", "rank": 69, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn invalid_language_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n make_input_file_dummy(&dir).unwrap();\n\n\n\n cmd.args(&[\"submit\", \"-f\", \"dummy.txt\", \"-t\", \"2\", \"-l\", \"CPlusPlus\"]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"cplusplus is not yet supported\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 70, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn on_corrupted_repo_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n fs::remove_file(dir.baca_config_file_path())?;\n\n\n\n cmd.arg(\"log\");\n\n cmd.assert().stdout(predicate::str::contains(\"corrupted\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 71, "score": 84188.11306618822 }, { "content": "#[test]\n\nfn cmd_options_should_override_saved_task() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n make_input_file_cpp(&dir)?;\n\n make_input_file_dummy(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-f\",\n\n \"dummy.txt\",\n\n \"-t\",\n\n \"2\",\n\n \"-l\",\n\n \"Java\",\n\n \"--save\",\n\n ]);\n\n cmd.assert();\n\n\n\n let mut cmd = set_up_command(&dir)?;\n\n cmd.args(&[\"submit\", \"-f\", \"source.cpp\", \"-l\", \"C++\", \"--no-save\"]);\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Submitting source.cpp\"))\n\n .stdout(predicate::str::contains(\"C++\"))\n\n .stdout(predicate::str::contains(\"[B] Metoda Newtona\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": 
"tests/commands/submit.rs", "rank": 72, "score": 82815.9166418198 }, { "content": "#[test]\n\nfn when_rename_option_then_submit_renamed_file() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n input_file.path().to_str().unwrap(),\n\n \"--rename\",\n\n \"hello.cxx\",\n\n \"--no-save\",\n\n ]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"source.cpp as hello.cxx\"))\n\n .stdout(predicate::str::contains(\"Is the task still active?\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 73, "score": 82815.9166418198 }, { "content": "#[test]\n\nfn given_relative_path_absolute_should_be_saved() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let nested_dir = dir.join(\"test_nested_dir\");\n\n fs::create_dir(nested_dir)?;\n\n let nested_dir = dir.child(\"test_nested_dir\");\n\n\n\n let input_file = nested_dir.child(\"source.cpp\");\n\n input_file.touch()?;\n\n\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n \"./test_nested_dir/source.cpp\",\n\n \"--save\",\n", "file_path": "tests/commands/submit.rs", "rank": 74, "score": 82815.9166418198 }, { "content": "#[test]\n\nfn no_language_on_expired_task_should_report_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n make_input_file_dummy(&dir).unwrap();\n\n\n\n cmd.args(&[\"submit\", \"-f\", \"dummy.txt\", \"-t\", \"2\"]);\n\n\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"still active?\"));\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 75, "score": 
82815.9166418198 }, { "content": "#[test]\n\nfn filter_given_invalid_argument_should_print_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"log\").arg(\"-t\").arg(\"asd\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"asd does not exist\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n", "file_path": "tests/commands/log.rs", "rank": 76, "score": 81514.11228685563 }, { "content": "#[test]\n\nfn update_check_error_if_invalid_repo() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp = assert_fs::TempDir::new()?;\n\n let mut cmd = baca_verbose(&temp)?;\n\n cmd.env(\"GITHUB_REPO\", \"does_not_exists\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Error checking for updates\"))\n\n .success();\n\n\n\n temp.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/update_timestamps/mod.rs", "rank": 77, "score": 81514.11228685563 }, { "content": "#[test]\n\nfn given_just_filename_absolute_path_should_be_saved() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n \"source.cpp\",\n\n \"--save\",\n\n ]);\n\n\n\n cmd.assert().stdout(predicate::str::contains(\"source.cpp\"));\n\n\n\n assert_submit_config_file_exists(&dir);\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 78, "score": 81514.11228685563 }, { "content": "#[test]\n\nfn update_check_timestamp_should_not_be_saved_if_update() -> Result<(), Box<dyn std::error::Error>>\n\n{\n\n let (login, pass, host) = get_baca_credentials();\n\n let temp = assert_fs::TempDir::new()?;\n\n\n\n let mut cmd = baca_verbose_dummy_repo(&temp)?;\n\n cmd.arg(\"init\")\n\n .args(&[\"--host\", &host, \"-p\", &pass, \"-l\", &login]);\n\n cmd.assert()\n\n 
.stdout(predicate::str::contains(\"New version\"))\n\n .success();\n\n\n\n let mut cmd = baca_verbose_dummy_repo(&temp)?;\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"New version\"))\n\n .success();\n\n\n\n let mut cmd = baca_verbose_dummy_repo(&temp)?;\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"New version\"))\n\n .success();\n\n\n\n temp.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/update_timestamps/mod.rs", "rank": 79, "score": 81514.11228685563 }, { "content": "#[test]\n\nfn update_check_timestamp_should_be_saved_if_no_update() -> Result<(), Box<dyn std::error::Error>> {\n\n let (login, pass, host) = get_baca_credentials();\n\n let temp = assert_fs::TempDir::new()?;\n\n\n\n let mut cmd = baca_verbose(&temp)?;\n\n cmd.arg(\"init\")\n\n .args(&[\"--host\", &host, \"-p\", &pass, \"-l\", &login]);\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Checking for updates\"))\n\n .success();\n\n\n\n let mut cmd = baca_verbose(&temp)?;\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Checking for updates\"))\n\n .success();\n\n\n\n let mut cmd = baca_verbose(&temp)?;\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Checking for updates\").not())\n\n .success();\n\n\n\n temp.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/update_timestamps/mod.rs", "rank": 80, "score": 81514.11228685563 }, { "content": "fn as_config_read_error(e: io::Error) -> Error {\n\n match e.kind() {\n\n ErrorKind::NotFound => Error::WorkspaceCorrupted,\n\n _ => Error::OpeningWorkspace(e.into()),\n\n }\n\n}\n\n\n", "file_path": "src/workspace/workspace_dir.rs", "rank": 81, "score": 80833.24765528066 }, { "content": "fn check_for_updates(workspace: &WorkspaceDir, matches: &ArgMatches) {\n\n if matches.is_present(\"noupdate\") {\n\n info!(\"Update check disabled.\");\n\n return;\n\n }\n\n\n\n let now = UpdateCheckTimestamp::now();\n\n let last_check = UpdateCheckTimestamp::read_config(workspace).unwrap();\n\n\n\n if 
matches.is_present(\"force-update\") || last_check.is_expired(&now) {\n\n let updates = fetch_updates();\n\n\n\n if let Err(e) = updates {\n\n error!(\"Error checking for updates: {}\", e);\n\n return;\n\n }\n\n\n\n match updates.unwrap() {\n\n UpdateStatus::NoUpdates => {\n\n info!(\"No updates available.\");\n", "file_path": "src/main.rs", "rank": 82, "score": 80833.24765528066 }, { "content": "fn as_config_create_error(e: io::Error) -> Error {\n\n Error::CreatingWorkspace(e.into())\n\n}\n\n\n", "file_path": "src/workspace/workspace_dir.rs", "rank": 83, "score": 80833.24765528066 }, { "content": "fn as_config_remove_error(e: io::Error) -> Error {\n\n Error::RemovingWorkspace(e.into())\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod tests {\n\n use super::*;\n\n use crate::workspace::ConnectionConfig;\n\n use assert_fs::TempDir;\n\n\n\n pub fn make_temp_workspace(\n\n ) -> std::result::Result<(TempDir, WorkspacePaths, WorkspaceDir), Box<dyn std::error::Error>>\n\n {\n\n let temp_dir = assert_fs::TempDir::new()?;\n\n let mock_paths = WorkspacePaths::_with_root(temp_dir.path());\n\n let workspace = WorkspaceDir::_with_paths(mock_paths.clone());\n\n Ok((temp_dir, mock_paths, workspace))\n\n }\n\n\n\n pub fn make_baca() -> ConnectionConfig {\n", "file_path": "src/workspace/workspace_dir.rs", "rank": 84, "score": 80833.24765528066 }, { "content": "#[test]\n\nfn filter_given_invalid_task_id_should_print_error() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n\n\n cmd.arg(\"log\").arg(\"-t\").arg(\"1123\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"1123 does not exist\"));\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/log.rs", "rank": 85, "score": 80277.41891206958 }, { "content": "pub trait ConfigObject: Serialize + DeserializeOwned + Debug + Sized {\n\n fn save_config<W: Workspace>(&self, workspace: &W) -> Result<()>;\n\n fn read_config<W: 
Workspace>(workspace: &W) -> Result<Self>;\n\n fn remove_config<W: Workspace>(workspace: &W) -> Result<()>;\n\n fn config_filename() -> String;\n\n}\n", "file_path": "src/workspace/config_object.rs", "rank": 86, "score": 79158.97390616284 }, { "content": "fn baca_verbose(temp: &TempDir) -> Result<Command, Box<dyn std::error::Error>> {\n\n let mut cmd = Command::cargo_bin(\"baca\")?;\n\n cmd.current_dir(&temp);\n\n cmd.arg(\"-v\");\n\n Ok(cmd)\n\n}\n\n\n", "file_path": "tests/update_timestamps/mod.rs", "rank": 87, "score": 76554.47939367584 }, { "content": "fn baca_verbose_dummy_repo(temp: &TempDir) -> Result<Command, Box<dyn std::error::Error>> {\n\n let mut cmd = baca_verbose(temp)?;\n\n cmd.env(\"GITHUB_REPO\", \"dummy\");\n\n Ok(cmd)\n\n}\n", "file_path": "tests/update_timestamps/mod.rs", "rank": 88, "score": 74257.81679608724 }, { "content": "fn log_response_details(login_response: &Response) {\n\n for (name, val) in login_response.headers() {\n\n debug!(\"Response header: {} = {:?}\", name, val);\n\n }\n\n\n\n debug!(\"Status code: {}\", login_response.status());\n\n}\n\n\n", "file_path": "src/api/baca_service.rs", "rank": 89, "score": 72956.256852521 }, { "content": "#[cfg_attr(test, automock)]\n\npub trait Prompt {\n\n fn interact(&self) -> error::Result<String>;\n\n}\n\n\n\npub struct Input(pub &'static str);\n\n\n\nimpl Prompt for Input {\n\n fn interact(&self) -> error::Result<String> {\n\n Ok(dialoguer::Input::<String>::new()\n\n .with_prompt(self.0)\n\n .interact()?)\n\n }\n\n}\n\n\n\npub struct Password;\n\n\n\nimpl Prompt for Password {\n\n fn interact(&self) -> error::Result<String> {\n\n Ok(dialoguer::Password::new()\n\n .with_prompt(\"Password\")\n", "file_path": "src/command/prompt.rs", "rank": 90, "score": 63954.53014616307 }, { "content": "fn merge_left<T>(left: &mut Option<T>, right: Option<T>) {\n\n if let Some(right) = right {\n\n let _ = left.insert(right);\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Merge, 
Clone)]\n\npub struct SubmitConfig {\n\n #[merge(strategy = merge_left)]\n\n pub id: Option<String>,\n\n #[merge(strategy = merge_left)]\n\n file: Option<PathBuf>,\n\n #[merge(strategy = merge::bool::overwrite_false)]\n\n pub to_zip: bool,\n\n #[merge(strategy = merge_left)]\n\n pub language: Option<Language>,\n\n #[merge(strategy = merge_left)]\n\n pub rename_as: Option<String>,\n\n}\n\n\n", "file_path": "src/workspace/submit_config.rs", "rank": 91, "score": 62859.280540180844 }, { "content": "pub trait BacaDirectoryPaths {\n\n fn baca_config_file_path(&self) -> Box<Path>;\n\n fn baca_submit_config_file_path(&self) -> Box<Path>;\n\n}\n\n\n\nimpl BacaDirectoryPaths for TempDir {\n\n fn baca_config_file_path(&self) -> Box<Path> {\n\n self.path().join(\".baca/connection\").into_boxed_path()\n\n }\n\n\n\n fn baca_submit_config_file_path(&self) -> Box<Path> {\n\n self.path().join(\".baca/submit\").into_boxed_path()\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 92, "score": 61724.50462395424 }, { "content": "#[cfg_attr(test, automock)]\n\npub trait ReleaseService {\n\n fn get_last_release(&self) -> Result<BacaRelease>;\n\n}\n", "file_path": "src/update/release_service.rs", "rank": 93, "score": 61724.50462395424 }, { "content": "pub trait FromBacaOutput {\n\n fn from_baca_output(connection_config: &ConnectionConfig, data: &str) -> Self;\n\n}\n", "file_path": "src/parse/from_baca_output.rs", "rank": 94, "score": 61724.50462395424 }, { "content": "fn main() {\n\n let yaml = load_yaml!(\"cli.yml\");\n\n let app = App::from_yaml(yaml)\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .setting(AppSettings::ArgRequiredElseHelp);\n\n let matches = app.get_matches();\n\n let workspace = WorkspaceDir::new();\n\n let baca_api = BacaService::default();\n\n\n\n set_logging_level(&matches);\n\n check_for_updates(&workspace, &matches);\n\n\n\n if let (command, Some(sub_matches)) = matches.subcommand() {\n\n if let Err(e) = command::execute(&workspace, &baca_api, command, 
sub_matches) {\n\n error!(\"{:?}\", e);\n\n println!(\"{}\", format!(\"{}\", e).bright_red());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 95, "score": 52857.84730469645 }, { "content": "#[test]\n\nfn not_initialized() {\n\n let (dir, mut cmd) = set_up_with_dir().unwrap();\n\n make_input_file_cpp(&dir).unwrap();\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n \"-f\",\n\n \"source.cpp\",\n\n \"--no-save\",\n\n ]);\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"not initialized\"));\n\n\n\n dir.close().unwrap();\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 96, "score": 51590.526051684385 }, { "content": "#[test]\n\nfn clear_when_init_then_remove_directory() {\n\n let dir = initialize_correct_workspace().unwrap();\n\n let mut cmd = set_up_command(&dir).unwrap();\n\n\n\n assert!(predicate::path::exists().eval(&dir.path().join(\".baca\")));\n\n cmd.arg(\"-v\");\n\n cmd.arg(\"clear\");\n\n cmd.assert();\n\n\n\n assert!(predicate::path::missing().eval(&dir.path().join(\".baca\")));\n\n dir.close().unwrap();\n\n}\n\n\n", "file_path": "tests/commands/clear.rs", "rank": 97, "score": 48279.34019453089 }, { "content": "#[test]\n\nfn clear_when_not_init_then_print_error() {\n\n let (dir, mut cmd) = set_up_with_dir().unwrap();\n\n\n\n cmd.arg(\"-v\");\n\n cmd.arg(\"clear\");\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"not initialized\"));\n\n\n\n assert!(predicate::path::missing().eval(&dir.path().join(\".baca\")));\n\n dir.close().unwrap();\n\n}\n", "file_path": "tests/commands/clear.rs", "rank": 98, "score": 48279.34019453089 }, { "content": "#[test]\n\nfn given_already_saved_when_submit_then_do_not_ask_for_save(\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = initialize_correct_workspace()?;\n\n let mut cmd = set_up_command(&dir)?;\n\n let input_file = make_input_file_cpp(&dir)?;\n\n\n\n cmd.args(&[\n\n \"submit\",\n\n \"-t\",\n\n \"1\",\n\n \"-l\",\n\n \"C++\",\n\n 
\"-f\",\n\n input_file.path().to_str().unwrap(),\n\n \"--save\",\n\n ]);\n\n cmd.assert();\n\n\n\n let mut cmd = set_up_command(&dir)?;\n\n cmd.args(&[\"submit\"]);\n\n cmd.assert()\n\n .stdout(predicate::str::contains(\"Save submit configuration? [Y/n]\").not());\n\n\n\n dir.close()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/commands/submit.rs", "rank": 99, "score": 46406.4755004453 } ]
Rust
src/cmus_status/output/mod.rs
Noah2610/cmus-status-line
04727c8994b3e53c3d715f01500a226711e030d0
mod builder; mod format; pub use format::prelude::*; use super::data::prelude::*; use crate::error::prelude::*; use std::fmt; use builder::StatusOutputBuilder; const OVERFLOW_STR: &str = "..."; pub struct StatusOutput { data: CmusData, format: Format, } impl StatusOutput { pub fn builder() -> StatusOutputBuilder { StatusOutputBuilder::default() } fn get_format_text_for_parts<'a>( &self, parts: Vec<&'a FormatPart>, ) -> String { parts .iter() .filter_map(|part| self.get_format_text(part)) .collect::<Vec<String>>() .join("") } fn get_format_text(&self, part: &FormatPart) -> Option<String> { match part { FormatPart::Text(text) => Some(text.to_string()), FormatPart::Title => self.data.get_title(), FormatPart::Status => Some(self.data.get_status().to_string()), FormatPart::Tag(tag_name) => self.data.get_tag(tag_name), FormatPart::MatchStatus(playback_status, text) => { if self.data.is_status(playback_status) { Some(text.to_string()) } else { None } } FormatPart::Truncate(format_part_inner, max) => { let max = *max; self.get_format_text(format_part_inner.as_ref()) .map(|text| { let mut text = text.to_string(); if text.len() > max { let overflow_str_len = OVERFLOW_STR.len(); if max >= overflow_str_len * 2 { text.truncate(max - overflow_str_len); text.push_str(OVERFLOW_STR); } else { text.truncate(max); } } text }) } FormatPart::HtmlEscape(format_part_inner) => self .get_format_text(format_part_inner.as_ref()) .map(|text| htmlescape::encode_minimal(text.as_str())), FormatPart::ProgressBar(bar_config) => { if let Some(time) = self.data.get_time() { let width = bar_config.inner_width(); let percent_complete = time.completion_percentage(); let characters = (width as f32 * percent_complete).round() as usize; Some(bar_config.text_with_filled(characters)) } else { None } } FormatPart::Container(format_parts_inner) => Some( self.get_format_text_for_parts( format_parts_inner .iter() .map(std::ops::Deref::deref) .collect(), ), ), FormatPart::If(expression, format_part_inner) => { 
if self.is_expression_true(expression) { self.get_format_text(format_part_inner) } else { None } } FormatPart::IfElse( expression, format_part_true, format_part_false, ) => { if self.is_expression_true(expression) { self.get_format_text(format_part_true) } else { self.get_format_text(format_part_false) } } } } fn is_expression_true(&self, expression: &FormatExpression) -> bool { match expression { FormatExpression::True => true, FormatExpression::False => false, FormatExpression::And(expr_one, expr_two) => { self.is_expression_true(expr_one) && self.is_expression_true(expr_two) } FormatExpression::Or(expr_one, expr_two) => { self.is_expression_true(expr_one) || self.is_expression_true(expr_two) } FormatExpression::Not(expr) => !self.is_expression_true(expr), FormatExpression::IsStatus(playback_status) => { self.data.is_status(&playback_status) } FormatExpression::HasTag(tag_name) => self.data.has_tag(&tag_name), } } } impl fmt::Display for StatusOutput { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", self.get_format_text_for_parts(self.format.iter().collect()) ) } }
mod builder; mod format; pub use format::prelude::*; use super::data::prelude::*; use crate::error::prelude::*; use std::fmt; use builder::StatusOutputBuilder; const OVERFLOW_STR: &str = "..."; pub struct StatusOutput { data: CmusData, format: Format, } impl StatusOutput { pub fn builder() -> StatusOutputBuilder { StatusOutputBuilder::default() } fn get_format_text_for_parts<'a>( &self, parts: Vec<&'a FormatPart>, ) -> String { parts .iter() .filter_map(|part| self.get_format_text(part)) .collect::<Vec<String>>() .join("") } fn get_format_text(&self, part: &FormatPart) -> Option<String> { match part { FormatPart::Text(text) => Some(text.to_string()), FormatPart::Title => self.data.get_title(), FormatPart::Status => Some(self.data.get_status().to_string()), FormatPart::Tag(tag_name) => self.data.get_tag(tag_name), FormatPart::MatchStatus(playback_status, text) => { if self.data.is_status(playback_status) { Some(text.to_string()) } else { None } } FormatPart::Truncate(format_part_inner, max) => { let max = *max; self.get_format_text(format_part_inner.as_ref()) .map(|text| { let mut text = text.to_string(); if text.len() > max { let overflow_str_len = OVERFLOW_STR.len(); if max >= overflow_str_len * 2 { text.truncate(max - overflow_str_len); text.push_str(OVERFLOW_STR); } else { text.truncate(max); } } text }) } FormatPart::HtmlEscape(format_part_inner) => self .get_format_text(format_part_inner.as_ref()) .map(|text| htmlescape::encode_minimal(text.as_str())), FormatPart::ProgressBar(bar_config) => {
} FormatPart::Container(format_parts_inner) => Some( self.get_format_text_for_parts( format_parts_inner .iter() .map(std::ops::Deref::deref) .collect(), ), ), FormatPart::If(expression, format_part_inner) => { if self.is_expression_true(expression) { self.get_format_text(format_part_inner) } else { None } } FormatPart::IfElse( expression, format_part_true, format_part_false, ) => { if self.is_expression_true(expression) { self.get_format_text(format_part_true) } else { self.get_format_text(format_part_false) } } } } fn is_expression_true(&self, expression: &FormatExpression) -> bool { match expression { FormatExpression::True => true, FormatExpression::False => false, FormatExpression::And(expr_one, expr_two) => { self.is_expression_true(expr_one) && self.is_expression_true(expr_two) } FormatExpression::Or(expr_one, expr_two) => { self.is_expression_true(expr_one) || self.is_expression_true(expr_two) } FormatExpression::Not(expr) => !self.is_expression_true(expr), FormatExpression::IsStatus(playback_status) => { self.data.is_status(&playback_status) } FormatExpression::HasTag(tag_name) => self.data.has_tag(&tag_name), } } } impl fmt::Display for StatusOutput { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", self.get_format_text_for_parts(self.format.iter().collect()) ) } }
if let Some(time) = self.data.get_time() { let width = bar_config.inner_width(); let percent_complete = time.completion_percentage(); let characters = (width as f32 * percent_complete).round() as usize; Some(bar_config.text_with_filled(characters)) } else { None }
if_condition
[ { "content": "pub fn dump_config() {\n\n print!(\n\n r#\"# DEFAULT CONFIG FOR {name}\n\n# To write this config to the proper config file, run something like:\n\n# mkdir -p ~/.config/{name}\n\n# {name} {cmd_dump_config} > ~/.config/{name}/config.toml\n\n\n\n{config}\"#,\n\n name = crate::meta::NAME,\n\n cmd_dump_config = CliCommand::DumpConfig.name(),\n\n config = crate::config::DEFAULT_CONFIG\n\n );\n\n}\n", "file_path": "src/args/mod.rs", "rank": 0, "score": 81307.2402424431 }, { "content": "pub fn print_version() {\n\n println!(\"{} v{}\", crate::meta::NAME, crate::meta::VERSION)\n\n}\n\n\n", "file_path": "src/args/mod.rs", "rank": 1, "score": 81307.2402424431 }, { "content": "pub fn print_help() {\n\n let opt_help = {\n\n let opt = CliOption::Help;\n\n format!(\"-{}, --{}\", opt.name_single(), opt.name_double())\n\n };\n\n let opt_vers = {\n\n let opt = CliOption::Version;\n\n format!(\"-{}, --{}\", opt.name_single(), opt.name_double())\n\n };\n\n let cmd_status = CliCommand::Status.name();\n\n let cmd_help = CliCommand::Help.name();\n\n let cmd_dump_config = CliCommand::DumpConfig.name();\n\n\n\n println!(\n\n r#\"{description}\n\n\n\nUSAGE:\n\n {name} [OPTIONS] [COMMAND]\n\n\n\nOPTIONS:\n", "file_path": "src/args/mod.rs", "rank": 2, "score": 81307.2402424431 }, { "content": "pub fn get_cmus_status() -> MyResult<StatusOutput> {\n\n let output = get_cmus_remote_output()?;\n\n let cmus_data = CmusData::try_from(output)?;\n\n let config = crate::config::get_config()?;\n\n StatusOutput::builder()\n\n .data(cmus_data)\n\n .format(config.format)\n\n .build()\n\n}\n\n\n", "file_path": "src/cmus_status/mod.rs", "rank": 3, "score": 77417.9916278617 }, { "content": "fn get_cmus_remote_output() -> MyResult<String> {\n\n match Command::new(\"cmus-remote\").arg(\"-Q\").output() {\n\n Ok(output) => {\n\n if output.status.success() {\n\n Ok(String::from_utf8(output.stdout).unwrap())\n\n } else {\n\n Err(Error::CmusError {\n\n status: output.status,\n\n stderr: 
String::from_utf8(output.stderr).unwrap(),\n\n })\n\n }\n\n }\n\n Err(_) => Err(Error::CmusNotInstalled),\n\n }\n\n}\n", "file_path": "src/cmus_status/mod.rs", "rank": 4, "score": 72799.68497583439 }, { "content": "pub fn get_config() -> MyResult<Config> {\n\n if let Some(conf_path) = get_config_file() {\n\n if let Ok(mut file) = File::open(&conf_path) {\n\n let mut file_content = String::new();\n\n file.read_to_string(&mut file_content).unwrap();\n\n Config::from_str(file_content.as_str()).map_err(|e| {\n\n if let Error::FailedParsingConfig(None, msg) = e {\n\n Error::FailedParsingConfig(Some(conf_path), msg)\n\n } else {\n\n e\n\n }\n\n })\n\n } else {\n\n default_config()\n\n }\n\n } else {\n\n default_config()\n\n }\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 5, "score": 72606.89864123146 }, { "content": "pub fn print_cmus_status() -> MyResult<()> {\n\n let cmus_status = get_cmus_status()?;\n\n println!(\"{}\", cmus_status);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cmus_status/mod.rs", "rank": 6, "score": 72105.17169227486 }, { "content": "pub fn run() -> error::MyResult<()> {\n\n use action::prelude::*;\n\n\n\n match action()? 
{\n\n Action::Status => cmus_status::print_cmus_status(),\n\n Action::Help => {\n\n args::print_help();\n\n Ok(())\n\n }\n\n Action::Version => {\n\n args::print_version();\n\n Ok(())\n\n }\n\n Action::DumpConfig => {\n\n args::dump_config();\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 7, "score": 58242.93720710659 }, { "content": "pub fn action() -> MyResult<Action> {\n\n let args = Args::new()?;\n\n\n\n let mut action_opt = args\n\n .commands\n\n .iter()\n\n .try_fold((None, 0), |(_, cmd_index), cmd| {\n\n let act_or_err: MyResult<Action> = match cmd {\n\n CliCommand::Status => {\n\n if cmd_index == 0 {\n\n if args.options.is_empty() {\n\n Ok(Action::Status)\n\n } else {\n\n Err(Error::CommandTakesNoOptions(\n\n cmd.name().to_string(),\n\n ))\n\n }\n\n } else {\n\n Err(Error::InvalidCommandLen(args.commands.to_string()))\n\n }\n", "file_path": "src/action.rs", "rank": 8, "score": 58242.93720710659 }, { "content": " IfElse(FormatExpression, Box<FormatPart>, Box<FormatPart>),\n\n}\n\n\n\nimpl From<Box<FormatPart>> for FormatPart {\n\n fn from(b: Box<FormatPart>) -> Self {\n\n *b\n\n }\n\n}\n\n\n\nimpl<'a> From<Box<&'a FormatPart>> for &'a FormatPart {\n\n fn from(b: Box<&'a FormatPart>) -> Self {\n\n *b\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\n#[serde(try_from = \"String\")]\n\npub struct ProgressBarConfig {\n\n pub start: Option<char>,\n\n pub end: Option<char>,\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 9, "score": 48456.99891114499 }, { "content": "use std::convert::TryFrom;\n\n\n\nuse super::FormatExpression;\n\nuse crate::cmus_status::data::CmusPlaybackStatus;\n\nuse crate::error::prelude::*;\n\n\n\n#[derive(Deserialize)]\n\npub enum FormatPart {\n\n /// Just print the given text.\n\n /// This whole variant can be represented as a string.\n\n /// __Config example:__\n\n /// ```toml\n\n /// format = \"Hello from the status-line!\"\n\n /// ```\n\n Text(String),\n\n\n\n /// Prints the currently 
playing song's name.\n\n Title,\n\n\n\n /// Prints the `CmusPlaybackStatus` of the playing song.\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 10, "score": 48455.08941667049 }, { "content": " pub full: char,\n\n pub empty: char,\n\n total_width: usize,\n\n}\n\n\n\nimpl ProgressBarConfig {\n\n pub fn inner_width(&self) -> usize {\n\n self.total_width\n\n - if self.start.is_some() { 1 } else { 0 }\n\n - if self.end.is_some() { 1 } else { 0 }\n\n }\n\n\n\n pub fn text_with_filled(&self, filled_characters: usize) -> String {\n\n assert!(self.total_width > filled_characters);\n\n\n\n let mut s = String::new();\n\n if let Some(start) = self.start {\n\n s.push(start);\n\n }\n\n s.push_str(self.full.to_string().repeat(filled_characters).as_str());\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 11, "score": 48453.945913690055 }, { "content": " /// The total length of the string is also the printed length.\n\n ProgressBar(ProgressBarConfig),\n\n\n\n /// A list of `FormatPart`s.\n\n /// Useful with `FormatPart::If`.\n\n /// __Config example:__\n\n /// ```toml\n\n /// format = \"\"\"\n\n /// %{ Container([\n\n /// Text(\"Hello \"),\n\n /// Text(\"World! \"),\n\n /// Status,\n\n /// ]) }\n\n /// \"\"\"\n\n /// ```\n\n Container(Vec<Box<FormatPart>>),\n\n\n\n /// `if` conditional. 
If the `FormatExpression` returns `true`,\n\n /// then `FormatPart` is printed.\n\n /// __Config example:__\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 12, "score": 48453.33203663729 }, { "content": " Status,\n\n\n\n /// Prints the tag value for the given tag name.\n\n /// If the tag doesn't exist, prints nothing.\n\n Tag(String),\n\n\n\n /// TODO: Deprecated, use with `FormatPart::If` conditional.\n\n /// If the first argument's status is the current `CmusPlaybackStatus`,\n\n /// then, print the given string.\n\n /// The `CmusPlaybackStatus` can be one of:\n\n /// - Playing\n\n /// - Paused\n\n /// - Stopped\n\n MatchStatus(CmusPlaybackStatus, String),\n\n\n\n /// Truncate the given `FormatPart` to the given length (`usize`).\n\n /// Max length is inclusive.\n\n /// __Config example:__\n\n /// ```toml\n\n /// format = \"%{ Truncate(Status, 60) }\"\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 13, "score": 48452.432248706864 }, { "content": " /// ```\n\n Truncate(Box<FormatPart>, usize), // Inclusive\n\n\n\n /// Run `htmlescape::encode_minimal` on the wrapped\n\n /// `FormatPart`'s resulting string.\n\n HtmlEscape(Box<FormatPart>),\n\n\n\n /// Prints a ProgressBar with the given `ProgressBarConfig`.\n\n /// `ProgressBarConfig` can be a string such as:\n\n /// __Config example:__\n\n /// ```toml\n\n /// format = \"\"\"\n\n /// %{ ProgressBar(\"<###--->\") }\n\n /// \"\"\"\n\n /// ```\n\n /// ... where the first and last characters (`<`,`>`) are used as the start and end\n\n /// characters of the bar, respectively. 
The second character in the string (`#`) is used\n\n /// as the \"full\" character, and the second to last as the \"empty\" (`-`) character.\n\n /// The \"full\" characters are printed if the playback percentage of the track has reached that\n\n /// point, the \"empty\" characters if it hasn't.\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 14, "score": 48450.52330609581 }, { "content": " s.push_str(\n\n self.empty\n\n .to_string()\n\n .repeat(self.inner_width() - filled_characters)\n\n .as_str(),\n\n );\n\n if let Some(end) = self.end {\n\n s.push(end);\n\n }\n\n s\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for ProgressBarConfig {\n\n type Error = Error;\n\n fn try_from(s: String) -> MyResult<Self> {\n\n let chars = s.chars().collect::<Vec<char>>();\n\n let len = chars.len();\n\n if len < 2 {\n\n Err(Error::ProgressBarConfigMinLen(2, s))\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 15, "score": 48450.09779751215 }, { "content": " /// ```toml\n\n /// format = \"\"\"\n\n /// %{ If(IsStatus(Playing),\n\n /// Text(\"Cmus is playing a song!\")) }\n\n /// \"\"\"\n\n /// ```\n\n If(FormatExpression, Box<FormatPart>),\n\n\n\n /// `if/else` conditional. 
If the `FormatExpression` returns `true`,\n\n /// then the _first_ `FormatPart` is printed,\n\n /// otherwise the _second_ `FormatPart` is printed.\n\n /// __Config example:__\n\n /// ```toml\n\n /// format = \"\"\"\n\n /// %{ IfElse(\n\n /// HasTag(\"artist\"),\n\n /// Tag(\"artist\"),\n\n /// Text(\"unknown artist\")) }\n\n /// \"\"\"\n\n /// ```\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 16, "score": 48449.91379722508 }, { "content": " } else if len == 2 {\n\n Ok(ProgressBarConfig {\n\n start: None,\n\n end: None,\n\n full: *chars.get(0).unwrap(),\n\n empty: *chars.get(1).unwrap(),\n\n total_width: len,\n\n })\n\n } else if len == 3 {\n\n Ok(ProgressBarConfig {\n\n start: Some(*chars.get(0).unwrap()),\n\n end: None,\n\n full: *chars.get(1).unwrap(),\n\n empty: *chars.get(2).unwrap(),\n\n total_width: len,\n\n })\n\n } else {\n\n Ok(ProgressBarConfig {\n\n start: Some(*chars.get(0).unwrap()),\n\n end: Some(*chars.get(len - 1).unwrap()),\n\n full: *chars.get(1).unwrap(),\n\n empty: *chars.get(len - 2).unwrap(),\n\n total_width: len,\n\n })\n\n }\n\n }\n\n}\n", "file_path": "src/cmus_status/output/format/format_part.rs", "rank": 17, "score": 48444.07678916523 }, { "content": "fn default_config() -> MyResult<Config> {\n\n Config::from_toml(DEFAULT_CONFIG)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 18, "score": 45930.36948036309 }, { "content": "fn get_config_file() -> Option<PathBuf> {\n\n CONFIG_FILES.iter().find_map(|filepath| {\n\n let path = if filepath.starts_with(KEYWORD_CONFIG_DIR) {\n\n let filepath_without_keyword = filepath\n\n .replace(&format!(\"{}/\", KEYWORD_CONFIG_DIR), \"\")\n\n .replace(KEYWORD_CONFIG_DIR, \"\");\n\n if let Some(mut path) = get_config_dir() {\n\n path.push(filepath_without_keyword);\n\n path\n\n } else {\n\n PathBuf::from(filepath_without_keyword)\n\n }\n\n } else {\n\n PathBuf::from(filepath)\n\n };\n\n if path.is_file() {\n\n Some(path)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", 
"file_path": "src/config/mod.rs", "rank": 19, "score": 42973.69603500521 }, { "content": "fn get_config_dir() -> Option<PathBuf> {\n\n dirs::config_dir().map(|mut d| {\n\n d.push(crate::meta::NAME);\n\n d\n\n })\n\n}\n", "file_path": "src/config/mod.rs", "rank": 20, "score": 42973.69603500521 }, { "content": " type Error = Error;\n\n\n\n fn try_from(string: String) -> Result<Self, Self::Error> {\n\n const STATUS_NAME: &str = \"status\";\n\n const FILE_NAME: &str = \"file\";\n\n const TIME_DURATION_NAME: &str = \"duration\";\n\n const TIME_POSITION_NAME: &str = \"position\";\n\n const TAG_NAME: &str = \"tag\";\n\n const SETTINGS_NAME: &str = \"set\";\n\n\n\n let mut status = None;\n\n let mut file = None;\n\n let mut time_duration = None;\n\n let mut time_position = None;\n\n let mut tags = HashMap::new();\n\n\n\n for line in string.trim().split(\"\\n\") {\n\n let words = line.split_whitespace().collect::<Vec<&str>>();\n\n let data_name =\n\n words.first().ok_or(Error::CmusParseError(format!(\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 21, "score": 42563.41700518293 }, { "content": " \"Couldn't get data identifier name from `cmus-remote -Q` \\\n\n (first word per line)\\nOutput:\\n{}\",\n\n string\n\n )))?;\n\n let data_words = words\n\n .iter()\n\n .skip(1)\n\n .map(Deref::deref)\n\n .collect::<Vec<&str>>();\n\n let data_line = data_words.join(\" \");\n\n\n\n match *data_name {\n\n STATUS_NAME => {\n\n status =\n\n Some(CmusPlaybackStatus::try_from(data_line.as_str())?);\n\n }\n\n FILE_NAME => {\n\n file = Some(PathBuf::from(data_line.as_str()));\n\n }\n\n TIME_DURATION_NAME => {\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 22, "score": 42561.403028171706 }, { "content": "mod playback_status;\n\nmod time;\n\n\n\npub mod prelude {\n\n pub use super::CmusData;\n\n pub use super::CmusPlaybackStatus;\n\n pub use super::{CmusTime, Seconds};\n\n}\n\n\n\npub use playback_status::CmusPlaybackStatus;\n\npub use time::{CmusTime, Seconds};\n\n\n\nuse 
crate::error::prelude::*;\n\nuse std::collections::HashMap;\n\nuse std::convert::TryFrom;\n\nuse std::ops::Deref;\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Debug)]\n\npub struct CmusData {\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 23, "score": 42560.31082469158 }, { "content": " }\n\n\n\n pub fn get_tag(&self, tag_name: &str) -> Option<String> {\n\n self.tags.get(tag_name).cloned()\n\n }\n\n\n\n pub fn has_tag(&self, tag_name: &str) -> bool {\n\n self.tags.contains_key(tag_name)\n\n }\n\n\n\n pub fn is_status(&self, other_status: &CmusPlaybackStatus) -> bool {\n\n &self.status == other_status\n\n }\n\n\n\n pub fn get_time(&self) -> &Option<CmusTime> {\n\n &self.time\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for CmusData {\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 24, "score": 42559.96420561601 }, { "content": " status: CmusPlaybackStatus,\n\n file: Option<PathBuf>,\n\n time: Option<CmusTime>,\n\n tags: HashMap<String, String>,\n\n settings: CmusSettings,\n\n}\n\n\n\nimpl CmusData {\n\n pub fn get_title(&self) -> Option<String> {\n\n self.file.as_ref().map(|file| {\n\n file.file_stem()\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n .replace(\"_\", \" \")\n\n })\n\n }\n\n\n\n pub fn get_status(&self) -> &CmusPlaybackStatus {\n\n &self.status\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 25, "score": 42559.00811853469 }, { "content": " time_duration = Some(data_line.parse::<Seconds>().or(\n\n Err(Error::CouldntParseTimeToNumber(line.into())),\n\n )?);\n\n }\n\n TIME_POSITION_NAME => {\n\n time_position = Some(data_line.parse::<Seconds>().or(\n\n Err(Error::CouldntParseTimeToNumber(line.into())),\n\n )?);\n\n }\n\n TAG_NAME => {\n\n let tag_name = data_words\n\n .get(0)\n\n .ok_or(Error::CmusExpectDataArguments(1, line.into()))?\n\n .to_string();\n\n let tag_value = data_words\n\n .iter()\n\n .skip(1)\n\n .map(Deref::deref)\n\n .collect::<Vec<&str>>()\n\n .join(\" \");\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 26, 
"score": 42558.327934335095 }, { "content": " tags.insert(tag_name, tag_value);\n\n }\n\n SETTINGS_NAME => {\n\n // TODO\n\n }\n\n _ => return Err(Error::CmusUnknownData(line.into())),\n\n }\n\n }\n\n\n\n Ok(Self {\n\n status: status.ok_or(Error::CmusMissingData(STATUS_NAME.into()))?,\n\n file: file,\n\n time: time_duration\n\n .and_then(|duration| {\n\n time_position\n\n .and_then(|position| Some((duration, position)))\n\n })\n\n .map(|(duration, position)| CmusTime {\n\n duration: duration,\n\n position: position,\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 27, "score": 42553.43235790307 }, { "content": " }),\n\n tags,\n\n // TODO\n\n settings: CmusSettings {},\n\n })\n\n }\n\n}\n\n\n\n// TODO\n\n#[derive(Debug)]\n\npub struct CmusSettings {}\n", "file_path": "src/cmus_status/data/mod.rs", "rank": 28, "score": 42551.86788339459 }, { "content": " parts: Vec<FormatPart>,\n\n}\n\n\n\nimpl Format {\n\n pub fn iter(&self) -> std::slice::Iter<FormatPart> {\n\n self.parts.iter()\n\n }\n\n\n\n fn try_from_string(string: String) -> MyResult<Self> {\n\n let re =\n\n Regex::new(r\"(%\\{\\s*(?P<keyword>(.|\\s)+?)\\s*\\})|(?P<text>.+?)\")\n\n .unwrap();\n\n let mut parts = Vec::new();\n\n\n\n for caps in re.captures_iter(string.as_str()) {\n\n if let Some(keyword) = caps.name(\"keyword\") {\n\n let keyword = keyword.as_str();\n\n let part =\n\n ron::de::from_str::<FormatPart>(keyword).or_else(|e| {\n\n Err(Error::FailedParsingConfig(None, format!(\"{}\", e)))\n", "file_path": "src/cmus_status/output/format/mod.rs", "rank": 29, "score": 40681.30951129148 }, { "content": " })?;\n\n parts.push(part);\n\n }\n\n if let Some(text) = caps.name(\"text\") {\n\n if let Some(prev_text) = parts.last_mut().and_then(|last| {\n\n if let FormatPart::Text(prev_text) = last {\n\n Some(prev_text)\n\n } else {\n\n None\n\n }\n\n }) {\n\n prev_text.push_str(text.as_str());\n\n } else {\n\n parts.push(FormatPart::Text(text.as_str().into()));\n\n }\n\n }\n\n }\n\n\n\n Ok(Self { parts 
})\n\n }\n", "file_path": "src/cmus_status/output/format/mod.rs", "rank": 30, "score": 40676.516747468486 }, { "content": "mod format_expression;\n\nmod format_part;\n\n\n\npub mod prelude {\n\n pub use super::format_expression::FormatExpression;\n\n pub use super::format_part::FormatPart;\n\n pub use super::Format;\n\n}\n\n\n\npub use prelude::*;\n\n\n\nuse crate::error::prelude::*;\n\nuse regex::Regex;\n\nuse std::convert::TryFrom;\n\n\n\nconst DEFAULT_FORMAT: &str = r#\"NO FORMAT\"#;\n\n\n\n#[derive(Deserialize)]\n\n#[serde(default, try_from = \"String\")]\n\npub struct Format {\n", "file_path": "src/cmus_status/output/format/mod.rs", "rank": 31, "score": 40676.45070004878 }, { "content": "}\n\n\n\nimpl TryFrom<&str> for Format {\n\n type Error = Error;\n\n fn try_from(string: &str) -> MyResult<Self> {\n\n Self::try_from_string(string.to_string())\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for Format {\n\n type Error = Error;\n\n fn try_from(string: String) -> MyResult<Self> {\n\n Self::try_from_string(string)\n\n }\n\n}\n\n\n\nimpl Default for Format {\n\n fn default() -> Self {\n\n Format::try_from(DEFAULT_FORMAT).unwrap()\n\n }\n\n}\n", "file_path": "src/cmus_status/output/format/mod.rs", "rank": 32, "score": 40673.81663921063 }, { "content": "fn main() {\n\n use std::process;\n\n\n\n match cmus_status_line::run() {\n\n Ok(_) => (),\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n process::exit(1);\n\n }\n\n }\n\n}\n", "file_path": "src/app.rs", "rank": 33, "score": 33446.916782201384 }, { "content": "use crate::cmus_status::data::CmusPlaybackStatus;\n\n\n\n#[derive(Deserialize)]\n\npub enum FormatExpression {\n\n /// Always returns `true`.\n\n True,\n\n /// Always returns `false`.\n\n False,\n\n /// Returns `true` if both of the given expressions are `true`.\n\n And(Box<FormatExpression>, Box<FormatExpression>),\n\n /// Returns `true` if either of the given expressions are `true`.\n\n Or(Box<FormatExpression>, Box<FormatExpression>),\n\n /// Inverts the given 
expression.\n\n Not(Box<FormatExpression>),\n\n /// Returns `true` if the given `CmusPlaybackStatus`\n\n /// is the currently playing song's `CmusPlaybackStatus`.\n\n IsStatus(CmusPlaybackStatus),\n\n /// Returns `true` if the given tag is set for the current track.\n\n HasTag(String),\n\n}\n", "file_path": "src/cmus_status/output/format/format_expression.rs", "rank": 34, "score": 27603.151791358752 }, { "content": "use super::*;\n\n\n\n#[derive(Default)]\n\npub struct StatusOutputBuilder {\n\n data: Option<CmusData>,\n\n format: Option<Format>,\n\n}\n\n\n\nimpl StatusOutputBuilder {\n\n pub fn data(mut self, data: CmusData) -> Self {\n\n self.data = Some(data);\n\n self\n\n }\n\n\n\n pub fn format(mut self, format: Format) -> Self {\n\n self.format = Some(format);\n\n self\n\n }\n\n\n\n pub fn build(self) -> MyResult<StatusOutput> {\n\n Ok(StatusOutput {\n\n data: self.data.ok_or(Error::CmusStatusNoData)?,\n\n format: self.format.unwrap_or_else(Default::default),\n\n })\n\n }\n\n}\n", "file_path": "src/cmus_status/output/builder.rs", "rank": 35, "score": 23291.15760832997 }, { "content": "use crate::error::prelude::*;\n\nuse std::convert::TryFrom;\n\nuse std::fmt;\n\n\n\npub type Seconds = u32;\n\n\n\n#[derive(Debug, PartialEq, Deserialize)]\n\npub enum CmusPlaybackStatus {\n\n Playing,\n\n Paused,\n\n Stopped,\n\n}\n\n\n\nimpl TryFrom<&str> for CmusPlaybackStatus {\n\n type Error = Error;\n\n\n\n fn try_from(status_str: &str) -> MyResult<Self> {\n\n const STATUS_PLAYING: &str = \"playing\";\n\n const STATUS_PAUSED: &str = \"paused\";\n\n const STATUS_STOPPED: &str = \"stopped\";\n", "file_path": "src/cmus_status/data/time.rs", "rank": 36, "score": 22417.023462735913 }, { "content": "\n\n match status_str.to_lowercase().as_str() {\n\n STATUS_PLAYING => Ok(CmusPlaybackStatus::Playing),\n\n STATUS_PAUSED => Ok(CmusPlaybackStatus::Paused),\n\n STATUS_STOPPED => Ok(CmusPlaybackStatus::Stopped),\n\n s => Err(Error::CmusUnknownStatus(s.into())),\n\n }\n\n 
}\n\n}\n\n\n\nimpl fmt::Display for CmusPlaybackStatus {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n CmusPlaybackStatus::Playing => \"Playing\",\n\n CmusPlaybackStatus::Paused => \"Paused\",\n\n CmusPlaybackStatus::Stopped => \"Stopped\",\n\n }\n", "file_path": "src/cmus_status/data/time.rs", "rank": 37, "score": 22413.24812010225 }, { "content": " )\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CmusTime {\n\n pub duration: Seconds,\n\n pub position: Seconds,\n\n}\n\n\n\nimpl CmusTime {\n\n pub fn completion_percentage(&self) -> f32 {\n\n self.position as f32 / self.duration as f32\n\n }\n\n}\n", "file_path": "src/cmus_status/data/time.rs", "rank": 38, "score": 22411.431576174597 }, { "content": " /// RON or TOML string\n\n pub fn from_str(s: &str) -> MyResult<Self> {\n\n Self::from_ron(s).or_else(|_| Self::from_toml(s))\n\n }\n\n\n\n fn from_ron(ron: &str) -> MyResult<Self> {\n\n ron::de::from_str(ron)\n\n .map_err(|e| Error::FailedParsingConfig(None, e.to_string()))\n\n }\n\n\n\n fn from_toml(toml: &str) -> MyResult<Self> {\n\n toml::de::from_str(toml)\n\n .map_err(|e| Error::FailedParsingConfig(None, e.to_string()))\n\n }\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 39, "score": 22128.316403798835 }, { "content": "\n\npub use commands::{CliCommand, CliCommands};\n\npub use options::{CliOption, CliOptions};\n\n\n\nuse crate::error::prelude::*;\n\nuse std::convert::TryFrom;\n\nuse std::env;\n\n\n\npub struct Args {\n\n pub commands: CliCommands,\n\n pub options: CliOptions,\n\n}\n\n\n\nimpl Args {\n\n pub fn new() -> MyResult<Self> {\n\n let (commands, options) = env::args().skip(1).try_fold(\n\n (CliCommands::default(), CliOptions::default()),\n\n |(mut commands, mut options), arg| {\n\n if let Ok(opts) = CliOptions::try_from(arg.as_str()) {\n\n options.0.append(&mut opts.into());\n", "file_path": "src/args/mod.rs", "rank": 40, "score": 22127.708915779687 }, { "content": "use 
crate::cmus_status::output::Format;\n\nuse crate::error::prelude::*;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::path::PathBuf;\n\n\n\npub const DEFAULT_CONFIG: &str = include_str!(\"../../config.toml\");\n\nconst KEYWORD_CONFIG_DIR: &str = \"<CONFIG_DIR>\";\n\nconst CONFIG_FILES: [&str; 3] = [\n\n \"./config.toml\",\n\n \"<CONFIG_DIR>/format.ron\",\n\n \"<CONFIG_DIR>/config.toml\",\n\n];\n\n\n\n#[derive(Deserialize)]\n\npub struct Config {\n\n pub format: Format,\n\n}\n\n\n\nimpl Config {\n", "file_path": "src/config/mod.rs", "rank": 41, "score": 22127.005434765164 }, { "content": "pub mod prelude {\n\n pub use super::Args;\n\n pub use super::CliCommand;\n\n pub use super::CliCommands;\n\n pub use super::CliOption;\n\n pub use super::CliOptions;\n\n}\n\n\n\nmod names {\n\n pub(super) const CMD_HELP: &str = \"help\";\n\n pub(super) const CMD_STATUS: &str = \"status\";\n\n pub(super) const CMD_DUMP_CONFIG: &str = \"dump-config\";\n\n pub(super) const OPT_DOUBLE_HELP: &str = \"help\";\n\n pub(super) const OPT_DOUBLE_VERSION: &str = \"version\";\n\n pub(super) const OPT_SINGLE_HELP: char = 'h';\n\n pub(super) const OPT_SINGLE_VERSION: char = 'v';\n\n}\n\n\n\nmod commands;\n\nmod options;\n", "file_path": "src/args/mod.rs", "rank": 42, "score": 22126.867444586216 }, { "content": " Ok((commands, options))\n\n } else {\n\n if let Ok(cmd) = CliCommand::try_from(arg.as_str()) {\n\n commands.0.push(cmd);\n\n Ok((commands, options))\n\n } else {\n\n Err(Error::InvalidArgument(arg))\n\n }\n\n }\n\n },\n\n )?;\n\n\n\n Ok(Self { commands, options })\n\n }\n\n}\n\n\n", "file_path": "src/args/mod.rs", "rank": 43, "score": 22119.792836225442 }, { "content": " {opt_help:<opt_width$} Print this help message and exit.\n\n {opt_vers:<opt_width$} Print version information and exit.\n\n\n\nCOMMANDS:\n\n {cmd_status}\n\n Print the current cmus playback status\n\n with the format configured in the config.toml file.\n\n This is the default command, so you may omit this 
argument.\n\n {cmd_dump_config}\n\n Print the default config as TOML to stdout.\n\n To write the default config to the proper config file, run something like:\n\n mkdir -p ~/.config/{name}\n\n {name} {cmd_dump_config} > ~/.config/{name}/config.toml\n\n {cmd_help}\n\n Print this help message and exit.\"#,\n\n description = crate::meta::DESCRIPTION,\n\n name = crate::meta::NAME,\n\n opt_width = 16,\n\n opt_help = opt_help,\n\n opt_vers = opt_vers,\n\n cmd_status = cmd_status,\n\n cmd_help = cmd_help,\n\n cmd_dump_config = cmd_dump_config,\n\n );\n\n}\n\n\n", "file_path": "src/args/mod.rs", "rank": 44, "score": 22115.643465314304 }, { "content": "use crate::error::prelude::*;\n\nuse std::convert::TryFrom;\n\nuse std::fmt;\n\n\n\n#[derive(Debug, PartialEq, Deserialize)]\n\npub enum CmusPlaybackStatus {\n\n Playing,\n\n Paused,\n\n Stopped,\n\n}\n\n\n\nimpl TryFrom<&str> for CmusPlaybackStatus {\n\n type Error = Error;\n\n\n\n fn try_from(status_str: &str) -> Result<Self, Self::Error> {\n\n const STATUS_PLAYING: &str = \"playing\";\n\n const STATUS_PAUSED: &str = \"paused\";\n\n const STATUS_STOPPED: &str = \"stopped\";\n\n\n\n match status_str.to_lowercase().as_str() {\n", "file_path": "src/cmus_status/data/playback_status.rs", "rank": 45, "score": 21462.009057112533 }, { "content": " STATUS_PLAYING => Ok(CmusPlaybackStatus::Playing),\n\n STATUS_PAUSED => Ok(CmusPlaybackStatus::Paused),\n\n STATUS_STOPPED => Ok(CmusPlaybackStatus::Stopped),\n\n s => Err(Error::CmusUnknownStatus(s.into())),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for CmusPlaybackStatus {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n CmusPlaybackStatus::Playing => \"Playing\",\n\n CmusPlaybackStatus::Paused => \"Paused\",\n\n CmusPlaybackStatus::Stopped => \"Stopped\",\n\n }\n\n )\n\n }\n\n}\n", "file_path": "src/cmus_status/data/playback_status.rs", "rank": 46, "score": 21454.24605098353 }, { "content": "mod data;\n\npub mod 
output;\n\n\n\nuse crate::error::prelude::*;\n\nuse std::convert::TryFrom;\n\nuse std::process::Command;\n\n\n\nuse data::CmusData;\n\nuse output::StatusOutput;\n\n\n", "file_path": "src/cmus_status/mod.rs", "rank": 47, "score": 21094.60494814357 }, { "content": "use super::names;\n\nuse regex::Regex;\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(Default)]\n\npub struct CliCommands(pub(super) Vec<CliCommand>);\n\n\n\nimpl CliCommands {\n\n pub fn iter(&self) -> std::slice::Iter<CliCommand> {\n\n self.0.iter()\n\n }\n\n}\n\n\n\nimpl ToString for CliCommands {\n\n fn to_string(&self) -> String {\n\n self.0\n\n .iter()\n\n .map(CliCommand::name)\n\n .collect::<Vec<&str>>()\n\n .join(\" \")\n", "file_path": "src/args/commands.rs", "rank": 56, "score": 16.386592047856414 }, { "content": " Example: `HtmlEscape(Title)`\n\n\n\n- __`ProgressBar(String)`__ \n\n Returns a progress bar for the playback of the currently playing song. \n\n The given string acts as a config for which characters to use. \n\n The first and last characters of the string are used as the boundary characters of the bar. \n\n The second and second to last characters are used as the _full_ and _empty_ characters. \n\n The total length of the string is the length of the progress bar. \n\n\n\n Example: `ProgressBar(\"<##-->\")` will use `<>` as the bar boundary characters, \n\n the `#` as the _full_ character, and the `-` as the _empty_ character. \n\n The progress bar will have a length of `6` characters.\n\n\n\n- __`Container(Vec<FormatPart>)`__ \n\n This wraps multiple `FormatPart`s into a single one. \n\n Useful in combination with other `FormatPart`s. 
\n\n\n\n Example:\n\n ```\n\n Truncate(Container([\n\n Text(\"progress: \"),\n\n ProgressBar(\"<##-->\"),\n\n Text(\" title: \"),\n\n Title,\n\n ]), 60)\n\n ```\n\n which will truncate the combined length of the bar, \n\n the song title, and some static text to 60 characters or less.\n\n\n\n- __`If(FormatExpression, FormatPart)`__ \n\n Returns the evaluated `FormatPart`, if the `FormatExpression` returns `true`. \n\n See the section on `FormatExpression` for available expressions. \n\n\n\n Example:\n\n ```\n\n Container([\n\n If(\n\n IsStatus(Playing),\n\n Title,\n\n ),\n\n If(\n\n IsStatus(Paused),\n\n Text(\"PAUSED\"),\n\n ),\n\n ])\n\n ```\n\n\n\n- __`IfElse(FormatExpression, FormatPart, FormatPart)`__ \n\n If the given `FormatExpression` returns `true`, then \n\n returns the _first_ `FormatPart`, otherwise returns the _second_ `FormatPart`.\n\n\n\n Example:\n\n ```\n\n Container([\n\n IfElse(\n\n IsStatus(Playing),\n\n Title,\n\n Text(\"not playing\"),\n\n ),\n\n ])\n\n ```\n\n\n", "file_path": "README.md", "rank": 57, "score": 12.155085820881963 }, { "content": "use std::fmt;\n\nuse std::path::PathBuf;\n\nuse std::process::ExitStatus;\n\n\n\npub mod prelude {\n\n pub use super::Error;\n\n pub use super::MyResult;\n\n}\n\n\n\npub type MyResult<T> = Result<T, Error>;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n CmusNotInstalled,\n\n CmusError { status: ExitStatus, stderr: String },\n\n CmusParseError(String),\n\n CmusUnknownData(String),\n\n CmusExpectDataArguments(usize, String),\n\n CmusUnknownStatus(String),\n\n CouldntParseTimeToNumber(String),\n", "file_path": "src/error.rs", "rank": 58, "score": 11.866939149011031 }, { "content": "use crate::args::prelude::*;\n\nuse crate::error::prelude::*;\n\n\n\npub mod prelude {\n\n pub use super::action;\n\n pub use super::Action;\n\n}\n\n\n\npub enum Action {\n\n Status,\n\n Help,\n\n Version,\n\n DumpConfig,\n\n}\n\n\n\nimpl Default for Action {\n\n fn default() -> Self {\n\n Action::Status\n\n }\n\n}\n\n\n", 
"file_path": "src/action.rs", "rank": 59, "score": 11.739567016953124 }, { "content": "### The `format` key\n\nThe configuration has a `format` key, which is a string. \n\n\n\nAny plain text in the string is simply printed in the format, \n\nso a `format` string with this value:\n\n```\n\nformat = \"my cmus status!\"\n\n```\n\nwould simply print `my cmus status!`. \n\nAny new-line characters are ignored. \n\nTo add dynamic content, you can use the `%{...}` syntax to inject information, \n\nfor example:\n\n```\n\nformat = \"playing song: %{Title}\"\n\n```\n\nwould replace the `%{Title}` part with the currently playing song's title. \n\nWe call the `Title` part a `FormatPart`.\n\n\n\n### `FormatPart`\n\n[`enum FormatPart`](https://github.com/Noah2610/cmus-status-line/blob/master/src/cmus_status/output/format/format_part.rs#L8) \n\nAny of the following format parts can be used \n\nin the `format` string inside `%{...}` blocks. \n\nThey will be replaced with a string value.\n\n\n\n- __`Text(String)`__ \n\n Returns the given string.\n\n\n\n- __`Title`__ \n\n Returns the currently playing song's title. \n\n Any underscores (`_`) will be replaced with spaces (` `).\n\n\n\n- __`Status`__ \n\n Returns the current playback status (`CmusPlaybackStatus`), \n\n which can be one of:\n\n - `Playing`\n\n - `Paused`\n\n - `Stopped`\n\n\n\n- __`Tag(String)`__\n\n Returns the _tag_ meta value for the given tag name \n\n (such as \"artist\", \"album\", \"tracknumber\"). \n\n Returns nothing if the tag doesn't exist.\n\n\n\n Example: `Tag(\"artist\")`\n\n\n\n- __`Truncate(FormatPart, usize)`__ \n\n Returns the wrapped `FormatPart`'s return string, \n\n truncated to the given `usize` length. \n\n\n\n Example: `Truncate(Title, 20)` \n\n which will return the full title of the song, \n\n if it has less than or exactly `20` characters. 
\n\n If it has less, the title will be truncated to `20` characters, \n\n with trailing `...` characters.\n\n\n\n- __`HtmlEscape(FormatPart)`__ \n\n Uses the [`htmlescape::encode_minimal`][htmlescape_encode_minimal] function, to escape \n\n any HTML syntax such as `<>&` from the wrapped `FormatPart`. \n\n\n", "file_path": "README.md", "rank": 60, "score": 10.776497260699616 }, { "content": "use super::names;\n\nuse regex::Regex;\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(Default)]\n\npub struct CliOptions(pub(super) Vec<CliOption>);\n\n\n\nimpl CliOptions {\n\n pub fn iter(&self) -> std::slice::Iter<CliOption> {\n\n self.0.iter()\n\n }\n\n\n\n pub fn has(&self, option: &CliOption) -> bool {\n\n self.0.iter().any(|opt| opt == option)\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.0.is_empty()\n\n }\n\n}\n", "file_path": "src/args/options.rs", "rank": 61, "score": 10.352708697320285 }, { "content": " CmusMissingData(String),\n\n CmusStatusNoData,\n\n NoFormat,\n\n InvalidFormatKeyword(String),\n\n ProgressBarConfigMinLen(usize, String),\n\n FailedParsingConfig(Option<PathBuf>, String),\n\n NoConfig,\n\n InvalidArgument(String),\n\n InvalidCommandLen(String),\n\n InvalidCommandOption(String, String),\n\n CommandTakesNoOptions(String),\n\n}\n\n\n\nimpl Error {\n\n fn message(&self) -> String {\n\n match self {\n\n Error::CmusNotInstalled => \"cmus is not installed.\".to_string(),\n\n Error::CmusError { status, stderr } => format!(\n\n \"cmus exited with status code {}\\nstderr: {}\",\n\n status, stderr,\n", "file_path": "src/error.rs", "rank": 62, "score": 9.841860436905662 }, { "content": " ),\n\n Error::CmusMissingData(data_name) => {\n\n format!(\"missing required data from cmus-remote: {}\", data_name)\n\n }\n\n Error::CmusStatusNoData => \"CmusStatusBuilder needs CmusData, set \\\n\n with `CmusStatusBuilder::data` method\"\n\n .to_string(),\n\n Error::NoFormat => {\n\n \"No output format given for status line\".to_string()\n\n }\n\n 
Error::InvalidFormatKeyword(keyword) => format!(\n\n \"Given format keyword '{}' is not a valid keyword\",\n\n keyword,\n\n ),\n\n Error::ProgressBarConfigMinLen(min_len, config) => format!(\n\n \"ProgressBar config string must be at least {} characters \\\n\n long: {}\",\n\n min_len, config,\n\n ),\n\n Error::FailedParsingConfig(Some(filepath), e) => {\n", "file_path": "src/error.rs", "rank": 63, "score": 9.03559676506702 }, { "content": " }\n\n}\n\n\n\npub enum CliCommand {\n\n Status,\n\n Help,\n\n DumpConfig,\n\n}\n\n\n\nimpl CliCommand {\n\n pub fn name(&self) -> &str {\n\n match self {\n\n CliCommand::Status => names::CMD_STATUS,\n\n CliCommand::Help => names::CMD_HELP,\n\n CliCommand::DumpConfig => names::CMD_DUMP_CONFIG,\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for CliCommand {\n", "file_path": "src/args/commands.rs", "rank": 64, "score": 8.831202495695429 }, { "content": "extern crate dirs;\n\nextern crate htmlescape;\n\nextern crate regex;\n\nextern crate ron;\n\n#[macro_use]\n\nextern crate serde;\n\nextern crate toml;\n\n\n\npub mod action;\n\npub mod args;\n\npub mod cmus_status;\n\npub mod config;\n\npub mod error;\n\npub mod meta;\n\n\n", "file_path": "src/lib.rs", "rank": 65, "score": 8.30621511285106 }, { "content": "\n\nimpl Into<Vec<CliOption>> for CliOptions {\n\n fn into(self) -> Vec<CliOption> {\n\n self.0\n\n }\n\n}\n\n\n\nimpl From<Vec<CliOption>> for CliOptions {\n\n fn from(opts: Vec<CliOption>) -> Self {\n\n Self(opts)\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for CliOptions {\n\n type Error = ();\n\n fn try_from(s: &str) -> Result<Self, Self::Error> {\n\n let re = Regex::new(r#\"^\\s*(?P<dashes>--?)(?P<name>\\S+)\\s*$\"#).unwrap();\n\n if let Some(caps) = re.captures(s) {\n\n if let Some(name) = caps.name(\"name\").map(|m| m.as_str()) {\n\n let dashes = caps.name(\"dashes\").ok_or(())?.as_str().len();\n", "file_path": "src/args/options.rs", "rank": 66, "score": 7.9349344169565885 }, { "content": " Help,\n\n Version,\n\n}\n\n\n\nimpl 
CliOption {\n\n pub fn name_single(&self) -> char {\n\n match self {\n\n CliOption::Help => names::OPT_SINGLE_HELP,\n\n CliOption::Version => names::OPT_SINGLE_VERSION,\n\n }\n\n }\n\n\n\n pub fn name_double(&self) -> &str {\n\n match self {\n\n CliOption::Help => names::OPT_DOUBLE_HELP,\n\n CliOption::Version => names::OPT_DOUBLE_VERSION,\n\n }\n\n }\n\n}\n", "file_path": "src/args/options.rs", "rank": 67, "score": 7.931969189020963 }, { "content": "### `FormatExpression`\n\n[`enum FormatExpression`](https://github.com/Noah2610/cmus-status-line/blob/master/src/cmus_status/output/format/format_expression.rs#L4) \n\nA `FormatExpression` can be used as the first argument to \n\n`If` `FormatPart`s. They will always evaluate to either `true` or `false`.\n\n\n\n- __`True`__ \n\n Always returns `true`.\n\n\n\n- __`False`__ \n\n Always returns `false`.\n\n\n\n- __`And(FormatExpression, FormatExpression)`__ \n\n Returns `true` if both of the given `FormatExpression`s evaluate to `true`.\n\n\n\n- __`Or(FormatExpression, FormatExpression)`__ \n\n Returns `true` if either of the given `FormatExpression`s evaluate to `true`.\n\n\n\n- __`Not(FormatExpression)`__ \n\n Inverts the given expression.\n\n\n\n- __`IsStatus(CmusPlaybackStatus)`__ \n\n Returns `true` if the given `CmusPlaybackStatus` \n\n is the currently playing song's status.\n\n `CmusPlaybackStatus` can be one of:\n\n - `Playing`\n\n - `Paused`\n\n - `Stopped`\n\n\n\n Example:\n\n ```\n\n If(\n\n IsStatus(Playing),\n\n Container([\n\n Text(\"playing song: \"),\n\n Title,\n\n ]),\n\n ),\n\n ```\n\n\n\n- __`HasTag(String)`__ \n\n Returns `true` if the given tag name is set for the current track.\n\n Returns `false` if the tag doesn't exist on the track.\n\n\n\n---\n\n\n\n## License\n\nDistributed under the terms of the [MIT license][license].\n\n\n\n[releases]: https://github.com/Noah2610/cmus-status-line/releases\n\n[default_config]: https://github.com/Noah2610/cmus-status-line/blob/master/config.toml\n\n[crates.io]: 
https://crates.io/crates/cmus-status-line\n\n[htmlescape_encode_minimal]: https://docs.rs/htmlescape/0.3.1/htmlescape/fn.encode_minimal.html\n\n[license]: https://github.com/Noah2610/cmus-status-line/blob/master/LICENSE\n", "file_path": "README.md", "rank": 68, "score": 6.295313379566094 }, { "content": " type Error = ();\n\n fn try_from(s: &str) -> Result<Self, Self::Error> {\n\n let re = Regex::new(r#\"^\\s*(?P<name>\\w+\\S*)\\s*$\"#).unwrap();\n\n if let Some(name) = re\n\n .captures(s)\n\n .and_then(|caps| caps.name(\"name\"))\n\n .map(|m| m.as_str())\n\n {\n\n match name {\n\n names::CMD_STATUS => Ok(CliCommand::Status),\n\n names::CMD_HELP => Ok(CliCommand::Help),\n\n names::CMD_DUMP_CONFIG => Ok(CliCommand::DumpConfig),\n\n _ => Err(()),\n\n }\n\n } else {\n\n Err(())\n\n }\n\n }\n\n}\n", "file_path": "src/args/commands.rs", "rank": 69, "score": 6.100585534438254 }, { "content": "pub const NAME: &str = env!(\"CARGO_PKG_NAME\");\n\npub const VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\npub const DESCRIPTION: &str = env!(\"CARGO_PKG_DESCRIPTION\");\n", "file_path": "src/meta.rs", "rank": 70, "score": 5.965125548850718 }, { "content": "# cmus-status-line\n\n<details>\n\n<summary>\n\n Table of Contents\n\n</summary>\n\n\n\n- [Description](#description)\n\n- [Installation](#installation)\n\n - [Binaries](#binaries)\n\n - [Install from crates.io](#install-from-cratesio)\n\n- [Usage](#usage)\n\n- [Configuration](#configuration)\n\n - [Simple configuration example](#simple-configuration-example)\n\n - [The `format` key](#the-format-key)\n\n - [`FormatPart`](#formatpart)\n\n - [`FormatExpression`](#formatexpression)\n\n- [License](#license)\n\n\n\n---\n\n</details>\n\n\n\n## Description\n\nPrints the current `cmus` playback status in a customizable format to stdout. \n\nExample output with default config:\n\n```\n\n$ cmus-status-line # When PLAYING\n\n Undertale - Megalovania  <###----->\n\n\n\n$ cmus-status-line # When PAUSED\n\n Underta... 
<#-->\n\n```\n\n\n\n## Installation\n\n### Binaries\n\nBinaries for __Linux__ and __Windows__ are available from the [GitHub releases][releases] page. \n\n__Note:__ Windows binaries are not tested, if there is any problem please let me know by opening an issue!\n\n\n\n### Install from [crates.io]\n\n```\n\ncargo install cmus-status-line\n\n```\n\n\n", "file_path": "README.md", "rank": 71, "score": 5.9356137653714995 }, { "content": " ),\n\n Error::CmusParseError(msg) => {\n\n format!(\"cmus-remote parsing error: {}\", msg)\n\n }\n\n Error::CmusUnknownData(data_line) => {\n\n format!(\"cmus-remote returned unknown data: {}\", data_line)\n\n }\n\n Error::CmusExpectDataArguments(expected_args, data_line) => {\n\n format!(\n\n \"expected {} arguments for data line from cmus-remote: {}\",\n\n expected_args, data_line\n\n )\n\n }\n\n Error::CmusUnknownStatus(status) => {\n\n format!(\"cmus-remote returned unknown status: {}\", status)\n\n }\n\n Error::CouldntParseTimeToNumber(time_string) => format!(\n\n \"couldn't parse string to number, expected to be string of \\\n\n seconds: {}\",\n\n time_string\n", "file_path": "src/error.rs", "rank": 72, "score": 5.608692149148096 }, { "content": " }\n\n CliCommand::Help => Ok(Action::Help),\n\n CliCommand::DumpConfig => Ok(Action::DumpConfig),\n\n };\n\n match act_or_err {\n\n Ok(act) => Ok((Some(act), cmd_index + 1)),\n\n Err(e) => Err(e),\n\n }\n\n })?\n\n .0;\n\n\n\n if action_opt.is_none() {\n\n action_opt = args.options.iter().find_map(|opt| match opt {\n\n CliOption::Help => Some(Action::Help),\n\n CliOption::Version => Some(Action::Version),\n\n });\n\n }\n\n\n\n Ok(action_opt.unwrap_or_else(Action::default))\n\n}\n", "file_path": "src/action.rs", "rank": 73, "score": 5.356057479829445 }, { "content": "}\n\n\n\nimpl std::error::Error for Error {\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"ERROR:\\n{}\\nExiting.\", self.message())\n\n }\n\n}\n", 
"file_path": "src/error.rs", "rank": 74, "score": 5.121369965457641 }, { "content": "\n\n match dashes {\n\n // DOUBLE\n\n 2 => match name {\n\n names::OPT_DOUBLE_HELP => {\n\n Ok(vec![CliOption::Help].into())\n\n }\n\n names::OPT_DOUBLE_VERSION => {\n\n Ok(vec![CliOption::Version].into())\n\n }\n\n _ => Err(()),\n\n },\n\n // SINGLE\n\n 1 => Ok(name\n\n .chars()\n\n .try_fold(Vec::new(), |mut opts, c| match c {\n\n names::OPT_SINGLE_HELP => {\n\n opts.push(CliOption::Help);\n\n Ok(opts)\n\n }\n", "file_path": "src/args/options.rs", "rank": 75, "score": 4.4802459904231196 }, { "content": "## Usage\n\nSimply run the command without any arguments \n\nto get the formatted cmus playback status:\n\n```\n\n$ cmus-status-line\n\n Undertale - Megalovania  <###----->\n\n```\n\n\n\nFor more details, see `cmus-status-line --help`:\n\n```\n\nPrints cmus playback information in a configurable format to stdout\n\n\n\nUSAGE:\n\n cmus-status-line [OPTIONS] [COMMAND]\n\n\n\nOPTIONS:\n\n -h, --help Print this help message and exit.\n\n -v, --version Print version information and exit.\n\n\n\nCOMMANDS:\n\n status\n\n Print the current cmus playback status\n\n with the format configured in the config.toml file.\n\n This is the default command, so you may omit this argument.\n\n dump-config\n\n Print the default config as TOML to stdout.\n\n To write the default config to the proper config file, run something like:\n\n mkdir -p ~/.config/cmus-status-line\n\n cmus-status-line dump-config > ~/.config/cmus-status-line/config.toml\n\n help\n\n Print this help message and exit.\n\n```\n\n\n\n## Configuration\n\nThe goal for this project, was to make the status line's format highly configurable. \n\nYou can configure the format as a string in the `config.toml` file. 
\n\nTo get started, run the following to dump the default config to the proper config directory: \n\n(This assumes you are on Linux, for Windows or MacOS find your appropriate config directory here: \n\nhttps://docs.rs/dirs/2.0.2/dirs/fn.config_dir.html)\n\n```\n\nmkdir -p ~/.config/cmus-status-line\n\ncmus-status-line dump-config > ~/.config/cmus-status-line/config.toml\n\n```\n\n\n\nThe default configuration is in the [`config.toml`][default_config] file.\n\n\n\n### Simple configuration example\n\nHere's a small and simple configuration example to get you started, \n\nif you don't want to / don't have the time to read the details:\n\n```\n\nformat = \"\"\"\n\n%{Title} - %{ProgressBar(\"<####---->\")}\n\n\"\"\"\n\n```\n\n\n", "file_path": "README.md", "rank": 76, "score": 3.465113683061059 }, { "content": " format!(\"failed parsing config file at {:?}\\n{}\", filepath, e)\n\n }\n\n Error::FailedParsingConfig(None, e) => {\n\n format!(\"failed parsing config TOML\\n{}\", e)\n\n }\n\n Error::NoConfig => \"no config was given\".to_string(),\n\n Error::InvalidArgument(arg) => {\n\n format!(\"invalid argument '{}'\", arg)\n\n }\n\n Error::InvalidCommandLen(arg) => {\n\n format!(\"invalid command arguments '{}'\", arg)\n\n }\n\n Error::InvalidCommandOption(cmd, opt) => {\n\n format!(\"invalid option '{}' for command '{}'\", opt, cmd)\n\n }\n\n Error::CommandTakesNoOptions(cmd) => {\n\n format!(\"command '{}' takes no options\", cmd)\n\n }\n\n }\n\n }\n", "file_path": "src/error.rs", "rank": 77, "score": 2.8268675918207067 }, { "content": " names::OPT_SINGLE_VERSION => {\n\n opts.push(CliOption::Version);\n\n Ok(opts)\n\n }\n\n _ => Err(()),\n\n })?\n\n .into()),\n\n _ => Err(()),\n\n }\n\n } else {\n\n Err(())\n\n }\n\n } else {\n\n Err(())\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub enum CliOption {\n", "file_path": "src/args/options.rs", "rank": 78, "score": 2.081718192361171 } ]
Rust
risc0/zkvm/sdk/rust/serde/src/deserializer.rs
risc0/risc0
2e8a4959a4b4247a1d4b35678af20ab184317931
use serde::de::{Deserialize, DeserializeSeed, IntoDeserializer, Visitor}; use crate::{ align_up, err::{Error, Result}, }; pub fn from_slice<'a, T: Deserialize<'a>>(slice: &'a [u32]) -> Result<T> { let mut deserializer = Deserializer::new(slice); T::deserialize(&mut deserializer) } pub struct Deserializer<'de> { slice: &'de [u32], } struct SeqAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'de, 'a> serde::de::SeqAccess<'de> for SeqAccess<'a, 'de> { type Error = Error; fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>> where T: DeserializeSeed<'de>, { if self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de, 'a> serde::de::VariantAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; fn unit_variant(self) -> Result<()> { Ok(()) } fn newtype_variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, self) } fn tuple_variant<V: Visitor<'de>>(self, len: usize, visitor: V) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, len, visitor) } fn struct_variant<V: Visitor<'de>>( self, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, fields.len(), visitor) } } impl<'de, 'a> serde::de::EnumAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; type Variant = Self; fn variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<(V::Value, Self)> { let tag = self.try_take_word()?; let val = DeserializeSeed::deserialize(seed, tag.into_deserializer())?; Ok((val, self)) } } struct MapAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'a, 'de: 'a> serde::de::MapAccess<'de> for MapAccess<'a, 'de> { type Error = Error; fn next_key_seed<K: DeserializeSeed<'de>>(&mut self, seed: K) -> Result<Option<K::Value>> { if 
self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn next_value_seed<V: DeserializeSeed<'de>>(&mut self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, &mut *self.deserializer) } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de> Deserializer<'de> { pub fn new(slice: &'de [u32]) -> Self { Deserializer { slice } } fn try_take_word(&mut self) -> Result<u32> { if self.slice.len() >= 1 { let (head, tail) = self.slice.split_first().unwrap(); self.slice = tail; Ok(*head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_dword(&mut self) -> Result<u64> { if self.slice.len() >= 2 { let (head, tail) = self.slice.split_at(2); self.slice = tail; let low: u64 = head[0].into(); let high: u64 = head[1].into(); Ok(low | high << 32) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n(&mut self, len: usize) -> Result<&'de [u32]> { if self.slice.len() >= len { let (head, tail) = self.slice.split_at(len); self.slice = tail; Ok(head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n_bytes(&mut self, len: usize) -> Result<&'de [u8]> { let len_words = align_up(len, 4) / 4; let words: &'de [u32] = self.try_take_n(len_words)?; Ok(&bytemuck::cast_slice(words)[..len]) } } impl<'de, 'a> serde::Deserializer<'de> for &'a mut Deserializer<'de> { type Error = Error; fn is_human_readable(&self) -> bool { false } fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let val = match self.try_take_word()? { 0 => false, 1 => true, _ => return Err(Error::DeserializeBadBool), }; visitor.visit_bool(val) } fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? 
as i32) } fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i64(self.try_take_dword()? as i64) } fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u64(self.try_take_dword()?) } fn deserialize_f32<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_f64<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_char<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; if len_bytes > 4 { return Err(Error::DeserializeBadChar); } let bytes: &'de [u8] = self.try_take_n_bytes(len_bytes)?; let character = core::str::from_utf8(&bytes) .map_err(|_| Error::DeserializeBadChar)? .chars() .next() .ok_or(Error::DeserializeBadChar)?; visitor.visit_char(character) } fn deserialize_str<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? 
as usize; let bytes = self.try_take_n_bytes(len_bytes)?; let str = core::str::from_utf8(bytes).map_err(|_| Error::DeserializeBadUtf8)?; visitor.visit_borrowed_str(str) } fn deserialize_string<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_str(visitor) } fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; let bytes = self.try_take_n_bytes(len_bytes)?; visitor.visit_borrowed_bytes(bytes) } fn deserialize_byte_buf<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_bytes(visitor) } fn deserialize_option<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { match self.try_take_word()? { 0 => visitor.visit_none(), 1 => visitor.visit_some(self), _ => Err(Error::DeserializeBadOption), } } fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_unit() } fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_unit(visitor) } fn deserialize_newtype_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_newtype_struct(self) } fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? as usize; visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple_struct<V>( self, _name: &'static str, len: usize, visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(len, visitor) } fn deserialize_map<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? 
as usize; visitor.visit_map(MapAccess { deserializer: self, len, }) } fn deserialize_struct<V>( self, _name: &'static str, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(fields.len(), visitor) } fn deserialize_enum<V>( self, _name: &'static str, _variants: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_enum(self) } fn deserialize_identifier<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_ignored_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } } #[cfg(test)] mod test { use alloc::string::String; use super::*; #[test] fn test_struct() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { bool: bool, i8: i8, u8: u8, i16: i16, u16: u16, i32: i32, u32: u32, i64: i64, u64: u64, } let words = [ 1, -4_i32 as u32, 4, -5_i32 as u32, 5, -6_i32 as u32, 6, -7_i32 as u32, 0xffffffff, 7, 0x00000000, ]; let expected = Test { bool: true, i8: -4, u8: 4, i16: -5, u16: 5, i32: -6, u32: 6, i64: -7, u64: 7, }; assert_eq!(expected, from_slice(&words).unwrap()); } #[test] fn test_str() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { first: String, second: String, } let words = [1, 0x00000061, 3, 0x00636261]; let expected = Test { first: "a".into(), second: "abc".into(), }; assert_eq!(expected, from_slice(&words).unwrap()); } }
use serde::de::{Deserialize, DeserializeSeed, IntoDeserializer, Visitor}; use crate::{ align_up, err::{Error, Result}, }; pub fn from_slice<'a, T: Deserialize<'a>>(slice: &'a [u32]) -> Result<T> { let mut deserializer = Deserializer::new(slice); T::deserialize(&mut deserializer) } pub struct Deserializer<'de> { slice: &'de [u32], } struct SeqAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'de, 'a> serde::de::SeqAccess<'de> for SeqAccess<'a, 'de> { type Error = Error; fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>> where T: DeserializeSeed<'de>, { if self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de, 'a> serde::de::VariantAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; fn unit_variant(self) -> Result<()> { Ok(()) } fn newtype_variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, self) } fn tuple_variant<V: Visitor<'de>>(self, len: usize, visitor: V) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, len, visitor) } fn struct_variant<V: Visitor<'de>>( self, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, fields.len(), visitor) } } impl<'de, 'a> serde::de::EnumAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; type Variant = Self; fn variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<(V::Value, Self)> { let tag = self.try_take_word()?; let val = DeserializeSeed::deserialize(seed, tag.into_deserializer())?; Ok((val, self)) } } struct MapAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'a, 'de: 'a> serde::de::MapAccess<'de> for MapAccess<'a, 'de> { type Error = Error; fn next_key_seed<K: DeserializeSeed<'de>>(&mut self, seed: K) -> Result<Option<K::Value>> { if 
self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn next_value_seed<V: DeserializeSeed<'de>>(&mut self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, &mut *self.deserializer) } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de> Deserializer<'de> { pub fn new(slice: &'de [u32]) -> Self { Deserializer { slice } } fn try_take_word(&mut self) -> Result<u32> { if self.slice.len() >= 1 { let (head, tail) = self.slice.split_first().unwrap(); self.slice = tail; Ok(*head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_dword(&mut self) -> Result<u64> { if self.slice.len() >= 2 { let (head, tail) = self.slice.split_at(2); self.slice = tail; let low: u64 = head[0].into(); let high: u64 = head[1].into(); Ok(low | high << 32) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n(&mut self, len: usize) -> Result<&'de [u32]> { if self.slice.len() >= len { let (head, tail) = self.slice.split_at(len); self.slice = tail; Ok(head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n_bytes(&mut self, len: usize) -> Result<&'de [u8]> { let len_words = align_up(len, 4) / 4; let words: &'de [u32] = self.try_take_n(len_words)?; Ok(&bytemuck::cast_slice(words)[..len]) } } impl<'de, 'a> serde::Deserializer<'de> for &'a mut Deserializer<'de> { type Error = Error; fn is_human_readable(&self) -> bool { false } fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let val = match self.try_take_word()? { 0 => false, 1 => true, _ => return Err(Error::DeserializeBadBool), }; visitor.visit_bool(val) } fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? 
as i32) } fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i64(self.try_take_dword()? as i64) } fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u64(self.try_take_dword()?) } fn deserialize_f32<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_f64<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_char<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; if len_bytes > 4 { return Err(Error::DeserializeBadChar); } let bytes: &'de [u8] = self.try_take_n_bytes(len_bytes)?;
visitor.visit_char(character) } fn deserialize_str<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; let bytes = self.try_take_n_bytes(len_bytes)?; let str = core::str::from_utf8(bytes).map_err(|_| Error::DeserializeBadUtf8)?; visitor.visit_borrowed_str(str) } fn deserialize_string<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_str(visitor) } fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; let bytes = self.try_take_n_bytes(len_bytes)?; visitor.visit_borrowed_bytes(bytes) } fn deserialize_byte_buf<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_bytes(visitor) } fn deserialize_option<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { match self.try_take_word()? { 0 => visitor.visit_none(), 1 => visitor.visit_some(self), _ => Err(Error::DeserializeBadOption), } } fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_unit() } fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_unit(visitor) } fn deserialize_newtype_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_newtype_struct(self) } fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? 
as usize; visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple_struct<V>( self, _name: &'static str, len: usize, visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(len, visitor) } fn deserialize_map<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? as usize; visitor.visit_map(MapAccess { deserializer: self, len, }) } fn deserialize_struct<V>( self, _name: &'static str, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(fields.len(), visitor) } fn deserialize_enum<V>( self, _name: &'static str, _variants: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_enum(self) } fn deserialize_identifier<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_ignored_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } } #[cfg(test)] mod test { use alloc::string::String; use super::*; #[test] fn test_struct() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { bool: bool, i8: i8, u8: u8, i16: i16, u16: u16, i32: i32, u32: u32, i64: i64, u64: u64, } let words = [ 1, -4_i32 as u32, 4, -5_i32 as u32, 5, -6_i32 as u32, 6, -7_i32 as u32, 0xffffffff, 7, 0x00000000, ]; let expected = Test { bool: true, i8: -4, u8: 4, i16: -5, u16: 5, i32: -6, u32: 6, i64: -7, u64: 7, }; assert_eq!(expected, from_slice(&words).unwrap()); } #[test] fn test_str() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { first: String, second: String, } let words = [1, 0x00000061, 3, 0x00636261]; let expected = Test { first: "a".into(), second: "abc".into(), }; assert_eq!(expected, 
from_slice(&words).unwrap()); } }
let character = core::str::from_utf8(&bytes) .map_err(|_| Error::DeserializeBadChar)? .chars() .next() .ok_or(Error::DeserializeBadChar)?;
assignment_statement
[ { "content": "fn into_words(slice: &[u8]) -> Result<Vec<u32>> {\n\n let mut vec = Vec::new();\n\n let chunks = slice.chunks_exact(4);\n\n assert!(chunks.remainder().len() == 0);\n\n for chunk in chunks {\n\n let word = chunk[0] as u32\n\n | (chunk[1] as u32) << 8\n\n | (chunk[2] as u32) << 16\n\n | (chunk[3] as u32) << 24;\n\n vec.push(word);\n\n }\n\n Ok(vec)\n\n}\n\n\n\nimpl Receipt {\n\n /// Verify that the current [Receipt] is a valid result of executing the\n\n /// method associated with the given method ID in a ZKVM.\n\n pub fn verify(&self, method_id: &[u8]) -> Result<()> {\n\n let mut err = ffi::RawError::default();\n\n unsafe {\n", "file_path": "risc0/zkvm/sdk/rust/host/src/lib.rs", "rank": 1, "score": 253987.91618732194 }, { "content": "fn set_word(buf: &mut [u8], idx: usize, word: u32) {\n\n buf[(4 * idx)..(4 * idx + 4)].copy_from_slice(&word.to_le_bytes());\n\n}\n\n\n\nimpl Sha for Impl {\n\n type DigestPtr = Box<Digest>;\n\n\n\n fn hash_bytes(&self, bytes: &[u8]) -> Self::DigestPtr {\n\n let mut hasher = Sha256::new();\n\n hasher.update(bytes);\n\n Box::new(Digest::new(\n\n hasher\n\n .finalize()\n\n .as_slice()\n\n .chunks(4)\n\n .map(|chunk| u32::from_be_bytes(chunk.try_into().unwrap()))\n\n .collect::<Vec<u32>>()\n\n .try_into()\n\n .unwrap(),\n\n ))\n", "file_path": "risc0/zkp/rust/core/src/sha_cpu.rs", "rank": 2, "score": 244722.86162393662 }, { "content": "pub fn to_slice<'a, 'b, T>(value: &'b T, buf: &'a mut [u32]) -> Result<&'a [u32]>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n let mut serializer = Serializer::new(Slice::new(buf));\n\n value.serialize(&mut serializer)?;\n\n serializer.stream.release()\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/serde/src/serializer.rs", "rank": 3, "score": 231860.65485421906 }, { "content": "/// Makes a digest for a slice of bytes.\n\n///\n\n/// Since there are no guarantees on alignment, an internal copy is made.\n\npub fn digest_u8_slice(data: &[u8]) -> &'static Digest {\n\n let len_bytes = 
data.len();\n\n let cap = compute_capacity_needed(len_bytes);\n\n let mut data_u32 = Vec::<u32>::with_capacity(cap);\n\n\n\n let whole_words = len_bytes / WORD_SIZE;\n\n // First copy in all the words we can.\n\n let words_copied: usize;\n\n match bytemuck::try_cast_slice(&data[..whole_words * WORD_SIZE]) as Result<&[u32], _> {\n\n Ok(words) => {\n\n data_u32.extend_from_slice(words);\n\n words_copied = whole_words;\n\n }\n\n Err(_) => {\n\n words_copied = 0;\n\n }\n\n };\n\n\n\n data_u32.resize(cap, 0);\n\n // Now copy in any remaining bytes.\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/sha.rs", "rank": 4, "score": 214959.80470416165 }, { "content": "/// Produces a machine generated .h file that implements the RISC-V circuit and writes it to a file.\n\npub fn make_circuit(path: &str) -> Result<()> {\n\n let_cxx_string!(path = path);\n\n Ok(ffi::make_circuit(&path)?)\n\n}\n", "file_path": "risc0/zkvm/circuit/lib.rs", "rank": 5, "score": 190353.8585721897 }, { "content": "pub fn make_method_id_from_elf(path: &str) -> Result<MethodId> {\n\n let_cxx_string!(cxx_path = path);\n\n Ok(ffi::make_method_id_from_elf(&cxx_path)?)\n\n}\n", "file_path": "risc0/zkvm/lib.rs", "rank": 6, "score": 173516.15540941834 }, { "content": "pub fn to_vec_with_capacity<'a, T>(value: &'a T, cap: usize) -> Result<alloc::vec::Vec<u32>>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n let vec = AllocVec::with_capacity(cap);\n\n let mut serializer = Serializer::new(vec);\n\n value.serialize(&mut serializer)?;\n\n serializer.stream.release()\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/serde/src/serializer.rs", "rank": 7, "score": 168155.26535804546 }, { "content": "/// Read private data from the host.\n\npub fn read<T: Deserialize<'static>>() -> T {\n\n ENV.get().read()\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/env.rs", "rank": 8, "score": 167125.50807921172 }, { "content": "/// Perform a reverse butterfly transform of a buffer of (1 << n) numbers.\n\n/// The result of this 
computation is a discrete Fourier transform, but with\n\n/// changed indices. This is described [here](https://en.wikipedia.org/wiki/Cooley%E2%80%93Tukey_FFT_algorithm#Data_reordering,_bit_reversal,_and_in-place_algorithms)\n\n/// The output of rev_butterfly(io, n) at index i is the sum over k from 0 to\n\n/// 2^n-1 of io[k] * ROU_REV[n]^(k*i'), where i' is i bit-reversed as an n-bit\n\n/// number.\n\n///\n\n/// As an example, we'll work through a trace of the rev_butterfly algorithm\n\n/// with n = 3 on a list of length 8. Let w = ROU_REV[3] be the eighth root of\n\n/// unity. We start with\n\n/// [a0, a1, a2, a3, a4, a5, a6, a7]\n\n/// After the loop, before the first round of recursive calls, we have\n\n/// [a0+a4, a1+a5, a2+a6, a3+a7,\n\n/// a0-a4, a1*w-a5*w, a2*w^2-a6*w^2, a3*w^3-a7*w^3]\n\n/// After first round of recursive calls, we have\n\n/// [a0+a4+a2+a6, a1+a5+a3+a7,\n\n/// a0+a4-a2-a6, a1*w^2+a5*w^2-a3*w^2-a7*w^2,\n\n/// a0-a4+a2*w^2-a6*w^2, a1*w-a5*w+a3*w^3-a7*w^3,\n\n/// a0-a4-a2*w^2+a6*w^2, a1*w^3-a5*w^3-a3*w^5+a7*w^5]\n\n/// And after the second round of recursive calls, we have\n\n/// [a0+a4+a2+a6+a1+a5+a3+a7,\n\n/// a0+a4+a2+a6-a1-a5-a3-a7,\n\n/// a0+a4-a2-a6+a1*w^2+a5*w^2-a3*w^2-a7*w^2,\n\n/// a0+a4-a2-a6-a1*w^2-a5*w^2+a3*w^2+a7*w^2,\n\n/// a0-a4+a2*w^2-a6*w^2+a1*w-a5*w+a3*w^3-a7*w^3,\n\n/// a0-a4+a2*w^2-a6*w^2-a1*w+a5*w-a3*w^3+a7*w^3,\n\n/// a0-a4-a2*w^2+a6*w^2+a1*w^3-a5*w^3+a3*w^5-a7*w^5,\n\n/// a0-a4-a2*w^2+a6*w^2-a1*w^3+a5*w^3-a3*w^5+a7*w^5]\n\n/// Rewriting this, we get\n\n/// [sum_k ak w^0,\n\n/// sum_k ak w^4k,\n\n/// sum_k ak w^2k,\n\n/// sum_k ak w^6k,\n\n/// sum_k ak w^1k,\n\n/// sum_k ak w^5k,\n\n/// sum_k ak w^3k,\n\n/// sum_k ak w^7k]\n\n/// The exponent multiplicands in the sum arise from reversing the indices as\n\n/// three-bit numbers. For example, 3 is 011 in binary, which reversed is 110,\n\n/// which is 6. 
So i' in the exponent of the index-3 value is 6.\n\npub fn rev_butterfly(io: &mut [Fp4], n: usize) {\n\n assert!(io.len() == (1 << n));\n\n if n == 0 {\n\n return;\n\n }\n\n let half = 1 << (n - 1);\n\n let step = Fp4::from_u32(ROU_REV[n]);\n\n let mut cur = Fp4::one();\n\n for i in 0..half {\n\n let a = io[i];\n\n let b = io[i + half];\n\n io[i] = a + b;\n\n io[i + half] = (a - b) * cur;\n\n cur *= step;\n\n }\n\n rev_butterfly(&mut io[..half], n - 1);\n\n rev_butterfly(&mut io[half..], n - 1);\n\n}\n", "file_path": "risc0/zkp/rust/core/src/ntt.rs", "rank": 9, "score": 164986.3726832562 }, { "content": "/// Bit reverses the indices in an array of (1 << n) numbers.\n\n/// This permutes the values in the array so that a value which is previously\n\n/// in index i, will now go in the index i' given by reversing the bits of i.\n\n/// For example, with n=4, the value at index 3=0011 will go to index 12=1100.\n\npub fn bit_reverse(io: &mut [Fp4], n: usize) {\n\n let size: usize = 1 << n;\n\n for i in 0..size {\n\n let rev_idx: usize = (bit_rev_32(i.try_into().unwrap()) >> (32 - n))\n\n .try_into()\n\n .unwrap();\n\n if i < rev_idx {\n\n let tmp = io[i];\n\n io[i] = io[rev_idx];\n\n io[rev_idx] = tmp;\n\n }\n\n }\n\n}\n\n\n", "file_path": "risc0/zkp/rust/core/src/ntt.rs", "rank": 10, "score": 164984.1169136484 }, { "content": "/// Computes the SHA256 digest of a serialized object.\n\npub fn digest<T: Serialize>(val: &T) -> &'static Digest {\n\n // If the object to be serialized is a plain old structure in memory, this\n\n // should be a good guess for the allocation needed.\n\n let cap = compute_capacity_needed(mem::size_of_val(val));\n\n let mut buf = to_vec_with_capacity(val, cap).unwrap();\n\n\n\n let len_bytes = buf.len() * WORD_SIZE;\n\n buf.resize(compute_capacity_needed(len_bytes), 0);\n\n add_trailer(buf.as_mut_slice(), len_bytes, MemoryType::Normal);\n\n raw_digest(buf.as_slice())\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/sha.rs", "rank": 11, 
"score": 160733.60821971018 }, { "content": "struct ValueU32 {\n\n Value low;\n\n Value high;\n\n static ValueU32 fromConst(uint32_t val) { return {val & 0xffff, val >> 16}; }\n\n};\n\n\n\ninline ValueU32 operator*(Value a, ValueU32 b) {\n\n return {a * b.low, a * b.high};\n\n}\n\n\n\ninline ValueU32 operator+(ValueU32 a, ValueU32 b) {\n\n return {a.low + b.low, a.high + b.high};\n\n}\n\n\n\ninline ValueU32 operator-(ValueU32 a, ValueU32 b) {\n\n return {0x10000 + a.low - b.low, 0xffff + a.high - b.high};\n\n}\n\n\n\ninline ValueU32 operator+(ValueU32 a, int b) {\n\n return {a.low + b, a.high};\n\n}\n\n\n", "file_path": "risc0/zkvm/circuit/types.h", "rank": 12, "score": 159299.29976099284 }, { "content": "/// Reverses the bits in a 32 bit number\n\n/// For example 1011...0100 becomes 0010...1101\n\nfn bit_rev_32(mut x: u32) -> u32 {\n\n x = ((x & 0xaaaaaaaa) >> 1) | ((x & 0x55555555) << 1);\n\n x = ((x & 0xcccccccc) >> 2) | ((x & 0x33333333) << 2);\n\n x = ((x & 0xf0f0f0f0) >> 4) | ((x & 0x0f0f0f0f) << 4);\n\n x = ((x & 0xff00ff00) >> 8) | ((x & 0x00ff00ff) << 8);\n\n (x >> 16) | (x << 16)\n\n}\n\n\n", "file_path": "risc0/zkp/rust/core/src/ntt.rs", "rank": 13, "score": 159005.32603986835 }, { "content": "/// A widgit that converts some value into a boolean.\n\nstruct MakeBoolRegs {\n\n RegBin isNonzero;\n\n Reg maybeInv;\n\n\n\n MakeBoolRegs(BufAlloc& alloc) : isNonzero(alloc), maybeInv(alloc) {}\n\n\n\n Value get() const { return isNonzero.get(); }\n\n void set(Value num) {\n\n BYZ_NONDET {\n\n isNonzero.set(nonzero(num));\n\n maybeInv.set(inv(num));\n\n }\n\n equate(num * maybeInv.get(), isNonzero.get());\n\n equate((1 - isNonzero.get()) * num, 0);\n\n }\n\n};\n\n\n", "file_path": "risc0/zkvm/circuit/types.h", "rank": 14, "score": 154571.84568647706 }, { "content": "/// A widgit that negates a U32 number which is almost just subtraction, except for the case of zero\n\n/// that needs to be handled specially.\n\nstruct NegU32Regs {\n\n MakeBoolRegs lowSafe;\n\n 
MakeBoolRegs highSafe;\n\n RegU32 store;\n\n\n\n NegU32Regs(BufAlloc& alloc) : lowSafe(alloc), highSafe(alloc), store(alloc) {}\n\n\n\n // Set the input number, gets the output.\n\n ValueU32 set(ValueU32 val) {\n\n // Pick the values for the two 'carry' registers via nondet\n\n ValueU32 out;\n\n Value lowSub = 0x10000 - val.low;\n\n lowSafe.set(lowSub - 0x10000);\n\n out.low = lowSafe.isNonzero.get() * lowSub;\n\n Value highSub = 0x10000 - lowSafe.isNonzero.get() - val.high;\n\n highSafe.set(highSub - 0x10000);\n\n out.high = highSafe.isNonzero.get() * highSub;\n\n store.set(out);\n\n return store.get();\n\n }\n\n};\n\n\n", "file_path": "risc0/zkvm/circuit/types.h", "rank": 15, "score": 154563.1165392643 }, { "content": "pub fn fri_verify<S: Sha, F>(iop: &mut ReadIOP<S>, mut degree: usize, mut f: F)\n\nwhere\n\n F: FnMut(&mut ReadIOP<S>, usize) -> Fp4,\n\n{\n\n let sha = iop.get_sha().clone();\n\n let orig_domain = INV_RATE * degree;\n\n let mut domain = orig_domain;\n\n // Prep the folding verfiers\n\n let mut rounds: Vec<VerifyRoundInfo> = vec![];\n\n while degree > FRI_MIN_DEGREE {\n\n rounds.push(VerifyRoundInfo::new(iop, domain));\n\n domain /= FRI_FOLD;\n\n degree /= FRI_FOLD;\n\n }\n\n // Grab the final coeffs + commit\n\n let mut final_coeffs: Vec<Fp> = vec![Fp::new(0); EXT_SIZE * degree];\n\n iop.read_fps(&mut final_coeffs);\n\n let final_digest = sha.hash_fps(&final_coeffs);\n\n iop.commit(&final_digest);\n\n // Get the generator for the final polynomial evaluations\n", "file_path": "risc0/zkvm/sdk/rust/verify/src/zkp/fri.rs", "rank": 16, "score": 149950.45763888073 }, { "content": "/// For x = (1 << po2), given x, find po2.\n\npub fn to_po2(x: usize) -> usize {\n\n (31 - (x as u32).leading_zeros()) as usize\n\n}\n", "file_path": "risc0/zkp/rust/core/src/lib.rs", "rank": 17, "score": 148983.9818291755 }, { "content": "pub fn to_vec<'a, T>(value: &'a T) -> Result<alloc::vec::Vec<u32>>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n // Use the in-memory size 
of the value as a guess for the length\n\n // of the serialized value.\n\n let vec = AllocVec::with_capacity(mem::size_of_val(value));\n\n let mut serializer = Serializer::new(vec);\n\n value.serialize(&mut serializer)?;\n\n serializer.stream.release()\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/serde/src/serializer.rs", "rank": 18, "score": 147976.04223664754 }, { "content": "pub fn log(msg: &str) {\n\n logger().log(msg);\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/core/src/lib.rs", "rank": 19, "score": 143034.32526365973 }, { "content": "/// Return the default implementation of a [Sha].\n\npub fn default_implementation() -> &'static DefaultImplementation {\n\n static DEFAULT_IMPLEMENTATION: DefaultImplementation = DefaultImplementation {};\n\n &DEFAULT_IMPLEMENTATION\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Digest;\n\n\n\n #[test]\n\n fn test_from_str() {\n\n assert_eq!(\n\n Digest::from_str(\"00000077000000AA0000001200000034000000560000007a000000a900000009\"),\n\n Digest::new([119, 170, 18, 52, 86, 122, 169, 9])\n\n );\n\n }\n\n}\n\n\n\n#[allow(missing_docs)]\n\npub mod testutil {\n", "file_path": "risc0/zkp/rust/core/src/sha.rs", "rank": 22, "score": 139761.71812295922 }, { "content": "pub fn logger() -> &'static dyn Log {\n\n unsafe { LOGGER }\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/core/src/lib.rs", "rank": 23, "score": 136002.42332055047 }, { "content": "// Computes a raw digest of the given slice. 
The data must already\n\n// contain the end marker and the trailer.\n\nfn raw_digest(data: &[u32]) -> &'static Digest {\n\n assert_eq!(data.len() % CHUNK_SIZE, 0);\n\n // Allocate fresh memory that's guaranteed to be uninitialized so\n\n // the host can write to it.\n\n unsafe {\n\n let alloced = Box::<mem::MaybeUninit<Digest>>::new(mem::MaybeUninit::<Digest>::uninit());\n\n let digest = (*Box::into_raw(alloced)).as_mut_ptr();\n\n raw_digest_to(data, digest);\n\n &*digest\n\n }\n\n}\n\n\n\n// Computes a raw digest of the given slice, and stores the digest in\n\n// the given pointer. The digest memory must be uninitilaized.\n\npub(crate) unsafe fn raw_digest_to(data: &[u32], digest: *mut Digest) {\n\n assert_eq!(data.len() % CHUNK_SIZE, 0);\n\n let type_count = data.len() / CHUNK_SIZE;\n\n\n\n let desc_ptr = alloc_desc();\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/sha.rs", "rank": 24, "score": 132936.1416533612 }, { "content": "pub fn set_logger(logger: &'static dyn Log) {\n\n unsafe { LOGGER = logger };\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/core/src/lib.rs", "rank": 25, "score": 127184.46436269837 }, { "content": "fn mul(lhs: u32, rhs: u32) -> u32 {\n\n (((lhs as u64) * (rhs as u64)) % P_U64) as u32\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{Fp, P, P_U64};\n\n use rand::SeedableRng;\n\n\n\n #[test]\n\n fn inv() {\n\n // Smoke test for inv\n\n assert_eq!(Fp(5).inv() * Fp(5), Fp(1));\n\n }\n\n\n\n #[test]\n\n fn pow() {\n\n // Smoke tests for pow\n\n assert_eq!(Fp(5).pow(0), Fp(1));\n\n assert_eq!(Fp(5).pow(1), Fp(5));\n", "file_path": "risc0/zkp/rust/core/src/fp.rs", "rank": 26, "score": 119696.0212132213 }, { "content": "fn add(lhs: u32, rhs: u32) -> u32 {\n\n let x = lhs + rhs;\n\n return if x >= P { x - P } else { x };\n\n}\n\n\n", "file_path": "risc0/zkp/rust/core/src/fp.rs", "rank": 27, "score": 119696.0212132213 }, { "content": "fn sub(lhs: u32, rhs: u32) -> u32 {\n\n let x = lhs.wrapping_sub(rhs);\n\n return if x > P { 
x.wrapping_add(P) } else { x };\n\n}\n\n\n", "file_path": "risc0/zkp/rust/core/src/fp.rs", "rank": 28, "score": 119696.0212132213 }, { "content": "fn fold_eval(values: &mut [Fp4], mix: Fp4, s: usize, j: usize) -> Fp4 {\n\n rev_butterfly(values, FRI_FOLD_PO2);\n\n let norm = Fp::new(FRI_FOLD as u32).inv();\n\n for i in 0..FRI_FOLD {\n\n values[i] *= norm;\n\n }\n\n bit_reverse(values, FRI_FOLD_PO2);\n\n let s_po2 = to_po2(s);\n\n let root_po2 = FRI_FOLD_PO2 + s_po2;\n\n let inv_wk: Fp = Fp::new(ROU_REV[root_po2]).pow(j);\n\n let mut mul = Fp::new(1);\n\n let mut tot = Fp4::zero();\n\n let mut mix_pow = Fp4::one();\n\n for i in 0..FRI_FOLD {\n\n tot += values[i] * mul * mix_pow;\n\n mul *= inv_wk;\n\n mix_pow *= mix;\n\n }\n\n tot\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/verify/src/zkp/fri.rs", "rank": 29, "score": 119473.35147682414 }, { "content": "struct StepState;\n\n\n\n} // namespace risc0::circuit\n", "file_path": "risc0/zkvm/circuit/types.h", "rank": 30, "score": 112020.13191925774 }, { "content": "struct CaptureValLoc {\n\n CaptureValLoc(Value val, SourceLoc loc = SourceLoc::current()) : val(val), loc(loc) {}\n\n CaptureValLoc(Buffer::BufAccess access, SourceLoc loc = SourceLoc::current())\n\n : val(access), loc(loc) {}\n\n CaptureValLoc(Fp fp, SourceLoc loc = SourceLoc::current()) : val(fp), loc(loc) {}\n\n CaptureValLoc(int num, SourceLoc loc = SourceLoc::current()) : val(Fp(num)), loc(loc) {}\n\n Value val;\n\n SourceLoc loc;\n\n};\n\n\n\ninline Value operator+(CaptureValLoc a, CaptureValLoc b) {\n\n return getGlobalContext()->add(a.val.getImpl(), b.val.getImpl(), b.loc);\n\n}\n\n\n\ninline Value operator-(CaptureValLoc a, CaptureValLoc b) {\n\n return getGlobalContext()->sub(a.val.getImpl(), b.val.getImpl(), b.loc);\n\n}\n\n\n\ninline Value operator*(CaptureValLoc a, CaptureValLoc b) {\n\n return getGlobalContext()->mul(a.val.getImpl(), b.val.getImpl(), b.loc);\n", "file_path": "risc0/zkvm/circuit/edsl.h", "rank": 31, "score": 108891.32415748121 }, { 
"content": "/// Align the given address `addr` upwards to alignment `align`.\n\n///\n\n/// Requires that `align` is a power of two.\n\nfn align_up(addr: usize, align: usize) -> usize {\n\n (addr + align - 1) & !(align - 1)\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/serde/src/lib.rs", "rank": 32, "score": 107786.98094581599 }, { "content": "fn alloc_desc() -> *mut SHADescriptor {\n\n // SAFETY: Single threaded and this is the only place we use CUR_DESC.\n\n unsafe {\n\n let cur_desc = CUR_DESC.0.get();\n\n let ptr = (REGION_SHA_START as *mut SHADescriptor).add(*cur_desc);\n\n *cur_desc += 1;\n\n ptr\n\n }\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/sha.rs", "rank": 33, "score": 96706.49182900965 }, { "content": "fn receipt(bench: &mut Bencher) {\n\n let data: Vec<u8> = fs::read(\"src/zkvm/simple_receipt.receipt\").unwrap();\n\n let as_u32: Vec<u32> = data\n\n .chunks(4)\n\n .map(|bytes| u32::from_le_bytes(<[u8; 4]>::try_from(bytes).unwrap()))\n\n .collect();\n\n let receipt: Receipt = risc0_zkvm_serde::from_slice(&as_u32).unwrap();\n\n\n\n let method_id =\n\n &MethodID::try_from(fs::read(\"src/zkvm/simple_receipt.id\").unwrap().as_slice()).unwrap();\n\n bench.iter(|| {\n\n receipt.verify(method_id);\n\n })\n\n}\n\n\n\nbenchmark_group!(benches, receipt);\n\nbenchmark_main!(benches);\n", "file_path": "risc0/zkvm/sdk/rust/verify/benches/verify.rs", "rank": 34, "score": 95347.90379948377 }, { "content": "class RegU32 {\n\npublic:\n\n RegU32(BufAlloc& alloc) : buf(alloc.allocGeneric(2)) {}\n\n Value low(SourceLoc loc = SourceLoc::current()) { return buf.at(0, loc); }\n\n Value high(SourceLoc loc = SourceLoc::current()) { return buf.at(1, loc); }\n\n ValueU32 get(SourceLoc loc = SourceLoc::current()) { return {low(loc), high(loc)}; }\n\n void setLow(Value val, SourceLoc loc = SourceLoc::current()) { buf.at(0, loc) = val; }\n\n void setHigh(Value val, SourceLoc loc = SourceLoc::current()) { buf.at(1, loc) = val; }\n\n void set(ValueU32 val, SourceLoc loc = 
SourceLoc::current()) {\n\n setLow(val.low, loc);\n\n setHigh(val.high, loc);\n\n }\n\n\n\nprivate:\n\n Buffer buf;\n\n};\n\n\n", "file_path": "risc0/zkvm/circuit/types.h", "rank": 35, "score": 94547.81265013742 }, { "content": "/// Write private data to the host.\n\npub fn write<T: Serialize>(data: &T) {\n\n ENV.get().write(data);\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/env.rs", "rank": 36, "score": 86711.23489265633 }, { "content": "/// Commit public data to the journal.\n\npub fn commit<T: Serialize>(data: &T) {\n\n ENV.get().commit(data);\n\n}\n\n\n\nimpl Env {\n\n fn new() -> Self {\n\n Env {\n\n input: Deserializer::new(unsafe {\n\n slice::from_raw_parts(REGION_INPUT_START as _, REGION_INPUT_LEN / WORD_SIZE)\n\n }),\n\n output: Serializer::new(Slice::new(unsafe {\n\n slice::from_raw_parts_mut(REGION_OUTPUT_START as _, REGION_OUTPUT_LEN / WORD_SIZE)\n\n })),\n\n commit: Serializer::new(Slice::new(unsafe {\n\n slice::from_raw_parts_mut(REGION_COMMIT_START as _, REGION_COMMIT_LEN / WORD_SIZE)\n\n })),\n\n commit_len: 0,\n\n }\n\n }\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/env.rs", "rank": 37, "score": 86711.23489265633 }, { "content": "// Current sha descriptor index.\n\nstruct CurDesc(UnsafeCell<usize>);\n\n\n\n// SAFETY: single threaded environment\n\nunsafe impl Sync for CurDesc {}\n\n\n\nstatic CUR_DESC: CurDesc = CurDesc(UnsafeCell::new(0));\n\n\n\nconst END_MARKER: u8 = 0x80;\n\n\n\n// Chunk size in words for optimized SHA to operate on; all SHA\n\n// requests must be a multiple of this size.\n\nconst CHUNK_SIZE: usize = 64 / WORD_SIZE;\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/sha.rs", "rank": 38, "score": 86338.15906770964 }, { "content": "/// Evaluate a polynomial whose coeffients are in the extension field at a\n\n/// point.\n\npub fn poly_eval(coeffs: &[Fp4], x: Fp4) -> Fp4 {\n\n let mut mul = Fp4::one();\n\n let mut tot = Fp4::zero();\n\n for i in 0..coeffs.len() {\n\n tot += coeffs[i] * mul;\n\n mul *= x;\n\n 
}\n\n tot\n\n}\n", "file_path": "risc0/zkp/rust/core/src/poly.rs", "rank": 39, "score": 85276.37193956622 }, { "content": "pub fn verify<S: Sha, C: Circuit>(\n\n sha: &S,\n\n circuit: &mut C,\n\n proof: &[u32],\n\n) -> Result<(), VerificationError> {\n\n // Make IOP\n\n let mut iop = ReadIOP::new(sha, proof);\n\n\n\n // Do 'execute' phase and get size\n\n circuit.execute(&mut iop);\n\n let po2 = circuit.po2();\n\n assert!(po2 as usize <= MAX_CYCLES_PO2);\n\n let size = 1 << po2;\n\n let domain = INV_RATE * size;\n\n\n\n // Get taps and compute sizes\n\n let taps = circuit.taps();\n\n // TODO: This is a very silly way to do this\n\n let mut accum_size = 0;\n\n let mut code_size = 0;\n", "file_path": "risc0/zkvm/sdk/rust/verify/src/zkp/verify.rs", "rank": 40, "score": 84758.52982442042 }, { "content": "/// Require that accesses to behind the given pointer before the memory\n\n/// barrier don't get optimized away or reordered to after the memory\n\n/// barrier.\n\npub fn memory_barrier<T>(ptr: *const T) {\n\n // SAFETY: This passes a pointer in, but does nothing with it.\n\n unsafe { asm!(\"/* {0} */\", in(reg) (ptr)) }\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/lib.rs", "rank": 41, "score": 84758.52982442042 }, { "content": "struct OpCombine : public PolyOpBase<OpCombine, 7, true> {\n\n PolyOp prev;\n\n PolyOp mul;\n\n PolyOp inner;\n\n OpCombine(PolyOp prev, PolyOp mul, PolyOp inner) : prev(prev), mul(mul), inner(inner) {}\n\n int getDegree() const { return std::max(prev->degree(), mul->degree() + inner->degree()); }\n\n std::tuple<PolyOp, PolyOp, PolyOp> key() const { return std::make_tuple(prev, mul, inner); }\n\n void computeTaps(PolyContext::Impl& impl) override {\n\n prev->computeTaps(impl);\n\n mul->computeTaps(impl);\n\n inner->computeTaps(impl);\n\n }\n\n std::string output(PolyContext::Impl& impl, const std::string& out) override;\n\n void findCriticalPath(PolyContext::Impl& impl) override;\n\n};\n\n\n", "file_path": 
"risc0/zkvm/circuit/poly_context.cpp", "rank": 42, "score": 84756.5328483524 }, { "content": "struct OpBegin : public PolyOpBase<OpBegin, 5, true> {\n\n int getDegree() const { return 0; }\n\n int key() const { return 0; }\n\n void computeTaps(PolyContext::Impl& impl) override {}\n\n std::string output(PolyContext::Impl& impl, const std::string& out) override;\n\n void findCriticalPath(PolyContext::Impl& impl) override;\n\n};\n\n\n", "file_path": "risc0/zkvm/circuit/poly_context.cpp", "rank": 43, "score": 84756.5328483524 }, { "content": "struct OpAssertZero : public PolyOpBase<OpAssertZero, 6, true> {\n\n PolyOp prev;\n\n PolyOp zero;\n\n OpAssertZero(PolyOp prev, PolyOp zero) : prev(prev), zero(zero) {}\n\n int getDegree() const { return std::max(prev->degree(), zero->degree()); }\n\n std::pair<PolyOp, PolyOp> key() const { return std::make_pair(prev, zero); }\n\n void computeTaps(PolyContext::Impl& impl) override {\n\n prev->computeTaps(impl);\n\n zero->computeTaps(impl);\n\n }\n\n std::string output(PolyContext::Impl& impl, const std::string& out) override;\n\n void findCriticalPath(PolyContext::Impl& impl) override;\n\n};\n\n\n", "file_path": "risc0/zkvm/circuit/poly_context.cpp", "rank": 44, "score": 81144.88094683453 }, { "content": " constexpr bool operator!=(Fp4 rhs) device const { return Fp4(*this) != rhs; }\n", "file_path": "risc0/zkp/core/fp4.h", "rank": 45, "score": 71499.83503820896 }, { "content": "struct RoundResult {\n\n GameState state;\n\n HitType hit;\n\n\n\n template <typename Archive> void transfer(Archive& ar) {\n\n ar.transfer(state);\n\n ar.transfer(hit);\n\n }\n\n\n", "file_path": "examples/cpp/battleship/battleship.h", "rank": 46, "score": 71499.83503820896 }, { "content": " RegU32 result;\n", "file_path": "risc0/zkvm/circuit/cycle.h", "rank": 47, "score": 71484.12067528932 }, { "content": "fn main() {\n\n CFG.include_prefix = \"risc0/zkp\";\n\n CFG.exported_header_links = vec![\"risc0-core-sys\"];\n\n\n\n let mut build = 
cxx_build::bridge(\"lib.rs\");\n\n build\n\n .file(\"accel/backend/cpu/impl.cpp\")\n\n .file(\"core/ntt.cpp\")\n\n .file(\"core/poly.cpp\")\n\n .file(\"core/sha_rng.cpp\")\n\n .file(\"prove/fri.cpp\")\n\n .file(\"prove/merkle.cpp\")\n\n .file(\"prove/poly_group.cpp\")\n\n .file(\"prove/prove.cpp\")\n\n .file(\"verify/fri.cpp\")\n\n .file(\"verify/merkle.cpp\")\n\n .file(\"verify/taps.cpp\")\n\n .file(\"verify/verify.cpp\")\n\n .define(\"__TBB_NO_IMPLICIT_LINKAGE\", None)\n\n .flag_if_supported(\"/std:c++17\")\n", "file_path": "risc0/zkp/build.rs", "rank": 48, "score": 66509.02904792712 }, { "content": "fn main() {\n\n CFG.include_prefix = \"risc0/core\";\n\n\n\n let rng_file = match env::var(\"CARGO_CFG_TARGET_OS\").unwrap().as_str() {\n\n \"linux\" => \"rng_linux.cpp\",\n\n \"macos\" => \"rng_macos.cpp\",\n\n \"windows\" => \"rng_win32.cpp\",\n\n os => panic!(\"Unsupported target_os: {os}\"),\n\n };\n\n\n\n cxx_build::bridge(\"lib.rs\")\n\n .file(\"elf.cpp\")\n\n .file(\"log.cpp\")\n\n .file(\"rng.cpp\")\n\n .file(rng_file)\n\n .flag_if_supported(\"/std:c++17\")\n\n .flag_if_supported(\"-std=c++17\")\n\n .warnings(false)\n\n .compile(\"risc0-core-sys\");\n\n}\n", "file_path": "risc0/core/build.rs", "rank": 49, "score": 66509.02904792712 }, { "content": "fn main() {\n\n CFG.include_prefix = \"risc0/zkvm\";\n\n\n\n let inc_dir = env::var_os(\"DEP_RISC0_ZKVM_CIRCUIT_GEN_INC\").unwrap();\n\n\n\n let mut build = cxx_build::bridge(\"lib.rs\");\n\n build\n\n .file(\"prove/exec.cpp\")\n\n .file(\"prove/io_handler.cpp\")\n\n .file(\"prove/method_id.cpp\")\n\n .file(\"prove/riscv.cpp\")\n\n .file(\"prove/step_context.cpp\")\n\n .file(\"prove/step.cpp\")\n\n .file(\"verify/riscv.cpp\")\n\n .file(\"sdk/cpp/host/c_api.cpp\")\n\n .file(\"sdk/cpp/host/receipt.cpp\")\n\n .include(inc_dir)\n\n .define(\"__TBB_NO_IMPLICIT_LINKAGE\", None)\n\n .flag_if_supported(\"/std:c++17\")\n\n .flag_if_supported(\"-std=c++17\")\n", "file_path": "risc0/zkvm/build.rs", "rank": 50, "score": 
66509.02904792712 }, { "content": "fn main() {\n\n CFG.include_prefix = \"risc0/zkvm/platform\";\n\n\n\n cxx_build::bridge(\"src/lib.rs\")\n\n .flag_if_supported(\"/std:c++17\")\n\n .flag_if_supported(\"-std=c++17\")\n\n .warnings(false)\n\n .compile(\"risc0-zkp-platform\");\n\n}\n", "file_path": "risc0/zkvm/platform/build.rs", "rank": 51, "score": 64816.002675058044 }, { "content": "fn main() {\n\n CFG.include_prefix = \"risc0/zkvm/circuit\";\n\n CFG.exported_header_links = vec![\"risc0-zkp-sys\", \"risc0-zkvm-platform-sys\"];\n\n\n\n cxx_build::bridge(\"lib.rs\")\n\n .file(\"accum_regs.cpp\")\n\n .file(\"compute_cycle.cpp\")\n\n .file(\"context.cpp\")\n\n .file(\"data_regs.cpp\")\n\n .file(\"decode_cycle.cpp\")\n\n .file(\"divide_cycle.cpp\")\n\n .file(\"final_cycle.cpp\")\n\n .file(\"gen_context.cpp\")\n\n .file(\"make_circuit.cpp\")\n\n .file(\"mem_check.cpp\")\n\n .file(\"mem_io_regs.cpp\")\n\n .file(\"multiply_cycle.cpp\")\n\n .file(\"poly_context.cpp\")\n\n .file(\"sha_cycle.cpp\")\n\n .file(\"step_state.cpp\")\n\n .flag_if_supported(\"/std:c++17\")\n\n .flag_if_supported(\"/Zc:preprocessor\")\n\n .flag_if_supported(\"/EHsc\")\n\n .flag_if_supported(\"-std=c++17\")\n\n .warnings(false)\n\n .compile(\"risc0-zkp-circuit\");\n\n\n\n println!(\"cargo:rustc-link-lib=static=risc0-core-sys\");\n\n}\n", "file_path": "risc0/zkvm/circuit/build.rs", "rank": 52, "score": 64816.002675058044 }, { "content": "// This class is an implementation detail and carefully built to be efficient on\n\n// RISC-V for use in recursion.\n\nstruct TapData {\n\n uint16_t offset; // The offset in register group (reg #)\n\n uint16_t back; // How many cycles back this tap is\n\n RegisterGroup group; // Which register group this tap is a part of\n\n // Precomputed helpers to speed up iteration\n\n uint8_t combo; // Which combo this register is part of\n\n uint8_t skip; // How far to skip to next register\n\n bool operator<(const TapData& rhs) const {\n\n if (group != rhs.group) {\n\n return 
group < rhs.group;\n\n }\n\n if (offset != rhs.offset) {\n\n return offset < rhs.offset;\n\n }\n\n return back < rhs.back;\n\n }\n\n bool operator==(const TapData& rhs) const {\n\n return group == rhs.group && offset == rhs.offset && back == rhs.back;\n\n }\n\n};\n\n\n\n} // namespace detail\n\n\n", "file_path": "risc0/zkp/verify/taps.h", "rank": 53, "score": 64761.68084595642 }, { "content": "// Combo data holds the tap set for each 'combo'. Basically, combo N consists\n\n// of taps in the range [offsets[n], offsets[n+1]). Again this is an\n\n// implementation detail, and the format is designed to put the actual arrays\n\n// into static data.\n\nstruct ComboData {\n\n uint16_t* taps;\n\n uint16_t* offsets;\n\n uint16_t count;\n\n};\n\n\n\n} // namespace detail\n\n\n", "file_path": "risc0/zkp/verify/taps.h", "rank": 54, "score": 64755.69524979392 }, { "content": "struct Op {\n\n OpType type;\n\n size_t out;\n\n size_t a;\n\n size_t b;\n\n size_t c;\n\n Fp val;\n\n const char* loc;\n\n};\n\n\n\nstatic constexpr size_t kNumSteps = kNumStepFp4s + kNumStepCons;\n\n\n\nstatic const Op g_steps[kNumSteps] = {\n\n#define CHECK_EVAL\n\n#define do_const(out, cval) {OpType::CONST, out, 0, 0, 0, cval},\n\n#define do_get(out, buf, reg, back, id) {OpType::GET, out, id, 0, 0, 0},\n\n#define do_get_global(out, reg) {OpType::GET_GLOBAL, out, reg, 0, 0, 0},\n\n#define do_begin(out) {OpType::BEGIN, out, 0, 0, 0, 0},\n\n#define do_assert_zero(out, in, zval, loc) {OpType::ASSERT_ZERO, out, in, zval, 0, 0, loc},\n\n#define do_combine(out, prev, cond, inner, loc) {OpType::COMBINE, out, prev, cond, inner, 0, loc},\n", "file_path": "risc0/zkvm/verify/riscv.cpp", "rank": 55, "score": 64751.48711085541 }, { "content": "struct TurnMessage {\n\n Position shot;\n\n};\n\n\n", "file_path": "examples/cpp/battleship/protocol.h", "rank": 56, "score": 64751.48711085541 }, { "content": "struct StepContext {\n\n MemoryHandler* io;\n\n MemoryState mem;\n\n uint32_t curStep;\n\n uint32_t numSteps;\n\n Fp 
globals[kGlobalSize];\n\n\n\n Fp get(const Fp* buf, size_t offset, size_t back);\n\n void set(Fp* buf, size_t offset, Fp val);\n\n Fp getDigits(const Fp* buf, size_t bits, size_t offset, size_t back, size_t size);\n\n Fp setDigits(Fp* buf, size_t bits, size_t offset, size_t size, Fp val);\n\n Fp getMux(const Fp* buf, size_t offset, size_t back, size_t size);\n\n void setMux(Fp* buf, size_t offset, size_t size, Fp val);\n\n void memWrite(Fp cycle, Fp addr, Fp low, Fp high);\n\n std::array<Fp, 2> memRead(Fp cycle, Fp addr);\n\n std::array<Fp, 5> memCheck(); // Cycle, Addr, IsWrite, Low, High\n\n std::array<Fp, 4> divide(Fp numerLow, Fp numerHigh, Fp denomLow, Fp denomHigh);\n\n void requireDigits(Fp* buf, size_t bits, size_t offset, size_t size);\n\n void requireMux(Fp* buf, size_t offset, size_t size, const char* msg);\n\n void requireZero(Fp val, const char* msg);\n", "file_path": "risc0/zkvm/prove/step.h", "rank": 57, "score": 64751.48711085541 }, { "content": "struct InitMessage {\n\n risc0::Receipt receipt;\n\n\n\n struct Content {\n\n risc0::ShaDigest state;\n\n\n\n template <typename Archive> void transfer(Archive& ar) { ar.transfer(state); }\n\n };\n\n\n\n Content decode() const;\n\n};\n\n\n", "file_path": "examples/cpp/battleship/protocol.h", "rank": 58, "score": 64751.48711085541 }, { "content": "struct Primitives {\n\n bool flag;\n\n int8_t i8;\n\n uint8_t u8;\n\n int16_t i16;\n\n uint16_t u16;\n\n int32_t i32;\n\n uint32_t u32;\n\n int64_t i64;\n\n uint64_t u64;\n\n\n\n template <typename Archive> void transfer(Archive& ar) {\n\n ar.transfer(flag);\n\n ar.transfer(i8);\n\n ar.transfer(u8);\n\n ar.transfer(i16);\n\n ar.transfer(u16);\n\n ar.transfer(i32);\n\n ar.transfer(u32);\n\n ar.transfer(i64);\n\n ar.transfer(u64);\n\n }\n\n\n\n bool operator==(const Primitives& rhs) const {\n\n return flag == rhs.flag && i8 == rhs.i8 && u8 == rhs.u8 && i16 == rhs.i16 && u16 == rhs.u16 &&\n\n i32 == rhs.i32 && u32 == rhs.u32 && i64 == rhs.i64 && u64 == rhs.u64;\n\n 
}\n\n};\n\n\n", "file_path": "risc0/core/test/archive.cpp", "rank": 59, "score": 64751.48711085541 }, { "content": "struct RoundMessage {\n\n risc0::Receipt receipt;\n\n\n\n struct Content {\n\n risc0::ShaDigest old_state;\n\n risc0::ShaDigest new_state;\n\n Position shot;\n\n HitType hit;\n\n\n\n template <typename Archive> void transfer(Archive& ar) {\n\n ar.transfer(old_state);\n\n ar.transfer(new_state);\n\n ar.transfer(shot);\n\n ar.transfer(hit);\n\n }\n\n };\n\n\n\n Content decode() const;\n\n};\n\n\n", "file_path": "examples/cpp/battleship/protocol.h", "rank": 60, "score": 64751.48711085541 }, { "content": "struct ShuffleMessage {\n\n risc0::Receipt receipt;\n\n};\n\n\n", "file_path": "examples/cpp/deck/protocol.h", "rank": 61, "score": 64751.48711085541 }, { "content": "struct ElfHeader {\n\n uint8_t ei_magic[4];\n\n uint8_t ei_class;\n\n uint8_t ei_data;\n\n uint8_t ei_version;\n\n uint8_t ei_pad[9];\n\n uint16_t e_type;\n\n uint16_t e_machine;\n\n uint32_t e_version;\n\n uint32_t e_entry;\n\n uint32_t e_phoff;\n\n uint32_t e_shoff;\n\n uint32_t e_flags;\n\n uint16_t e_ehsize;\n\n uint16_t e_phentsize;\n\n uint16_t e_phnum;\n\n uint16_t e_shentsize;\n\n uint16_t e_shnum;\n\n uint16_t e_shstrndx;\n\n};\n\n\n", "file_path": "risc0/core/elf.cpp", "rank": 62, "score": 64751.48711085541 }, { "content": "struct ProgHeader {\n\n uint32_t p_type;\n\n uint32_t p_offset;\n\n uint32_t p_vaddr;\n\n uint32_t p_paddr;\n\n uint32_t p_filesz;\n\n uint32_t p_memsz;\n\n uint32_t p_flags;\n\n uint32_t p_align;\n\n};\n\n\n\n} // namespace\n\n\n\nuint32_t loadElf(const std::string& name, uint32_t maxMem, std::map<uint32_t, uint32_t>& memOut) {\n\n using namespace std;\n\n ifstream is;\n\n is.open(name, ios::binary);\n\n if (is.fail() || is.bad()) {\n\n std::stringstream ss;\n\n ss << \"Could not load ELF: \" << name;\n", "file_path": "risc0/core/elf.cpp", "rank": 63, "score": 64751.48711085541 }, { "content": "struct ShuffleContent {\n\n risc0::ShaDigest dealerKeyDigest;\n\n 
risc0::Key playerKey;\n\n risc0::ShaDigest deckDigest;\n\n uint32_t deckSize;\n\n\n\n template <typename Archive> void transfer(Archive& ar) {\n\n ar.transfer(dealerKeyDigest);\n\n ar.transfer(playerKey);\n\n ar.transfer(deckDigest);\n\n ar.transfer(deckSize);\n\n }\n\n};\n\n\n", "file_path": "examples/cpp/deck/protocol.h", "rank": 64, "score": 64751.48711085541 }, { "content": "struct MemoryState {\n\n std::map<uint32_t, uint32_t> data;\n\n std::set<MemoryEvent> history;\n\n\n\n void dump(size_t logLevel);\n\n\n\n uint8_t loadByte(uint32_t addr);\n\n uint32_t load(uint32_t addr);\n\n uint32_t loadBE(uint32_t addr);\n\n void loadRegion(uint32_t addr, void* ptr, uint32_t len);\n\n\n\n void storeByte(uint32_t addr, uint8_t byte);\n\n void store(uint32_t addr, uint32_t value);\n\n void store(uint32_t addr, const void* ptr, uint32_t len);\n\n\n\n size_t strlen(uint32_t addr);\n\n};\n\n\n", "file_path": "risc0/zkvm/prove/step.h", "rank": 65, "score": 64751.48711085541 }, { "content": "struct MemoryEvent {\n\n uint32_t addr;\n\n uint32_t cycle;\n\n bool isWrite;\n\n uint32_t data;\n\n bool operator<(const MemoryEvent& rhs) const {\n\n if (addr != rhs.addr) {\n\n return addr < rhs.addr;\n\n }\n\n return cycle < rhs.cycle;\n\n }\n\n};\n\n\n", "file_path": "risc0/zkvm/prove/step.h", "rank": 66, "score": 64751.48711085541 }, { "content": "struct IoHandler {\n\n virtual void onInit(MemoryState& mem) {}\n\n virtual void onWrite(const BufferU8& data) {}\n\n virtual void onCommit(const BufferU8& data) {}\n\n virtual void onFault(const std::string& msg);\n\n virtual KeyStore& getKeyStore() = 0;\n\n};\n\n\n", "file_path": "risc0/zkvm/prove/step.h", "rank": 67, "score": 64751.48711085541 }, { "content": "struct ValueImplBase {\n\n virtual ~ValueImplBase() {}\n\n};\n\n\n", "file_path": "risc0/zkvm/circuit/context.h", "rank": 68, "score": 63173.14940601573 }, { "content": "/// A structure representing a SHA256 digest.\n\nstruct Digest {\n\n /// @private\n\n uint32_t words[8];\n\n\n\n 
/// @private\n\n template <typename Archive> void transfer(Archive& ar) {\n\n for (uint32_t& word : words) {\n\n ar.transfer(word);\n\n }\n\n }\n\n};\n\n\n", "file_path": "risc0/zkvm/sdk/cpp/guest/sha.h", "rank": 69, "score": 63173.14940601573 }, { "content": "// The initial message, sent from dealer to player, to ensure dealer shuffles fairly.\n\nstruct DealerCommitMessage {\n\n risc0::ShaDigest dealerKeyDigest;\n\n uint32_t deckSize;\n\n};\n\n\n", "file_path": "examples/cpp/deck/protocol.h", "rank": 70, "score": 63173.14940601573 }, { "content": "struct PlayerCommitMessage {\n\n risc0::Key playerKey;\n\n};\n\n\n", "file_path": "examples/cpp/deck/protocol.h", "rank": 71, "score": 63173.14940601573 }, { "content": "struct BufferImplBase {\n\n virtual ~BufferImplBase() {}\n\n size_t size;\n\n size_t offset;\n\n size_t back = 0;\n\n size_t digits = 0;\n\n bool isMux = false;\n\n};\n\n\n", "file_path": "risc0/zkvm/circuit/context.h", "rank": 72, "score": 63173.14940601573 }, { "content": "struct TapSetData {\n\n const detail::TapData* taps;\n\n size_t groupBegin[kNumRegisterGroups + 1];\n\n ComboData combos;\n\n};\n\n\n\n} // namespace detail\n\n\n\ntemplate <typename It> class IteratorRange {\n\npublic:\n\n IteratorRange(It begin, It end) : begin_(begin), end_(end) {}\n\n It begin() const { return begin_; }\n\n It end() const { return end_; }\n\n\n\nprivate:\n\n It begin_;\n\n It end_;\n\n};\n\n\n", "file_path": "risc0/zkp/verify/taps.h", "rank": 73, "score": 63173.14940601573 }, { "content": "struct CardResponseContent {\n\n risc0::ShaDigest deckDigest;\n\n uint32_t pos;\n\n uint32_t card;\n\n\n\n template <typename Archive> void transfer(Archive& ar) {\n\n ar.transfer(deckDigest);\n\n ar.transfer(pos);\n\n ar.transfer(card);\n\n }\n\n};\n\n\n", "file_path": "examples/cpp/deck/protocol.h", "rank": 74, "score": 63173.14940601573 }, { "content": "struct Receipt {\n\n BufferU8 journal;\n\n BufferU32 seal;\n\n\n\n // Verify a receipt against some code, throws if 
invalid.\n\n void verify(const MethodId& methodId) const;\n\n\n\n template <typename Archive> void transfer(Archive& ar) {\n\n ar.transfer(journal);\n\n ar.transfer(seal);\n\n }\n\n};\n\n\n", "file_path": "risc0/zkvm/sdk/cpp/host/receipt.h", "rank": 75, "score": 63173.14940601573 }, { "content": "struct CardResponseMessage {\n\n risc0::Receipt receipt;\n\n};\n\n\n", "file_path": "examples/cpp/deck/protocol.h", "rank": 76, "score": 63173.14940601573 }, { "content": "struct CardRequestMessage {\n\n uint32_t pos;\n\n};\n\n\n", "file_path": "examples/cpp/deck/protocol.h", "rank": 77, "score": 63173.14940601573 }, { "content": "struct MixState {\n\n Fp4 tot;\n\n Fp4 mul;\n\n};\n\n\n\n} // namespace\n\n\n\nvoid RiscVProveCircuit::evalCheck( //\n\n AccelSlice<Fp> check, // Output: Check polynomial\n\n // Evaluations of each polynomial on an extended domain\n\n AccelConstSlice<Fp> codeEval, //\n\n AccelConstSlice<Fp> dataEval, //\n\n AccelConstSlice<Fp> accumEval, //\n\n // Mix factor for polynomial constraints\n\n Fp4 polyMix) const {\n\n size_t size = size_t(1) << po2_;\n\n size_t domain = size * kInvRate;\n\n uint32_t mask = domain - 1;\n\n Fp* out = check.devicePointer();\n\n constexpr size_t expPo2 = log2Ceil(kInvRate);\n", "file_path": "risc0/zkvm/prove/riscv.cpp", "rank": 78, "score": 63173.14940601573 }, { "content": "struct Digest;\n", "file_path": "risc0/zkvm/sdk/cpp/guest/key.h", "rank": 79, "score": 63173.14940601573 }, { "content": "struct StringPair {\n\n std::string first;\n\n std::string second;\n\n\n\n template <typename Archive> void transfer(Archive& ar) {\n\n ar.transfer(first);\n\n ar.transfer(second);\n\n }\n\n\n\n bool operator==(const StringPair& rhs) const {\n\n return first == rhs.first && second == rhs.second;\n\n }\n\n};\n\n\n", "file_path": "risc0/core/test/archive.cpp", "rank": 80, "score": 63173.14940601573 }, { "content": "fn main() {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let inc_dir = Path::new(&out_dir);\n\n let out_dir = 
inc_dir.join(\"risc0\").join(\"zkvm\").join(\"circuit\");\n\n let out_path = out_dir.join(\"step.cpp.inc\");\n\n\n\n fs::create_dir_all(out_dir).unwrap();\n\n\n\n make_circuit(&out_path.to_str().unwrap()).unwrap();\n\n\n\n println!(\"cargo:inc={}\", inc_dir.to_str().unwrap());\n\n}\n", "file_path": "risc0/zkvm/circuit/make-circuit/build.rs", "rank": 81, "score": 61758.74964389549 }, { "content": "fn main() {\n\n risc0_build::link();\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/guest/build.rs", "rank": 82, "score": 61758.74964389549 }, { "content": "fn main() {\n\n risc0_build::embed_methods();\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/methods/build.rs", "rank": 83, "score": 61758.74964389549 }, { "content": "fn main() {\n\n println!(\"cargo:rustc-link-lib=static=tbb\");\n\n println!(\"cargo:rustc-link-lib=static=risc0-core-sys\");\n\n println!(\"cargo:rustc-link-lib=static=risc0-zkp-sys\");\n\n println!(\"cargo:rustc-link-lib=static=risc0-zkvm-sys\");\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/host/build.rs", "rank": 84, "score": 61758.74964389549 }, { "content": "struct ArchiveTestBuffer {\n\n std::unique_ptr<uint32_t[]> buf;\n\n size_t size;\n\n};\n\n\n", "file_path": "risc0/core/test/archive.cpp", "rank": 85, "score": 61696.98711028791 }, { "content": "struct InternLessThan {\n\n bool operator()(PolyOp a, PolyOp b) const { return a->lessThan(*b); }\n\n};\n\n\n\n} // namespace\n\n\n", "file_path": "risc0/zkvm/circuit/poly_context.cpp", "rank": 86, "score": 61696.98711028791 }, { "content": "struct ReceiptReader {\n\npublic:\n\n ReceiptReader(const Receipt& receipt) : stream(receipt.journal), archive(stream) {}\n\n\n\n template <typename T> T read() {\n\n T out;\n\n archive.transfer(out);\n\n return out;\n\n }\n\n\n\nprivate:\n\n CheckedStreamReader stream;\n\n ArchiveReader<CheckedStreamReader> archive;\n\n};\n\n\n", "file_path": "risc0/zkvm/sdk/cpp/host/receipt.h", "rank": 87, "score": 61696.98711028791 }, { "content": "struct ProveRoundInfo {\n\n size_t 
size;\n\n size_t domain;\n\n AccelSlice<Fp> evaluated;\n\n AccelSlice<Fp> outCoeffs;\n\n std::unique_ptr<MerkleTreeProver> merkle;\n\n\n\n ProveRoundInfo(WriteIOP& iop, AccelConstSlice<Fp> coeffs)\n\n : size(coeffs.size() / 4) // Coeffs are really Fp4s, but we treat as FP to use NTT/Merkle\n\n , domain(size * kInvRate)\n\n , evaluated(AccelSlice<Fp>::allocate(domain * 4))\n\n , outCoeffs(AccelSlice<Fp>::allocate(size / kFriFold * 4)) {\n\n LOG(1, \"Doing FRI folding\");\n\n batchExpand(evaluated, coeffs, 4);\n\n batchEvaluateNTT(evaluated, 4, log2Ceil(kInvRate));\n\n merkle =\n\n std::make_unique<MerkleTreeProver>(evaluated, domain / kFriFold, kFriFold * 4, kQueries);\n\n merkle->commit(iop);\n\n Fp4 foldMix = Fp4::random(iop);\n\n friFoldAccel(outCoeffs, coeffs, AccelSlice<Fp4>::copy(&foldMix, 1));\n", "file_path": "risc0/zkp/prove/fri.cpp", "rank": 88, "score": 61696.98711028791 }, { "content": "struct VerifyRoundInfo {\n\n size_t domain;\n\n MerkleTreeVerifier merkle;\n\n Fp4 mix;\n\n\n\n VerifyRoundInfo(ReadIOP& iop, size_t inDomain)\n\n : domain(inDomain / kFriFold)\n\n , merkle(iop, domain, kFriFold * 4, kQueries)\n\n , mix(Fp4::random(iop)) {}\n\n\n\n void verifyQuery(ReadIOP& iop, size_t* pos, Fp4* goal) const {\n\n // Compute which group we are in\n\n size_t quot = *pos / domain;\n\n size_t group = *pos % domain;\n\n // Get the column data\n\n auto data = merkle.verify(iop, group);\n\n std::vector<Fp4> data4(kFriFold);\n\n for (size_t i = 0; i < kFriFold; i++) {\n\n data4[i] = Fp4(data[0 * kFriFold + i],\n\n data[1 * kFriFold + i],\n", "file_path": "risc0/zkp/verify/fri.cpp", "rank": 89, "score": 61696.98711028791 }, { "content": "#[ctor::ctor]\n\nfn init() {\n\n unsafe { ffi::risc0_init() };\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::Prover;\n\n use anyhow::Result;\n\n use risc0_zkvm_core::Digest;\n\n use risc0_zkvm_methods::methods::{FAIL_ID, FAIL_PATH, IO_ID, IO_PATH, SHA_ID, SHA_PATH};\n\n use risc0_zkvm_serde::{from_slice, to_vec};\n\n\n\n 
#[test]\n\n fn sha() {\n\n assert_eq!(\n\n run_sha(\"\"),\n\n Digest::new([\n\n 0xe3b0c442, 0x98fc1c14, 0x9afbf4c8, 0x996fb924, 0x27ae41e4, 0x649b934c, 0xa495991b,\n\n 0x7852b855,\n\n ])\n", "file_path": "risc0/zkvm/sdk/rust/host/src/lib.rs", "rank": 90, "score": 60373.9051376423 }, { "content": "fn main() {\n\n risc0_build::link();\n\n}\n", "file_path": "risc0/zkvm/sdk/rust/methods/inner/build.rs", "rank": 91, "score": 60373.9051376423 }, { "content": "struct Env {\n\n input: Deserializer<'static>,\n\n output: Serializer<Slice<'static>>,\n\n commit: Serializer<Slice<'static>>,\n\n commit_len: usize,\n\n}\n\n\n", "file_path": "risc0/zkvm/sdk/rust/guest/src/env.rs", "rank": 92, "score": 60313.38964484431 }, { "content": "struct risc0_prover {\n\n std::unique_ptr<risc0::Prover> prover;\n\n};\n\n\n", "file_path": "risc0/zkvm/sdk/cpp/host/c_api.cpp", "rank": 93, "score": 60313.38964484431 }, { "content": "struct risc0_receipt {\n\n risc0::Receipt receipt;\n\n};\n\n\n\nvoid risc0_init() {\n\n if (const char* level = std::getenv(\"RISC0_LOG\")) {\n\n risc0::setLogLevel(std::atoi(level));\n\n }\n\n}\n\n\n\nconst char* risc0_string_ptr(risc0_string* str) {\n\n return str->str.c_str();\n\n}\n\n\n\nvoid risc0_string_free(risc0_string* str) {\n\n risc0_error err;\n\n ffi_wrap_void(&err, [&] { delete str; });\n\n}\n\n\n\nrisc0_prover* risc0_prover_new(risc0_error* err,\n", "file_path": "risc0/zkvm/sdk/cpp/host/c_api.cpp", "rank": 94, "score": 60313.38964484431 }, { "content": "struct BenchmarkStreamWriter {\n\n void write_word(uint32_t word) { tot_written += sizeof(uint32_t); }\n\n void write_dword(uint64_t dword) { tot_written += sizeof(uint64_t); }\n\n void write_buffer(const void* buf, size_t len) { tot_written += align(len); }\n\n\n\n size_t tot_written = 0;\n\n};\n\nstatic_assert(is_stream_writer<BenchmarkStreamWriter>());\n\n\n\nstatic void BM_Simple_Loop(benchmark::State& state) {\n\n uint32_t num_iter = state.range(0);\n\n size_t tot_iter = 0;\n\n\n\n for (auto _ : 
state) {\n\n MethodId methodId = makeMethodId(\"examples/cpp/deck/shuffle_method\");\n\n Prover prover(\"risc0/zkvm/prove/bench/shuffle_method\", methodId);\n\n prover.writeInput(num_iter);\n\n VectorStreamWriter receipt_buf;\n\n Receipt receipt = prover.run();\n\n BenchmarkStreamWriter writer;\n", "file_path": "risc0/zkvm/prove/bench/benchmark.cpp", "rank": 95, "score": 60313.38964484431 }, { "content": "struct risc0_string {\n\n std::string str;\n\n};\n\n\n\n} // extern \"C\"\n\n\n\ntemplate <typename F> void ffi_wrap_void(risc0_error* err, F fn) {\n\n try {\n\n err->msg = nullptr;\n\n fn();\n\n } catch (const std::exception& ex) {\n\n err->msg = new risc0_string{ex.what()};\n\n } catch (...) {\n\n err->msg = new risc0_string{\"C++ exception\"};\n\n }\n\n}\n\n\n\ntemplate <typename T, typename F> T ffi_wrap(risc0_error* err, T val, F fn) {\n\n try {\n\n err->msg = nullptr;\n", "file_path": "risc0/zkvm/sdk/cpp/host/c_api.cpp", "rank": 96, "score": 60313.38964484431 } ]
Rust
botan/src/rng.rs
chux0519/botan-rs
4fc7560f2fa29a0ced584027fe2c791393d24773
use botan_sys::*; use utils::*; #[derive(Debug)] pub struct RandomNumberGenerator { obj: botan_rng_t } impl Drop for RandomNumberGenerator { fn drop(&mut self) { unsafe { botan_rng_destroy(self.obj); } } } impl RandomNumberGenerator { fn new_of_type(typ: &str) -> Result<RandomNumberGenerator> { let mut obj = ptr::null_mut(); let typ = make_cstr(typ)?; call_botan! { botan_rng_init(&mut obj, typ.as_ptr()) } Ok(RandomNumberGenerator { obj }) } pub(crate) fn handle(&self) -> botan_rng_t { self.obj } pub fn new_userspace() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("user") } pub fn new_system() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("system") } pub fn new() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_userspace() } pub fn read(&self, len: usize) -> Result<Vec<u8>> { let mut result = vec![0; len]; self.fill(&mut result)?; Ok(result) } pub fn fill(&self, out: &mut [u8]) -> Result<()> { call_botan! { botan_rng_get(self.obj, out.as_mut_ptr(), out.len()) } Ok(()) } pub fn reseed(&self, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed(self.obj, bits) } Ok(()) } pub fn reseed_from_rng(&self, source: &RandomNumberGenerator, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed_from_rng(self.obj, source.handle(), bits) } Ok(()) } pub fn add_entropy(&self, seed: &[u8]) -> Result<()> { call_botan! { botan_rng_add_entropy(self.obj, seed.as_ptr(), seed.len()) } Ok(()) } }
use botan_sys::*; use utils::*; #[derive(Debug)] pub struct RandomNumberGenerator { obj: botan_rng_t } impl Drop for RandomNumberGenerator { fn drop(&mut self) { unsafe { botan_rng_destroy(self.obj); } } } impl RandomNumberGenerator { fn new_of_type(typ: &str) -> Result<RandomNumberGe
} pub fn new() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_userspace() } pub fn read(&self, len: usize) -> Result<Vec<u8>> { let mut result = vec![0; len]; self.fill(&mut result)?; Ok(result) } pub fn fill(&self, out: &mut [u8]) -> Result<()> { call_botan! { botan_rng_get(self.obj, out.as_mut_ptr(), out.len()) } Ok(()) } pub fn reseed(&self, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed(self.obj, bits) } Ok(()) } pub fn reseed_from_rng(&self, source: &RandomNumberGenerator, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed_from_rng(self.obj, source.handle(), bits) } Ok(()) } pub fn add_entropy(&self, seed: &[u8]) -> Result<()> { call_botan! { botan_rng_add_entropy(self.obj, seed.as_ptr(), seed.len()) } Ok(()) } }
nerator> { let mut obj = ptr::null_mut(); let typ = make_cstr(typ)?; call_botan! { botan_rng_init(&mut obj, typ.as_ptr()) } Ok(RandomNumberGenerator { obj }) } pub(crate) fn handle(&self) -> botan_rng_t { self.obj } pub fn new_userspace() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("user") } pub fn new_system() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("system")
random
[ { "content": "/// Password based key derivation function\n\n///\n\n/// Note currently only PBKDF2 is supported by this interface.\n\n/// For PBKDF2, iterations >= 100000 is recommended.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// let rng = botan::RandomNumberGenerator::new().unwrap();\n\n/// let salt = rng.read(10).unwrap();\n\n/// let key = botan::pbkdf(\"PBKDF2(SHA-256)\", 32, \"passphrase\", &salt, 10000).unwrap();\n\n/// assert_eq!(key.len(), 32);\n\n/// ```\n\npub fn pbkdf(algo: &str,\n\n out_len: usize,\n\n passphrase: &str,\n\n salt: &[u8],\n\n iterations: usize) -> Result<Vec<u8>> {\n\n\n\n derive_key_from_password(algo, out_len, passphrase, salt, iterations, 0, 0)\n\n}\n\n\n", "file_path": "botan/src/pbkdf.rs", "rank": 0, "score": 119662.14623621076 }, { "content": "/// Verify a bcrypt password hash\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// let rng = botan::RandomNumberGenerator::new().unwrap();\n\n/// let bcrypt = botan::bcrypt_hash(\"password\", &rng, 10).unwrap();\n\n/// assert_eq!(botan::bcrypt_verify(\"not even close\", &bcrypt), Ok(false));\n\n/// assert_eq!(botan::bcrypt_verify(\"password\", &bcrypt), Ok(true));\n\n/// ```\n\npub fn bcrypt_verify(pass: &str, hash: &str) -> Result<bool> {\n\n\n\n let rc = unsafe {\n\n botan_bcrypt_is_valid(make_cstr(pass)?.as_ptr(),\n\n make_cstr(hash)?.as_ptr())\n\n };\n\n\n\n if rc == 0 {\n\n Ok(true)\n\n }\n\n else if rc == BOTAN_FFI_INVALID_VERIFIER {\n\n Ok(false)\n\n }\n\n else {\n\n Err(Error::from(rc))\n\n }\n\n}\n", "file_path": "botan/src/bcrypt.rs", "rank": 1, "score": 113026.85051725223 }, { "content": "/// Hex decode some data\n\npub fn hex_decode(x: &str) -> Result<Vec<u8>> {\n\n\n\n let mut output = vec![0u8; x.len()/2];\n\n let mut output_len = output.len();\n\n\n\n let input = make_cstr(x)?;\n\n\n\n call_botan! 
{ botan_hex_decode(input.as_ptr(), x.len(), output.as_mut_ptr(), &mut output_len) }\n\n\n\n output.resize(output_len, 0);\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "botan/src/memutils.rs", "rank": 2, "score": 103713.96227000149 }, { "content": "/// Base64 decode some data\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// assert!(botan::base64_decode(\"ThisIsInvalid!\").is_err());\n\n/// assert_eq!(botan::base64_decode(\"YWJjZGVm\").unwrap(), b\"abcdef\");\n\n/// ```\n\npub fn base64_decode(x: &str) -> Result<Vec<u8>> {\n\n\n\n // Hard to provide a decent lower bound as it is possible x includes\n\n // lots of spaces or trailing = padding chars\n\n let bin_len = x.len();\n\n\n\n let input = make_cstr(x)?;\n\n\n\n call_botan_ffi_returning_vec_u8(bin_len, &|out_buf, out_len| {\n\n unsafe { botan_base64_decode(input.as_ptr(), x.len(), out_buf, out_len) }\n\n })\n\n}\n", "file_path": "botan/src/memutils.rs", "rank": 3, "score": 103713.96227000149 }, { "content": "/// Return the identifier used for PKCS1 v1.5 signatures for the specified hash\n\npub fn pkcs_hash_id(hash_algo: &str) -> Result<Vec<u8>> {\n\n let hash_algo = make_cstr(hash_algo)?;\n\n let id_len = 32; // largest currently is 20 bytes\n\n call_botan_ffi_returning_vec_u8(id_len, &|out_buf, out_len| {\n\n unsafe { botan_pkcs_hash_id(hash_algo.as_ptr(), out_buf, out_len) }\n\n })\n\n\n\n}\n", "file_path": "botan/src/pubkey.rs", "rank": 4, "score": 96687.58619385371 }, { "content": "/// Produce a bcrypt password hash\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// let rng = botan::RandomNumberGenerator::new().unwrap();\n\n/// let bcrypt1 = botan::bcrypt_hash(\"password\", &rng, 10).unwrap();\n\n/// let bcrypt2 = botan::bcrypt_hash(\"password\", &rng, 10).unwrap();\n\n/// assert_ne!(bcrypt1, bcrypt2); // different salt each time\n\n/// ```\n\npub fn bcrypt_hash(pass: &str, rng : &RandomNumberGenerator, workfactor: usize) -> Result<String> {\n\n\n\n let mut out = vec![0; BCRYPT_SIZE + 1];\n\n let mut out_len 
= out.len();\n\n\n\n call_botan! {\n\n botan_bcrypt_generate(out.as_mut_ptr(), &mut out_len,\n\n make_cstr(pass)?.as_ptr(),\n\n rng.handle(),\n\n workfactor, 0u32)\n\n };\n\n\n\n out.resize(out_len - 1, 0);\n\n Ok(String::from_utf8(out).map_err(|_| Error::ConversionError)?)\n\n}\n\n\n", "file_path": "botan/src/bcrypt.rs", "rank": 5, "score": 86012.52453112195 }, { "content": "/// Password based key derivation function\n\n///\n\n/// # Examples\n\n/// ```\n\n/// let rng = botan::RandomNumberGenerator::new().unwrap();\n\n/// let salt = rng.read(10).unwrap();\n\n/// let key = botan::derive_key_from_password(\"Scrypt\", 32, \"passphrase\", &salt, 8192, 8, 1).unwrap();\n\n/// assert_eq!(key.len(), 32);\n\n/// ```\n\npub fn derive_key_from_password(\n\n algo: &str,\n\n out_len: usize,\n\n passphrase: &str,\n\n salt: &[u8],\n\n param1: usize,\n\n param2: usize,\n\n param3: usize) -> Result<Vec<u8>> {\n\n\n\n let algo = make_cstr(algo)?;\n\n let passphrase = make_cstr(passphrase)?;\n\n\n\n let mut output = vec![0u8; out_len];\n\n\n\n call_botan! 
{\n\n botan_pwdhash(algo.as_ptr(),\n\n param1,\n\n param2,\n\n param3,\n\n output.as_mut_ptr(),\n\n output.len(),\n\n passphrase.as_ptr(),\n\n 0,\n\n salt.as_ptr(),\n\n salt.len())\n\n }\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "botan/src/pbkdf.rs", "rank": 6, "score": 78386.4324572182 }, { "content": "/// Key derivation function\n\n///\n\n/// Produces a KDF output of the specified size when run over the\n\n/// provided secret, salt, and label inputs\n\n///\n\n/// # Examples\n\n/// ```\n\n/// let salt = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];\n\n/// let label = vec![0x42, 0x6F, 0x62];\n\n/// let secret = vec![0x4E, 0x6F, 0x74, 0x20, 0x54, 0x65, 0x6C, 0x6C, 0x69, 0x6E, 0x67];\n\n/// let v = botan::kdf(\"HKDF(SHA-256)\", 23, &secret, &salt, &label).unwrap();\n\n/// assert_eq!(v.len(), 23);\n\n/// ```\n\npub fn kdf(algo: &str, output_len: usize, secret: &[u8], salt: &[u8], label: &[u8]) -> Result<Vec<u8>> {\n\n\n\n let mut output = vec![0u8; output_len];\n\n\n\n let algo = make_cstr(algo)?;\n\n\n\n call_botan! 
{ botan_kdf(algo.as_ptr(),\n\n output.as_mut_ptr(), output_len,\n\n secret.as_ptr(), secret.len(),\n\n salt.as_ptr(), salt.len(),\n\n label.as_ptr(), label.len()) };\n\n\n\n Ok(output)\n\n\n\n}\n", "file_path": "botan/src/kdf.rs", "rank": 7, "score": 75848.69562969457 }, { "content": "/// Scrypt key derivation\n\n///\n\n/// The n, r, p parameters control how much time and memory is used.\n\n/// As of 2018, n = 32768, r = 8, p = 1 seems sufficient.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// let rng = botan::RandomNumberGenerator::new().unwrap();\n\n/// let salt = rng.read(10).unwrap();\n\n/// let n = 32768;\n\n/// let r = 8;\n\n/// let p = 1;\n\n/// let key = botan::scrypt(32, \"passphrase\", &salt, n, r, p).unwrap();\n\n/// assert_eq!(key.len(), 32);\n\n/// ```\n\npub fn scrypt(out_len: usize,\n\n passphrase: &str,\n\n salt: &[u8],\n\n n: usize,\n\n r: usize,\n\n p: usize) -> Result<Vec<u8>> {\n\n\n\n derive_key_from_password(\"Scrypt\", out_len, passphrase, salt, n, r, p)\n\n}\n", "file_path": "botan/src/pbkdf.rs", "rank": 8, "score": 74511.43934105856 }, { "content": "/// Hex encode some data\n\npub fn hex_encode(x: &[u8]) -> Result<String> {\n\n let flags = 0u32;\n\n\n\n let mut output = vec![0u8; x.len() * 2];\n\n call_botan! 
{ botan_hex_encode(x.as_ptr(), x.len(), output.as_mut_ptr() as *mut c_char, flags) };\n\n\n\n let cstr = CString::new(output).map_err(|_| Error::ConversionError)?;\n\n let ostr = cstr.into_string().map_err(|_| Error::ConversionError)?;\n\n Ok(ostr)\n\n}\n\n\n", "file_path": "botan/src/memutils.rs", "rank": 9, "score": 67781.80940364877 }, { "content": "/// Base64 encode some data\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// assert_eq!(botan::base64_encode(&[97,98,99,100,101,102]).unwrap(), \"YWJjZGVm\");\n\n/// assert_eq!(botan::base64_encode(&[0x5A, 0x16, 0xAD, 0x4E, 0x17, 0x87, 0x79, 0xC9]).unwrap(), \"WhatTheHeck=\");\n\n/// ```\n\npub fn base64_encode(x: &[u8]) -> Result<String> {\n\n\n\n let b64_len = 1 + ((x.len() + 2) / 3) * 4;\n\n\n\n call_botan_ffi_returning_string(b64_len, &|out_buf, out_len| {\n\n unsafe { botan_base64_encode(x.as_ptr(), x.len(), out_buf as *mut c_char, out_len) }\n\n })\n\n}\n\n\n", "file_path": "botan/src/memutils.rs", "rank": 10, "score": 67781.80940364877 }, { "content": "/// Securely zeroize memory\n\n///\n\n/// Write zeros to the array (eg to clear out a key) in a way that is\n\n/// unlikely to be removed by the compiler.\n\npub fn scrub_mem<T: Copy>(a: &mut [T]) {\n\n let bytes = mem::size_of::<T>() * a.len();\n\n unsafe { botan_scrub_mem(a.as_mut_ptr() as *mut c_void, bytes) };\n\n}\n\n\n", "file_path": "botan/src/memutils.rs", "rank": 11, "score": 64901.5854079992 }, { "content": "/// Const time comparison\n\n///\n\n/// Compare two arrays without leaking side channel information\n\npub fn const_time_compare<T: Copy>(a: &[T], b: &[T]) -> bool {\n\n if a.len() != b.len() {\n\n return false;\n\n }\n\n\n\n let bytes = mem::size_of::<T>() * a.len();\n\n let rc = unsafe { botan_constant_time_compare(a.as_ptr() as *const u8, b.as_ptr() as *const u8, bytes) };\n\n\n\n return rc == 0;\n\n}\n\n\n", "file_path": "botan/src/memutils.rs", "rank": 12, "score": 59877.414836382886 }, { "content": "/// Wrap a key using NIST's AES key wrap 
algorithm.\n\n///\n\n/// The kek (key-encryption-key) must be a valid length for an AES\n\n/// key. The wrapped key must be a multiple of 8 bytes.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// // Wrap a 128-bit key with a 256-bit key:\n\n/// let key = vec![0; 16];\n\n/// let kek = vec![0; 32];\n\n/// let wrapped = botan::nist_key_wrap(&kek, &key).unwrap();\n\n/// ```\n\npub fn nist_key_wrap(kek: &[u8], key: &[u8]) -> Result<Vec<u8>> {\n\n\n\n if kek.len() != 16 && kek.len() != 24 && kek.len() != 32 {\n\n return Err(Error::InvalidKeyLength);\n\n }\n\n\n\n if key.len() % 8 != 0 {\n\n return Err(Error::InvalidInput);\n\n }\n\n\n\n let mut output = vec![0; key.len() + 8];\n\n let mut output_len = output.len();\n\n\n\n call_botan! {\n\n botan_key_wrap3394(key.as_ptr(), key.len(),\n\n kek.as_ptr(), kek.len(),\n\n output.as_mut_ptr(), &mut output_len)\n\n }\n\n\n\n output.resize(output_len, 0);\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "botan/src/keywrap.rs", "rank": 13, "score": 57063.719095035915 }, { "content": "/// Unwrap a key encrypted using NIST's AES key wrap algorithm\n\n/// # Examples\n\n///\n\n/// ```\n\n/// // Wrap a 128-bit key with a 256-bit key:\n\n/// let key = vec![0; 16];\n\n/// let kek = vec![0; 32];\n\n/// let wrapped = botan::nist_key_wrap(&kek, &key).unwrap();\n\n/// let unwrapped = botan::nist_key_unwrap(&kek, &wrapped).unwrap();\n\n/// assert_eq!(unwrapped, key);\n\n/// ```\n\npub fn nist_key_unwrap(kek: &[u8], wrapped: &[u8]) -> Result<Vec<u8>> {\n\n\n\n if kek.len() != 16 && kek.len() != 24 && kek.len() != 32 {\n\n return Err(Error::InvalidKeyLength);\n\n }\n\n\n\n if wrapped.len() % 8 != 0 {\n\n return Err(Error::InvalidInput);\n\n }\n\n\n\n let mut output = vec![0; wrapped.len() - 8];\n\n let mut output_len = output.len();\n\n\n\n call_botan! 
{\n\n botan_key_unwrap3394(wrapped.as_ptr(), wrapped.len(),\n\n kek.as_ptr(), kek.len(),\n\n output.as_mut_ptr(), &mut output_len)\n\n }\n\n\n\n output.resize(output_len, 0);\n\n\n\n Ok(output)\n\n\n\n}\n", "file_path": "botan/src/keywrap.rs", "rank": 14, "score": 57063.719095035915 }, { "content": "fn main() {\n\n println!(\"cargo:rustc-link-lib=botan-2\");\n\n}\n", "file_path": "botan-sys/build.rs", "rank": 15, "score": 35006.185807520356 }, { "content": "#[test]\n\nfn test_totp() {\n\n let totp = botan::TOTP::new(b\"1234567890123456789012345678901234567890123456789012345678901234\",\n\n \"SHA-512\", 8, 30).unwrap();\n\n\n\n assert_eq!(totp.generate(59).unwrap(), 90693936);\n\n assert_eq!(totp.generate(1111111109).unwrap(), 25091201);\n\n assert_eq!(totp.generate(1111111111).unwrap(), 99943326);\n\n\n\n assert!(totp.check(90693936, 59, 0).unwrap());\n\n assert!(!totp.check(90693936, 60, 0).unwrap());\n\n assert!(totp.check(90693936, 59+30, 1).unwrap());\n\n assert!(!totp.check(90693936, 59+31, 1).unwrap());\n\n}\n", "file_path": "botan/tests/tests.rs", "rank": 16, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_rng() {\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let read1 = rng.read(10).unwrap();\n\n let read2 = rng.read(10).unwrap();\n\n\n\n assert!(read1 != read2);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 17, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_kdf() {\n\n\n\n let salt = botan::hex_decode(\"000102030405060708090A0B0C\").unwrap();\n\n let label = botan::hex_decode(\"F0F1F2F3F4F5F6F7F8F9\").unwrap();\n\n let secret = botan::hex_decode(\"0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B0B\").unwrap();\n\n let expected_output = botan::hex_decode(\"3CB25F25FAACD57A90434F64D0362F2A2D2D0A90CF1A5A4C5DB02D56ECC4C5BF34007208D5B887185865\").unwrap();\n\n\n\n let output = botan::kdf(\"HKDF(SHA-256)\", expected_output.len(), &secret, &salt, &label).unwrap();\n\n\n\n assert_eq!(output, 
expected_output);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 18, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_rsa() {\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let padding = \"EMSA-PKCS1-v1_5(SHA-256)\";\n\n let msg = rng.read(32).unwrap();\n\n\n\n let privkey = botan::Privkey::create(\"RSA\", \"1024\", &rng).unwrap();\n\n let pubkey = privkey.pubkey().unwrap();\n\n\n\n assert_eq!(privkey.get_field(\"e\"), botan::MPI::from_str(\"65537\"));\n\n assert_eq!(privkey.get_field(\"n\").unwrap().bit_count().unwrap(), 1024);\n\n\n\n assert_eq!(pubkey.get_field(\"n\"), privkey.get_field(\"n\"));\n\n\n\n let p = privkey.get_field(\"p\").unwrap();\n\n let q = privkey.get_field(\"q\").unwrap();\n\n\n\n assert_eq!(&p * &q, privkey.get_field(\"n\").unwrap());\n\n\n\n let signer = botan::Signer::new(&privkey, padding).unwrap();\n", "file_path": "botan/tests/tests.rs", "rank": 19, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_bcrypt() {\n\n let pass = \"password\";\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let bcrypt1 = botan::bcrypt_hash(pass, &rng, 10).unwrap();\n\n\n\n assert_eq!(bcrypt1.len(), 60);\n\n\n\n let bcrypt2 = botan::bcrypt_hash(pass, &rng, 10).unwrap();\n\n\n\n assert_eq!(bcrypt2.len(), 60);\n\n\n\n assert!(bcrypt1 != bcrypt2);\n\n\n\n assert!(botan::bcrypt_verify(pass, &bcrypt1).unwrap());\n\n assert!(botan::bcrypt_verify(pass, &bcrypt2).unwrap());\n\n\n\n assert_eq!(botan::bcrypt_verify(\"passwurd\", &bcrypt2).unwrap(), false);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 20, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_ed25519() {\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let msg = vec![23,42,69,6,66];\n\n let padding = \"Pure\";\n\n\n\n let ed_priv = botan::Privkey::create(\"Ed25519\", \"\", &rng).unwrap();\n\n\n\n let signer = botan::Signer::new(&ed_priv, padding).unwrap();\n\n 
signer.update(&msg).unwrap();\n\n let signature1 = signer.finish(&rng).unwrap();\n\n\n\n let ed_bits = ed_priv.get_ed25519_key().unwrap();\n\n\n\n let ed_loaded = botan::Privkey::load_ed25519(&ed_bits.1).unwrap();\n\n let signer = botan::Signer::new(&ed_loaded, padding).unwrap();\n\n signer.update(&msg).unwrap();\n\n let signature2 = signer.finish(&rng).unwrap();\n\n\n\n let ed_pub = ed_priv.pubkey().unwrap();\n", "file_path": "botan/tests/tests.rs", "rank": 21, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_x25519() {\n\n\n\n // Test from RFC 8037\n\n let a_pub_bits = botan::hex_decode(\"de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f\").unwrap();\n\n let b_priv_bits = botan::hex_decode(\"77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a\").unwrap();\n\n let b_pub_bits = botan::hex_decode(\"8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a\").unwrap();\n\n let expected_shared = botan::hex_decode(\"4a5d9d5ba4ce2de1728e3bf480350f25e07e21c947d19e3376f09b3c1e161742\").unwrap();\n\n\n\n let a_pub = botan::Pubkey::load_x25519(&a_pub_bits).unwrap();\n\n assert_eq!(a_pub.get_x25519_key().unwrap(), a_pub_bits);\n\n\n\n let b_priv = botan::Privkey::load_x25519(&b_priv_bits).unwrap();\n\n assert_eq!(b_priv.get_x25519_key().unwrap(), b_priv_bits);\n\n\n\n assert_eq!(b_priv.key_agreement_key().unwrap(), b_pub_bits);\n\n assert_eq!(b_priv.pubkey().unwrap().get_x25519_key().unwrap(), b_pub_bits);\n\n\n\n let ka = botan::KeyAgreement::new(&b_priv, \"Raw\").unwrap();\n\n\n\n let shared = ka.agree(0, &a_pub_bits, &[]).unwrap();\n\n\n\n assert_eq!(shared, expected_shared);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 22, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_pbkdf() {\n\n\n\n let salt = botan::hex_decode(\"0001020304050607\").unwrap();\n\n let iterations = 10000;\n\n let passphrase = \"xyz\";\n\n let expected_output = 
botan::hex_decode(\"DEFD2987FA26A4672F4D16D98398432AD95E896BF619F6A6B8D4ED\").unwrap();\n\n\n\n let output = botan::pbkdf(\"PBKDF2(SHA-256)\", expected_output.len(), passphrase, &salt, iterations).unwrap();\n\n\n\n assert_eq!(output, expected_output);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 23, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_hex() {\n\n let raw = vec![1,2,3,255,42,23];\n\n assert_eq!(botan::hex_encode(&raw).unwrap(), \"010203FF2A17\");\n\n assert_eq!(botan::hex_decode(\"010203FF2A17\").unwrap(), raw);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 24, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_version() {\n\n let version = botan::Version::new().unwrap();\n\n\n\n /*\n\n If we are running against a released version we know it must be at\n\n least 2.8 since we require APIs added after the 2.7 release.\n\n */\n\n\n\n assert_eq!(version.major, 2);\n\n assert!(version.minor >= 8);\n\n assert!(version.release_date == 0 || version.release_date >= 20181001);\n\n assert!(version.ffi_api >= 20180713);\n\n\n\n assert!(botan::Version::supports_version(version.ffi_api));\n\n assert!(botan::Version::supports_version(20180713));\n\n assert!(!botan::Version::supports_version(20180712));\n\n\n\n //println!(\"{:?}\", version);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 25, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_certs() {\n\n let cert_bits = 
botan::hex_decode(\"3082035A30820305A003020102020101300C06082A8648CE3D04030105003050310B3009060355040613024445310D300B060355040A0C0462756E64310C300A060355040B0C03627369310D300B06035504051304343536373115301306035504030C0C637363612D6765726D616E79301E170D3037303731393135323731385A170D3238303131393135313830305A3050310B3009060355040613024445310D300B060355040A0C0462756E64310C300A060355040B0C03627369310D300B06035504051304343536373115301306035504030C0C637363612D6765726D616E79308201133081D406072A8648CE3D02013081C8020101302806072A8648CE3D0101021D00D7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF303C041C68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43041C2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B0439040D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD021D00D7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F020101033A000401364A4B0F0102E9502AB9DC6855D90B065A6F5E5E48395F8309D57C11ABAFF21756607EF6757EC9886CA222D83CA04B1A99FA43C5A9BCE1A38201103082010C30360603551D11042F302D8118637363612D6765726D616E79406273692E62756E642E646586116661783A2B343932323839353832373232300E0603551D0F0101FF040403020106301D0603551D0E041604140096452DE588F966C4CCDF161DD1F3F5341B71E7301F0603551D230418301680140096452DE588F966C4CCDF161DD1F3F5341B71E730410603551D20043A30383036060904007F0007030101013029302706082B06010505070201161B687474703A2F2F7777772E6273692E62756E642E64652F6373636130120603551D130101FF040830060101FF020100302B0603551D1004243022800F32303037303731393135323731385A810F32303237313131393135313830305A300C06082A8648CE3D0403010500034100303E021D00C6B41E830217FD4C93B59E9E2B13734E09C182FA63FAEE4115A8EDD5021D00D27938DA01B8951A9064A1B696AEDF181B74968829C138F0EB2F623B\").unwrap();\n\n\n\n let cert = botan::Certificate::load(&cert_bits).unwrap();\n\n\n\n let key_id = botan::hex_decode(\"0096452DE588F966C4CCDF161DD1F3F5341B71E7\").unwrap();\n\n assert_eq!(cert.serial_number().unwrap(), vec![1]);\n\n 
assert_eq!(cert.authority_key_id().unwrap(), key_id);\n\n assert_eq!(cert.subject_key_id().unwrap(), key_id);\n\n\n\n assert_eq!(cert.allows_usage(botan::CertUsage::CertificateSign).unwrap(), true);\n\n assert_eq!(cert.allows_usage(botan::CertUsage::CrlSign).unwrap(), true);\n\n assert_eq!(cert.allows_usage(botan::CertUsage::KeyEncipherment).unwrap(), false);\n\n\n\n let pubkey = cert.public_key().unwrap();\n\n\n\n assert_eq!(pubkey.algo_name().unwrap(), \"ECDSA\");\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 26, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_scrypt() {\n\n\n\n let salt = botan::hex_decode(\"4E61436C\").unwrap();\n\n let n = 1024;\n\n let r = 8;\n\n let p = 16;\n\n let passphrase = \"password\";\n\n let expected_output = botan::hex_decode(\"fdbabe1c9d3472007856e7190d01e9fe7c6ad7cbc8237830e77376634b3731622e\").unwrap();\n\n\n\n let output = botan::scrypt(expected_output.len(), passphrase, &salt, n, r, p).unwrap();\n\n\n\n assert_eq!(output, expected_output);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 27, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_hash() {\n\n let hash = botan::HashFunction::new(\"SHA-384\").unwrap();\n\n\n\n assert_eq!(hash.output_length().unwrap(), 48);\n\n assert_eq!(hash.block_size().unwrap(), 128);\n\n assert_eq!(hash.algo_name().unwrap(), \"SHA-384\");\n\n\n\n assert!(hash.update(&[97,98]).is_ok());\n\n\n\n let hash_dup = hash.duplicate().unwrap();\n\n\n\n assert!(hash.update(&[99]).is_ok());\n\n assert!(hash_dup.update(&[100]).is_ok());\n\n\n\n hash.clear().unwrap();\n\n\n\n hash.update(&[97,98,99]).unwrap();\n\n\n\n let digest = hash.finish().unwrap();\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 28, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_hotp() {\n\n let hotp = botan::HOTP::new(&[0xFF], \"SHA-1\", 6).unwrap();\n\n assert_eq!(hotp.generate(23).unwrap(), 330795);\n\n\n\n assert!(hotp.check(330795, 23).unwrap());\n\n 
assert!(!hotp.check(330795, 22).unwrap());\n\n assert!(!hotp.check(330796, 23).unwrap());\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 29, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_mac() {\n\n let mac = botan::MsgAuthCode::new(\"HMAC(SHA-384)\").unwrap();\n\n\n\n let key_spec = mac.key_spec().unwrap();\n\n assert_eq!(mac.output_length().unwrap(), 48);\n\n assert_eq!(mac.algo_name().unwrap(), \"HMAC(SHA-384)\");\n\n\n\n assert!(key_spec.is_valid_keylength(20));\n\n\n\n mac.set_key(&vec![0xAA; 20]).unwrap();\n\n\n\n mac.update(&vec![0xDD; 1]).unwrap();\n\n mac.update(&vec![0xDD; 29]).unwrap();\n\n mac.update(&vec![0xDD; 20]).unwrap();\n\n\n\n let r = mac.finish().unwrap();\n\n\n\n assert_eq!(botan::hex_encode(&r).unwrap(),\n\n \"88062608D3E6AD8A0AA2ACE014C8A86F0AA635D947AC9FEBE83EF4E55966144B2A5AB39DC13814B94E3AB6E101A34F27\");\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 30, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_mp() {\n\n let mut a = botan::MPI::new().unwrap();\n\n let mut b = botan::MPI::new().unwrap();\n\n\n\n assert_eq!(a.to_u32().unwrap(), 0);\n\n assert_eq!(b.to_u32().unwrap(), 0);\n\n\n\n a.set_i32(9).unwrap();\n\n b.set_i32(81).unwrap();\n\n\n\n assert_eq!(a.to_u32().unwrap(), 9);\n\n assert_eq!(b.to_u32().unwrap(), 81);\n\n\n\n let mut c = &a + &b;\n\n assert_eq!(c.to_u32().unwrap(), 90);\n\n\n\n let d = botan::MPI::from_str(\"0x5A\").unwrap();\n\n assert_eq!(c, d);\n\n\n\n c *= &botan::MPI::from_str(\"1030\").unwrap();\n", "file_path": "botan/tests/tests.rs", "rank": 31, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_pubkey() {\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let ecdsa_key = botan::Privkey::create(\"ECDSA\", \"secp256r1\", &rng).unwrap();\n\n\n\n assert!(ecdsa_key.check_key(&rng).unwrap(), true);\n\n assert_eq!(ecdsa_key.algo_name().unwrap(), \"ECDSA\");\n\n\n\n assert!(ecdsa_key.get_field(\"n\").is_err());\n\n 
assert_eq!(ecdsa_key.get_field(\"order\"),\n\n botan::MPI::from_str(\"0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551\"));\n\n\n\n let pub_key = ecdsa_key.pubkey().unwrap();\n\n\n\n assert_eq!(pub_key.algo_name().unwrap(), \"ECDSA\");\n\n\n\n let bits = ecdsa_key.der_encode().unwrap();\n\n let pem = ecdsa_key.pem_encode().unwrap();\n\n assert!(pem.starts_with(\"-----BEGIN PRIVATE KEY-----\\n\"));\n\n assert!(pem.ends_with(\"-----END PRIVATE KEY-----\\n\"));\n", "file_path": "botan/tests/tests.rs", "rank": 32, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_fpe() {\n\n let modulus = botan::MPI::from_str(\"1000000000\").unwrap();\n\n let input = botan::MPI::from_str(\"939210311\").unwrap();\n\n\n\n let key = vec![0; 32];\n\n let tweak = vec![0; 8];\n\n\n\n let fpe = botan::FPE::new_fe1(&modulus, &key, 8, false).unwrap();\n\n\n\n let ctext = fpe.encrypt(&input, &tweak).unwrap();\n\n\n\n assert_ne!(ctext, input);\n\n\n\n let ptext = fpe.decrypt(&ctext, &tweak).unwrap();\n\n\n\n assert_eq!(ptext, input);\n\n}\n\n\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 33, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_cipher() {\n\n let cipher = botan::Cipher::new(\"AES-128/GCM\", botan::CipherDirection::Encrypt).unwrap();\n\n\n\n assert_eq!(cipher.tag_length(), 16);\n\n\n\n let zero16 = vec![0; 16];\n\n let zero12 = vec![0; 12];\n\n\n\n assert!(cipher.set_associated_data(&[1,2,3]).is_err()); // trying to set AD before key is set\n\n assert_eq!(cipher.set_key(&vec![0; 42]).unwrap_err(), botan::Error::InvalidKeyLength);\n\n\n\n cipher.set_key(&zero16).unwrap();\n\n\n\n cipher.set_associated_data(&[1,2,3]).unwrap();\n\n cipher.set_associated_data(&[]).unwrap();\n\n\n\n let ctext = cipher.process(&zero12, &zero16).unwrap();\n\n\n\n assert_eq!(botan::hex_encode(&ctext).unwrap(),\n\n \"0388DACE60B6A392F328C2B971B2FE78AB6E47D42CEC13BDF53A67B21257BDDF\");\n\n\n\n let cipher = botan::Cipher::new(\"AES-128/GCM\", 
botan::CipherDirection::Decrypt).unwrap();\n\n cipher.set_key(&zero16).unwrap();\n\n\n\n let ptext = cipher.process(&zero12, &ctext).unwrap();\n\n\n\n assert_eq!(ptext, zero16);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 34, "score": 33714.89278380704 }, { "content": "#[test]\n\nfn test_chacha() {\n\n let cipher = botan::Cipher::new(\"ChaCha20\", botan::CipherDirection::Encrypt).unwrap();\n\n\n\n assert_eq!(cipher.tag_length(), 0);\n\n\n\n let key_spec = cipher.key_spec().unwrap();\n\n\n\n assert!(key_spec.is_valid_keylength(0) == false);\n\n assert!(key_spec.is_valid_keylength(16));\n\n assert!(key_spec.is_valid_keylength(32));\n\n assert!(key_spec.is_valid_keylength(48) == false);\n\n\n\n let key = vec![0; 32];\n\n\n\n let expected = botan::hex_decode(\"76B8E0ADA0F13D90405D6AE55386BD28BDD219B8A08DED1AA836EFCC8B770DC7DA41597C5157488D7724E03FB8D84A376A43B8F41518A11CC387B669\").unwrap();\n\n\n\n cipher.set_key(&key).unwrap();\n\n\n\n assert!(cipher.set_associated_data(&[1,2,3]).is_err()); // not an AEAD\n\n assert!(cipher.set_associated_data(&[]).is_err());\n\n\n\n let iv = vec![];\n\n let input = vec![0; expected.len()];\n\n\n\n let ctext = cipher.process(&iv, &input).unwrap();\n\n\n\n assert_eq!(ctext, expected);\n\n}\n\n\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 35, "score": 33714.89278380704 }, { "content": "use botan_sys::*;\n\n\n\npub(crate) use std::os::raw::{c_char, c_int, c_void};\n\npub(crate) use std::ffi::{CStr, CString};\n\npub(crate) use std::ptr;\n\npub(crate) use std::mem;\n\n\n\n/// The result of calling an operation on the library\n\npub type Result<T> = ::std::result::Result<T, Error>;\n\n\n\npub(crate) fn make_cstr(input: &str) -> Result<CString> {\n\n let cstr = CString::new(input).map_err(|_| Error::ConversionError)?;\n\n Ok(cstr)\n\n}\n\n\n\npub(crate) fn call_botan_ffi_returning_vec_u8(\n\n initial_size: usize,\n\n cb: &Fn(*mut u8, *mut usize) -> c_int) -> Result<Vec<u8>> {\n\n\n\n let mut output = vec![0; 
initial_size];\n", "file_path": "botan/src/utils.rs", "rank": 36, "score": 33214.23737320821 }, { "content": " Ok(output)\n\n}\n\n\n\npub(crate) fn call_botan_ffi_returning_string(\n\n initial_size: usize,\n\n cb: &Fn(*mut u8, *mut usize) -> c_int) -> Result<String> {\n\n\n\n let v = call_botan_ffi_returning_vec_u8(initial_size, cb)?;\n\n\n\n let cstr = CStr::from_bytes_with_nul(&v).map_err(|_| Error::ConversionError)?;\n\n let ostr = cstr.to_str().map_err(|_| Error::ConversionError)?.to_owned();\n\n Ok(ostr)\n\n}\n\n\n\n#[derive(Clone,Debug,PartialEq)]\n\n/// Possible errors\n\npub enum Error {\n\n /// A provided authentication code was incorrect\n\n BadAuthCode,\n\n /// A bad flag was passed to the library\n", "file_path": "botan/src/utils.rs", "rank": 37, "score": 33210.01634877218 }, { "content": " BOTAN_FFI_ERROR_KEY_NOT_SET => Error::KeyNotSet,\n\n BOTAN_FFI_ERROR_NOT_IMPLEMENTED => Error::NotImplemented,\n\n BOTAN_FFI_ERROR_NULL_POINTER => Error::NullPointer,\n\n BOTAN_FFI_ERROR_OUT_OF_MEMORY => Error::OutOfMemory,\n\n BOTAN_FFI_ERROR_UNKNOWN_ERROR => Error::UnknownError,\n\n BOTAN_FFI_INVALID_VERIFIER => Error::InvalidVerifier,\n\n _ => Error::UnknownError,\n\n }\n\n }\n\n}\n\n\n\n/// Specifies valid keylengths for symmetric ciphers/MACs\n\npub struct KeySpec {\n\n min_keylen: usize,\n\n max_keylen: usize,\n\n mod_keylen: usize,\n\n}\n\n\n\nimpl KeySpec {\n\n\n", "file_path": "botan/src/utils.rs", "rank": 38, "score": 33206.61525242654 }, { "content": " self.min_keylen\n\n }\n\n\n\n /// Return the maximum supported keylength\n\n pub fn maximum_keylength(&self) -> usize {\n\n self.max_keylen\n\n }\n\n\n\n /// Return the required multiple of the keylength\n\n ///\n\n /// That is each key must be N*keylength_multiple() for some N\n\n pub fn keylength_multiple(&self) -> usize {\n\n self.mod_keylen\n\n }\n\n\n\n}\n", "file_path": "botan/src/utils.rs", "rank": 39, "score": 33206.3294617846 }, { "content": " pub(crate) fn new(min_keylen: usize, max_keylen: usize, 
mod_keylen: usize) -> Result<KeySpec> {\n\n if min_keylen > max_keylen {\n\n return Err(Error::ConversionError);\n\n }\n\n if mod_keylen == 0 {\n\n return Err(Error::ConversionError);\n\n }\n\n\n\n Ok(KeySpec { min_keylen, max_keylen, mod_keylen })\n\n }\n\n\n\n /// Return true if the specified key length is valid for this object\n\n pub fn is_valid_keylength(&self, keylen: usize) -> bool {\n\n (keylen >= self.min_keylen &&\n\n keylen <= self.max_keylen &&\n\n keylen % self.mod_keylen == 0)\n\n }\n\n\n\n /// Return the minimum supported keylength\n\n pub fn minimum_keylength(&self) -> usize {\n", "file_path": "botan/src/utils.rs", "rank": 40, "score": 33206.03679826356 }, { "content": " NullPointer,\n\n /// Memory exhaustion\n\n OutOfMemory,\n\n /// Some unknown error occurred\n\n UnknownError,\n\n /// An error occured while converting data to C\n\n ConversionError\n\n}\n\n\n\nimpl From<i32> for Error {\n\n fn from(err: i32) -> Error {\n\n match err {\n\n BOTAN_FFI_ERROR_BAD_FLAG => Error::BadFlag,\n\n BOTAN_FFI_ERROR_BAD_MAC => Error::BadAuthCode,\n\n BOTAN_FFI_ERROR_BAD_PARAMETER => Error::BadParameter,\n\n BOTAN_FFI_ERROR_EXCEPTION_THROWN => Error::ExceptionThrown,\n\n BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE => Error::InsufficientBufferSpace,\n\n BOTAN_FFI_ERROR_INVALID_INPUT => Error::InvalidInput,\n\n BOTAN_FFI_ERROR_INVALID_KEY_LENGTH => Error::InvalidKeyLength,\n\n BOTAN_FFI_ERROR_INVALID_OBJECT => Error::InvalidObject,\n", "file_path": "botan/src/utils.rs", "rank": 41, "score": 33204.760453057956 }, { "content": " let mut out_len = output.len();\n\n\n\n let rc = cb(output.as_mut_ptr(), &mut out_len);\n\n if rc == 0 {\n\n assert!(out_len <= output.len());\n\n output.resize(out_len, 0);\n\n return Ok(output);\n\n }\n\n else if rc != BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE {\n\n return Err(Error::from(rc));\n\n }\n\n\n\n output.resize(out_len, 0);\n\n let rc = cb(output.as_mut_ptr(), &mut out_len);\n\n\n\n if rc != 0 {\n\n return Err(Error::from(rc));\n\n 
}\n\n\n\n output.resize(out_len, 0);\n", "file_path": "botan/src/utils.rs", "rank": 42, "score": 33203.266603592965 }, { "content": " BadFlag,\n\n /// An invalid parameter was provided to the library\n\n BadParameter,\n\n /// An exception was thrown while processing this request\n\n ExceptionThrown,\n\n /// There was insufficient buffer space to write the output\n\n InsufficientBufferSpace,\n\n /// Something about the input was invalid\n\n InvalidInput,\n\n /// An invalid object was provided to the library\n\n InvalidObject,\n\n /// A verifier was incorrect\n\n InvalidVerifier,\n\n /// An key of invalid length was provided\n\n InvalidKeyLength,\n\n /// An object was invoked without the key being set\n\n KeyNotSet,\n\n /// Some functionality is not implemented in the current library version\n\n NotImplemented,\n\n /// A null pointer was incorrectly provided\n", "file_path": "botan/src/utils.rs", "rank": 43, "score": 33203.266603592965 }, { "content": "#[test]\n\nfn test_pubkey_encrypt() {\n\n let msg = vec![1,23,42];\n\n\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let priv_key = botan::Privkey::create(\"RSA\", \"2048\", &rng).unwrap();\n\n assert!(priv_key.key_agreement_key().is_err());\n\n let pub_key = priv_key.pubkey().unwrap();\n\n\n\n let encryptor = botan::Encryptor::new(&pub_key, \"OAEP(SHA-256)\").unwrap();\n\n\n\n let ctext = encryptor.encrypt(&msg, &rng).unwrap();\n\n assert_eq!(ctext.len(), 2048/8);\n\n\n\n let decryptor = botan::Decryptor::new(&priv_key, \"OAEP(SHA-256)\").unwrap();\n\n\n\n let ptext = decryptor.decrypt(&ctext).unwrap();\n\n\n\n assert_eq!(ptext, msg);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 44, "score": 32527.396047672817 }, { "content": "#[test]\n\nfn test_scrub_mem() {\n\n let mut v = vec![1,2,3];\n\n botan::scrub_mem(&mut v);\n\n assert_eq!(v, vec![0,0,0]);\n\n\n\n let mut a = [1u32, 2u32, 3u32, 2049903u32];\n\n botan::scrub_mem(&mut a);\n\n assert_eq!(a, [0,0,0,0]);\n\n}\n\n\n", 
"file_path": "botan/tests/tests.rs", "rank": 45, "score": 32527.396047672817 }, { "content": "#[test]\n\nfn test_ct_compare() {\n\n let a = vec![1,2,3];\n\n\n\n assert_eq!(botan::const_time_compare(&a, &[1,2,3]), true);\n\n assert_eq!(botan::const_time_compare(&a, &[1,2,3,4]), false);\n\n assert_eq!(botan::const_time_compare(&a, &[1,2,4]), false);\n\n assert_eq!(botan::const_time_compare(&a, &a), true);\n\n assert_eq!(botan::const_time_compare(&a, &vec![1,2,3]), true);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 46, "score": 32527.396047672817 }, { "content": "#[test]\n\nfn test_rng() {\n\n unsafe {\n\n let mut rng = std::ptr::null_mut();\n\n botan_rng_init(&mut rng, std::ptr::null());\n\n\n\n let mut rng1 = vec![0u8; 16];\n\n let mut rng2 = vec![0u8; 16];\n\n assert_eq!(botan_rng_get(rng, rng1.as_mut_ptr(), rng1.len()), 0);\n\n assert_eq!(botan_rng_get(rng, rng2.as_mut_ptr(), rng2.len()), 0);\n\n\n\n assert!(rng1 != rng2);\n\n\n\n assert_eq!(botan_rng_destroy(rng), 0);\n\n }\n\n}\n\n\n", "file_path": "botan-sys/tests/tests.rs", "rank": 47, "score": 32527.396047672817 }, { "content": "#[test]\n\nfn test_pubkey_encryption() {\n\n\n\n let padding = \"EMSA-PKCS1-v1_5(SHA-256)\";\n\n let msg = [1,2,3];\n\n\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n let key = botan::Privkey::create(\"RSA\", \"1024\", &rng).unwrap();\n\n\n\n let der = key.der_encode_encrypted(\"passphrase\", &rng).unwrap();\n\n let pem = key.pem_encode_encrypted(\"pemword\", &rng).unwrap();\n\n\n\n assert!(pem.starts_with(\"-----BEGIN ENCRYPTED PRIVATE KEY-----\\n\"));\n\n assert!(pem.ends_with(\"-----END ENCRYPTED PRIVATE KEY-----\\n\"));\n\n\n\n let signer = botan::Signer::new(&key, padding).unwrap();\n\n\n\n signer.update(&msg).unwrap();\n\n let sig1 = signer.finish(&rng).unwrap();\n\n\n\n //assert!(botan::Privkey::load_encrypted_der(&der, \"i forget\").is_err());\n", "file_path": "botan/tests/tests.rs", "rank": 48, "score": 32527.396047672817 }, { "content": 
"#[test]\n\nfn test_hash() {\n\n\n\n unsafe {\n\n let mut hash = std::ptr::null_mut();\n\n assert_eq!(botan_hash_init(&mut hash, CString::new(\"SHA-384\").unwrap().as_ptr(), 0u32), 0);\n\n\n\n let input = vec![97,98,99];\n\n assert_eq!(botan_hash_update(hash, input.as_ptr(), input.len()), 0);\n\n assert_eq!(botan_hash_update(hash, input.as_ptr(), input.len()), 0);\n\n\n\n let mut output_len = 0;\n\n assert_eq!(botan_hash_output_length(hash, &mut output_len), 0);\n\n assert!(output_len == 48);\n\n\n\n let mut digest = vec![0u8; output_len];\n\n assert_eq!(botan_hash_final(hash, digest.as_mut_ptr()), 0);\n\n\n\n assert_eq!(digest[0], 0xCA);\n\n assert_eq!(digest[1], 0xF3);\n\n assert_eq!(digest[47], 0x8D);\n\n\n\n assert_eq!(botan_hash_destroy(hash), 0);\n\n }\n\n}\n\n\n", "file_path": "botan-sys/tests/tests.rs", "rank": 49, "score": 32527.396047672817 }, { "content": "#[test]\n\nfn test_hex() {\n\n\n\n let bin = vec![0x42, 0x23, 0x45, 0x8F];\n\n let mut out = Vec::new();\n\n out.resize(bin.len()*2, 0);\n\n\n\n unsafe {\n\n assert_eq!(botan_hex_encode(bin.as_ptr(), bin.len(), out.as_mut_ptr(), 0), 0);\n\n }\n\n\n\n assert_eq!(out[0], '4' as i8);\n\n assert_eq!(out[1], '2' as i8);\n\n assert_eq!(out[2], '2' as i8);\n\n assert_eq!(out[3], '3' as i8);\n\n assert_eq!(out[4], '4' as i8);\n\n assert_eq!(out[5], '5' as i8);\n\n assert_eq!(out[6], '8' as i8);\n\n assert_eq!(out[7], 'F' as i8);\n\n\n\n let mut decoded = vec![0; 1024];\n", "file_path": "botan-sys/tests/tests.rs", "rank": 50, "score": 32527.396047672817 }, { "content": "#[test]\n\nfn test_version() {\n\n\n\n unsafe {\n\n let api_version = botan_ffi_api_version();\n\n\n\n assert!(botan_ffi_supports_api(api_version) == 0);\n\n assert!(botan_ffi_supports_api(api_version + 1) != 0);\n\n\n\n assert!(botan_version_major() == 2);\n\n assert!(botan_version_minor() > 0);\n\n assert!(botan_version_patch() <= 10);\n\n }\n\n}\n\n\n", "file_path": "botan-sys/tests/tests.rs", "rank": 51, "score": 32527.396047672817 }, { 
"content": "#[test]\n\nfn test_cert_verify() {\n\n let ca = b\"-----BEGIN CERTIFICATE-----\n\nMIIBkDCCATegAwIBAgIRANQudMcHu/SmX8470nbNlj0wCgYIKoZIzj0EAwIwEjEQ\n\nMA4GA1UEAxMHVGVzdCBDQTAeFw0xODA4MTYyMjMyNDFaFw00NjAxMDEyMjMyNDFa\n\nMBIxEDAOBgNVBAMTB1Rlc3QgQ0EwWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAASN\n\n+LHr9ZN72sxZqi4zcYDIg4xzN3DOF3epvlpGHLnju5ogp8dJ46YydTi3g/SfBGOp\n\nj9jrYP5Jgkkmpo0lMh7ho24wbDAhBgNVHQ4EGgQYLg/lfneWJ36rZdGMoVyKD6Zl\n\nmHkST7ZNMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEBMCMGA1Ud\n\nIwQcMBqAGC4P5X53lid+q2XRjKFcig+mZZh5Ek+2TTAKBggqhkjOPQQDAgNHADBE\n\nAiB30ZIFV1cZbknu5lt1fWrM9tNSgCbj5BN9CI+Q9aq1LQIgD9o/8oGmFgvWLjsx\n\nb39VOu00+Vy9kpNO1Sgx7wSWoIU=\n\n-----END CERTIFICATE-----\";\n\n\n\n let ee = b\"-----BEGIN CERTIFICATE-----\n\nMIIBoDCCAUagAwIBAgIRAK27a2NlSYEH63xIsAbBA1wwCgYIKoZIzj0EAwIwEjEQ\n\nMA4GA1UEAxMHVGVzdCBDQTAeFw0xODA4MTYyMjMzNDBaFw00NjAxMDEyMjMzNDBa\n\nMBoxGDAWBgNVBAMTD1Rlc3QgRW5kIEVudGl0eTBZMBMGByqGSM49AgEGCCqGSM49\n\nAwEHA0IABDykQMvlV7GyIJeANLWEs5bXReqpvTEFu3zYPBjOhyx784VPVl84h8c5\n\nycru3Hk8N/SIITSWzpbjPMp9jRbyDy+jdTBzMCEGA1UdDgQaBBjkPzL+BXHtQJDR\n\nciwvzeHQKuQZOstyM2swGwYDVR0RBBQwEoIQdGVzdC5leGFtcGxlLmNvbTAMBgNV\n", "file_path": "botan/tests/tests.rs", "rank": 52, "score": 32527.396047672817 }, { "content": "#[test]\n\nfn test_pubkey_sign() {\n\n let msg = vec![1,23,42];\n\n\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let ecdsa_key = botan::Privkey::create(\"ECDSA\", \"secp256r1\", &rng).unwrap();\n\n assert!(ecdsa_key.key_agreement_key().is_err());\n\n\n\n let signer = botan::Signer::new(&ecdsa_key, \"EMSA1(SHA-256)\").unwrap();\n\n\n\n signer.update(&msg).unwrap();\n\n let signature = signer.finish(&rng).unwrap();\n\n\n\n let pub_key = ecdsa_key.pubkey().unwrap();\n\n\n\n let verifier = botan::Verifier::new(&pub_key, \"EMSA1(SHA-256)\").unwrap();\n\n\n\n verifier.update(&[1]).unwrap();\n\n verifier.update(&[23, 42]).unwrap();\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 53, "score": 
32527.396047672817 }, { "content": "#[test]\n\nfn test_block_cipher() {\n\n let bc = botan::BlockCipher::new(\"AES-128\").unwrap();\n\n\n\n assert_eq!(bc.algo_name().unwrap(), \"AES-128\");\n\n assert_eq!(bc.block_size().unwrap(), 16);\n\n\n\n let key_spec = bc.key_spec().unwrap();\n\n\n\n assert!(key_spec.is_valid_keylength(20) == false);\n\n assert!(key_spec.is_valid_keylength(16));\n\n\n\n assert_eq!(bc.set_key(&vec![0; 32]).unwrap_err(), botan::Error::InvalidKeyLength);\n\n\n\n bc.set_key(&vec![0; 16]).unwrap();\n\n\n\n let input = vec![0; 16];\n\n\n\n let ctext = bc.encrypt_blocks(&input).unwrap();\n\n\n\n assert_eq!(botan::hex_encode(&ctext).unwrap(),\n\n \"66E94BD4EF8A2C3B884CFA59CA342B2E\");\n\n\n\n let ptext = bc.decrypt_blocks(&ctext).unwrap();\n\n\n\n assert_eq!(ptext, input);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 54, "score": 32527.396047672817 }, { "content": "use std::os::raw::{c_int, c_char, c_void};\n\n\n\nextern \"C\" {\n\n\n\n pub fn botan_constant_time_compare(x: *const u8, y: *const u8, len: usize) -> c_int;\n\n\n\n pub fn botan_scrub_mem(mem: *mut c_void, bytes: usize) -> c_int;\n\n\n\n pub fn botan_hex_encode(\n\n x: *const u8,\n\n len: usize,\n\n out: *mut c_char,\n\n flags: u32,\n\n ) -> c_int;\n\n\n\n pub fn botan_hex_decode(\n\n hex_str: *const c_char,\n\n in_len: usize,\n\n out: *mut u8,\n\n out_len: *mut usize,\n", "file_path": "botan-sys/src/utils.rs", "rank": 55, "score": 31759.45124795136 }, { "content": " ) -> c_int;\n\n\n\n pub fn botan_base64_encode(\n\n x: *const u8,\n\n len: usize,\n\n out: *mut c_char,\n\n out_len: *mut usize,\n\n ) -> c_int;\n\n\n\n pub fn botan_base64_decode(\n\n base64_str: *const c_char,\n\n in_len: usize,\n\n out: *mut u8,\n\n out_len: *mut usize,\n\n ) -> c_int;\n\n\n\n}\n", "file_path": "botan-sys/src/utils.rs", "rank": 56, "score": 31757.879262948933 }, { "content": "#[test]\n\nfn test_aes_key_wrap() {\n\n let kek = 
botan::hex_decode(\"000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F\").unwrap();\n\n let key = botan::hex_decode(\"00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F\").unwrap();\n\n\n\n let wrapped = botan::nist_key_wrap(&kek, &key).unwrap();\n\n\n\n assert_eq!(botan::hex_encode(&wrapped).unwrap(),\n\n \"28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21\");\n\n\n\n let unwrapped = botan::nist_key_unwrap(&kek, &wrapped).unwrap();\n\n\n\n assert_eq!(unwrapped, key);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 57, "score": 31431.664173983438 }, { "content": "#[test]\n\nfn test_pkcs_hash_id() {\n\n assert!(botan::pkcs_hash_id(\"SHA-192\").is_err());\n\n\n\n let id = botan::pkcs_hash_id(\"SHA-384\").unwrap();\n\n\n\n assert_eq!(botan::hex_encode(&id).unwrap(),\n\n \"3041300D060960864801650304020205000430\");\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 58, "score": 31431.664173983438 }, { "content": "#[test]\n\nfn test_pubkey_key_agreement() {\n\n\n\n let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n\n\n let a_priv = botan::Privkey::create(\"ECDH\", \"secp384r1\", &rng).unwrap();\n\n let b_priv = botan::Privkey::create(\"ECDH\", \"secp384r1\", &rng).unwrap();\n\n\n\n let a_pub = a_priv.key_agreement_key().unwrap();\n\n let b_pub = b_priv.key_agreement_key().unwrap();\n\n\n\n let a_ka = botan::KeyAgreement::new(&a_priv, \"KDF2(SHA-384)\").unwrap();\n\n let b_ka = botan::KeyAgreement::new(&b_priv, \"KDF2(SHA-384)\").unwrap();\n\n\n\n let salt = rng.read(16).unwrap();\n\n\n\n let a_key = a_ka.agree(32, &b_pub, &salt).unwrap();\n\n let b_key = b_ka.agree(32, &a_pub, &salt).unwrap();\n\n assert_eq!(a_key, b_key);\n\n\n\n let a_ka = botan::KeyAgreement::new(&a_priv, \"Raw\").unwrap();\n\n let b_ka = botan::KeyAgreement::new(&b_priv, \"Raw\").unwrap();\n\n\n\n let a_key = a_ka.agree(0, &b_pub, &salt).unwrap();\n\n let b_key = b_ka.agree(0, &a_pub, 
&vec![]).unwrap();\n\n\n\n assert_eq!(a_key, b_key);\n\n assert_eq!(a_key.len(), 384/8);\n\n}\n\n\n", "file_path": "botan/tests/tests.rs", "rank": 59, "score": 31431.664173983438 }, { "content": "\n\nuse botan_sys::*;\n\nuse utils::*;\n\n\n\nuse pubkey::Pubkey;\n\n\n\n#[derive(Debug)]\n\n/// X.509 certificate\n\npub struct Certificate {\n\n obj: botan_x509_cert_t\n\n}\n\n\n\nimpl Drop for Certificate {\n\n fn drop(&mut self) {\n\n unsafe { botan_x509_cert_destroy(self.obj) };\n\n }\n\n}\n\n\n\nimpl Clone for Certificate {\n\n fn clone(&self) -> Certificate {\n", "file_path": "botan/src/x509.rs", "rank": 60, "score": 21.405046396965894 }, { "content": "\n\nuse botan_sys::*;\n\nuse utils::*;\n\n\n\n#[derive(Debug)]\n\n/// Generate or check HOTP tokens\n\npub struct HOTP {\n\n obj: botan_hotp_t,\n\n}\n\n\n\n#[derive(Debug)]\n\n/// Generate or check TOTP tokens\n\npub struct TOTP {\n\n obj: botan_totp_t,\n\n}\n\n\n\nimpl Drop for HOTP {\n\n fn drop(&mut self) {\n\n unsafe { botan_hotp_destroy(self.obj); }\n\n }\n", "file_path": "botan/src/otp.rs", "rank": 62, "score": 20.68138671833991 }, { "content": " fn drop(&mut self) {\n\n unsafe { botan_privkey_destroy(self.obj) };\n\n }\n\n}\n\n\n\nimpl Drop for Pubkey {\n\n fn drop(&mut self) {\n\n unsafe { botan_pubkey_destroy(self.obj) };\n\n }\n\n}\n\n\n\nimpl Privkey {\n\n\n\n pub(crate) fn handle(&self) -> botan_privkey_t { self.obj }\n\n\n\n /// Create a new private key\n\n ///\n\n pub fn create(alg: &str, params: &str, rng: &RandomNumberGenerator) -> Result<Privkey> {\n\n\n\n let mut obj = ptr::null_mut();\n", "file_path": "botan/src/pubkey.rs", "rank": 63, "score": 19.763691240635854 }, { "content": "use botan_sys::*;\n\nuse utils::*;\n\n\n\n#[derive(Debug)]\n\n/// A hash function object\n\npub struct HashFunction {\n\n obj: botan_hash_t,\n\n output_length: usize\n\n}\n\n\n\nimpl Clone for HashFunction {\n\n fn clone(&self) -> HashFunction {\n\n self.duplicate().expect(\"copying hash object state failed\")\n\n 
}\n\n}\n\n\n\nimpl Drop for HashFunction {\n\n fn drop(&mut self) {\n\n unsafe { botan_hash_destroy(self.obj); }\n\n }\n", "file_path": "botan/src/hash.rs", "rank": 64, "score": 19.49458786138549 }, { "content": "\n\nuse botan_sys::*;\n\nuse utils::*;\n\n\n\n#[derive(Debug)]\n\n/// Message authentication code\n\npub struct MsgAuthCode {\n\n obj: botan_mac_t,\n\n output_length: usize,\n\n min_keylen: usize,\n\n max_keylen: usize,\n\n mod_keylen: usize,\n\n}\n\n\n\nimpl Drop for MsgAuthCode {\n\n fn drop(&mut self) {\n\n unsafe { botan_mac_destroy(self.obj); }\n\n }\n\n}\n\n\n", "file_path": "botan/src/mac.rs", "rank": 65, "score": 18.72759535954811 }, { "content": "\n\nuse botan_sys::*;\n\nuse utils::*;\n\n\n\nuse mp::MPI;\n\nuse rng::RandomNumberGenerator;\n\n\n\n#[derive(Debug)]\n\n/// A public key object\n\npub struct Pubkey {\n\n obj: botan_pubkey_t\n\n}\n\n\n\n#[derive(Debug)]\n\n/// A private key object\n\npub struct Privkey {\n\n obj: botan_privkey_t\n\n}\n\n\n\nimpl Drop for Privkey {\n", "file_path": "botan/src/pubkey.rs", "rank": 66, "score": 18.4137582793398 }, { "content": "\n\nuse botan_sys::*;\n\nuse utils::*;\n\n\n\n#[derive(Debug)]\n\n/// A raw block cipher interface (ie ECB mode)\n\n///\n\n/// Warning: you almost certainly want an AEAD cipher mode instead\n\npub struct BlockCipher {\n\n obj: botan_block_cipher_t,\n\n block_size: usize,\n\n min_keylen: usize,\n\n max_keylen: usize,\n\n mod_keylen: usize,\n\n}\n\n\n\nimpl Drop for BlockCipher {\n\n fn drop(&mut self) {\n\n unsafe { botan_block_cipher_destroy(self.obj); }\n\n }\n", "file_path": "botan/src/block.rs", "rank": 67, "score": 17.030880729002416 }, { "content": "}\n\n\n\nimpl Drop for TOTP {\n\n fn drop(&mut self) {\n\n unsafe { botan_totp_destroy(self.obj); }\n\n }\n\n}\n\n\n\nimpl HOTP {\n\n\n\n /// Instantiate a new HOTP instance with the given parameters\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let hotp = botan::HOTP::new(&[1,2,3,4], \"SHA-1\", 6);\n\n /// ```\n\n pub fn 
new(key: &[u8], hash_algo: &str, digits: usize) -> Result<HOTP> {\n\n let mut obj = ptr::null_mut();\n\n\n", "file_path": "botan/src/otp.rs", "rank": 68, "score": 16.50543723015502 }, { "content": "}\n\n\n\nimpl Drop for Decryptor {\n\n fn drop(&mut self) {\n\n unsafe { botan_pk_op_decrypt_destroy(self.obj) };\n\n }\n\n}\n\n\n\nimpl Decryptor {\n\n\n\n /// Create a new decryption object\n\n pub fn new(key: &Privkey, padding: &str) -> Result<Decryptor> {\n\n let padding = make_cstr(padding)?;\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_pk_op_decrypt_create(&mut obj, key.handle(), padding.as_ptr(), 0u32) }\n\n Ok(Decryptor { obj })\n\n }\n\n\n\n /// Decrypt a message\n\n pub fn decrypt(&self, ctext: &[u8]) -> Result<Vec<u8>> {\n", "file_path": "botan/src/pk_ops.rs", "rank": 69, "score": 15.73911912763781 }, { "content": " obj: botan_mp_t,\n\n}\n\n\n\nimpl Drop for MPI {\n\n fn drop(&mut self) {\n\n unsafe { botan_mp_destroy(self.obj); }\n\n }\n\n}\n\n\n\nimpl Clone for MPI {\n\n fn clone(&self) -> MPI {\n\n self.duplicate().expect(\"copying MPI object failed\")\n\n }\n\n}\n\n\n\nimpl MPI {\n\n\n\n pub(crate) fn handle(&self) -> botan_mp_t { self.obj }\n\n\n\n /// Crate a new (zero-valued) MPI\n", "file_path": "botan/src/mp.rs", "rank": 70, "score": 15.592202773295906 }, { "content": " Encrypt,\n\n /// Decrypt\n\n Decrypt\n\n}\n\n\n\nimpl Drop for Cipher {\n\n fn drop(&mut self) {\n\n unsafe { botan_cipher_destroy(self.obj); }\n\n }\n\n}\n\n\n\nimpl Cipher {\n\n /// Create a new cipher object in the specified direction\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// let aes_gcm = botan::Cipher::new(\"AES-128/GCM\", botan::CipherDirection::Encrypt).unwrap();\n\n /// ```\n\n pub fn new(name: &str, direction: CipherDirection) -> Result<Cipher> {\n\n let mut obj = ptr::null_mut();\n", "file_path": "botan/src/cipher.rs", "rank": 71, "score": 15.537177181940915 }, { "content": "#[derive(Debug)]\n\n/// An object that performs key agreement\n\npub struct 
KeyAgreement {\n\n obj: botan_pk_op_ka_t\n\n}\n\n\n\nimpl Drop for KeyAgreement {\n\n fn drop(&mut self) {\n\n unsafe { botan_pk_op_key_agreement_destroy(self.obj) };\n\n }\n\n}\n\n\n\nimpl KeyAgreement {\n\n\n\n /// Create a new key agreement operator\n\n pub fn new(key: &Privkey, kdf: &str) -> Result<KeyAgreement> {\n\n let kdf = make_cstr(kdf)?;\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_pk_op_key_agreement_create(&mut obj, key.handle(), kdf.as_ptr(), 0u32) }\n\n Ok(KeyAgreement { obj })\n", "file_path": "botan/src/pk_ops.rs", "rank": 72, "score": 14.820927791001946 }, { "content": " obj: botan_pk_op_sign_t,\n\n sig_len: usize,\n\n}\n\n\n\nimpl Drop for Signer {\n\n fn drop(&mut self) {\n\n unsafe { botan_pk_op_sign_destroy(self.obj) };\n\n }\n\n}\n\n\n\nimpl Signer {\n\n\n\n /// Create a new signature operator\n\n pub fn new(key: &Privkey, padding: &str) -> Result<Signer> {\n\n let padding = make_cstr(padding)?;\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_pk_op_sign_create(&mut obj, key.handle(), padding.as_ptr(), 0u32) };\n\n let mut sig_len = 0;\n\n call_botan! 
{ botan_pk_op_sign_output_length(obj, &mut sig_len) };\n\n Ok(Signer { obj, sig_len })\n", "file_path": "botan/src/pk_ops.rs", "rank": 73, "score": 13.831476165947482 }, { "content": "///\n\n/// ```\n\n/// let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n/// let rsa = botan::Privkey::create(\"RSA\", \"2048\", &rng).unwrap();\n\n/// let rsa_pub = rsa.pubkey().unwrap();\n\n/// let enc = botan::Encryptor::new(&rsa_pub, \"OAEP(SHA-256)\").unwrap();\n\n/// let ctext = enc.encrypt(&[1,2,3], &rng).unwrap();\n\n/// ```\n\npub struct Encryptor {\n\n obj: botan_pk_op_encrypt_t\n\n}\n\n\n\nimpl Drop for Encryptor {\n\n fn drop(&mut self) {\n\n unsafe { botan_pk_op_encrypt_destroy(self.obj) };\n\n }\n\n}\n\n\n\nimpl Encryptor {\n\n\n", "file_path": "botan/src/pk_ops.rs", "rank": 74, "score": 13.763123928594423 }, { "content": "/// assert!(ctext < modulus);\n\n/// let ptext = fpe.decrypt(&ctext, &tweak).unwrap();\n\n/// assert_eq!(ptext, input);\n\n/// ```\n\npub struct FPE {\n\n obj: botan_fpe_t\n\n}\n\n\n\nimpl Drop for FPE {\n\n fn drop(&mut self) {\n\n unsafe { botan_fpe_destroy(self.obj); }\n\n }\n\n}\n\n\n\nimpl FPE {\n\n /// Create a new FPE instance, FE1 scheme\n\n /// Rounds should be 16 or higher for best security\n\n pub fn new_fe1(modulus: &MPI, key: &[u8], rounds: usize, compat_mode: bool) -> Result<FPE> {\n\n let mut obj = ptr::null_mut();\n\n\n", "file_path": "botan/src/fpe.rs", "rank": 75, "score": 13.449237284081233 }, { "content": " let mut ptext_len = 0;\n\n\n\n call_botan! 
{ botan_pk_op_decrypt_output_length(self.obj, ctext.len(), &mut ptext_len) };\n\n\n\n call_botan_ffi_returning_vec_u8(ptext_len, &|out_buf, out_len| {\n\n unsafe { botan_pk_op_decrypt(self.obj, out_buf, out_len, ctext.as_ptr(), ctext.len()) }\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\n/// An object that can perform public key signature verification\n\npub struct Verifier {\n\n obj: botan_pk_op_verify_t\n\n}\n\n\n\nimpl Drop for Verifier {\n\n fn drop(&mut self) {\n\n unsafe { botan_pk_op_verify_destroy(self.obj) };\n\n }\n", "file_path": "botan/src/pk_ops.rs", "rank": 76, "score": 12.496130854824406 }, { "content": " /// Swap two MPI values\n\n pub fn swap(&mut self, other: &mut MPI) -> Result<()> {\n\n call_botan! { botan_mp_swap(self.obj, other.obj) };\n\n Ok(())\n\n }\n\n\n\n /// Perform a primality test on self\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use std::str::FromStr;\n\n /// let n = botan::MPI::from_str(\"1111111111111111111\").unwrap();\n\n /// let rng = botan::RandomNumberGenerator::new_system().unwrap();\n\n /// assert!(n.is_prime(&rng, 128).unwrap());\n\n /// ```\n\n pub fn is_prime(&self, rng: &RandomNumberGenerator, test_prob: usize) -> Result<bool> {\n\n let rc = unsafe { botan_mp_is_prime(self.obj, rng.handle(), test_prob) };\n\n match rc {\n\n 0 => Ok(false),\n", "file_path": "botan/src/mp.rs", "rank": 77, "score": 12.327267783935733 }, { "content": " 1 => Ok(true),\n\n e => Err(Error::from(e))\n\n }\n\n }\n\n\n\n /// Return the greatest common divisor of x and y\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use std::str::FromStr;\n\n /// let x = botan::MPI::from_str(\"1111111111111111\").unwrap();\n\n /// let y = botan::MPI::from_str(\"111111111111\").unwrap();\n\n /// assert_eq!(botan::MPI::gcd(&x, &y).unwrap(), botan::MPI::from_str(\"1111\").unwrap());\n\n /// ```\n\n pub fn gcd(x: &MPI, y: &MPI) -> Result<MPI> {\n\n let r = MPI::new()?;\n\n call_botan! 
{ botan_mp_gcd(r.obj, x.obj, y.obj) };\n\n Ok(r)\n\n }\n\n\n", "file_path": "botan/src/mp.rs", "rank": 78, "score": 11.612585809648621 }, { "content": "pub use fpe::*;\n\npub use hash::*;\n\npub use kdf::*;\n\npub use keywrap::*;\n\npub use mac::*;\n\npub use memutils::*;\n\npub use mp::*;\n\npub use otp::*;\n\npub use pbkdf::*;\n\npub use pk_ops::*;\n\npub use pubkey::*;\n\npub use rng::*;\n\npub use x509::*;\n\npub use version::*;\n\npub use utils::*;\n", "file_path": "botan/src/lib.rs", "rank": 79, "score": 11.39084392525438 }, { "content": "use botan_sys::*;\n\nuse utils::*;\n\n\n\nuse rng::RandomNumberGenerator;\n\n\n\nuse std::cmp::{Eq, Ord, Ordering};\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse std::ops::{Add, AddAssign,\n\n Sub, SubAssign,\n\n Mul, MulAssign,\n\n Div, DivAssign,\n\n Rem, RemAssign,\n\n Shl, ShlAssign,\n\n Shr, ShrAssign,\n\n Neg};\n\n\n\n/// A big integer type\n\npub struct MPI {\n", "file_path": "botan/src/mp.rs", "rank": 80, "score": 11.326850510634026 }, { "content": "\n\npub use block::*;\n\npub use cipher::*;\n\npub use errors::*;\n\npub use fpe::*;\n\npub use hash::*;\n\npub use keywrap::*;\n\npub use kdf::*;\n\npub use mac::*;\n\npub use mp::*;\n\npub use otp::*;\n\npub use passhash::*;\n\npub use pk_ops::*;\n\npub use pubkey::*;\n\npub use rng::*;\n\npub use utils::*;\n\npub use version::*;\n\npub use x509::*;\n", "file_path": "botan-sys/src/lib.rs", "rank": 81, "score": 11.28152834058628 }, { "content": "use std::os::raw::{c_int, c_char};\n\n\n\nuse rng::botan_rng_t;\n\n\n\npub enum botan_mp_struct {}\n\npub type botan_mp_t = *mut botan_mp_struct;\n\n\n\nextern \"C\" {\n\n\n\n pub fn botan_mp_init(mp: *mut botan_mp_t) -> c_int;\n\n pub fn botan_mp_destroy(mp: botan_mp_t) -> c_int;\n\n pub fn botan_mp_to_hex(mp: botan_mp_t, out: *mut c_char) -> c_int;\n\n pub fn botan_mp_to_str(mp: botan_mp_t, base: u8, out: *mut c_char, out_len: *mut usize) -> c_int;\n\n pub fn botan_mp_clear(mp: botan_mp_t) -> c_int;\n\n pub fn 
botan_mp_set_from_int(mp: botan_mp_t, initial_value: c_int) -> c_int;\n\n pub fn botan_mp_set_from_mp(dest: botan_mp_t, source: botan_mp_t) -> c_int;\n\n pub fn botan_mp_set_from_str(dest: botan_mp_t, str: *const c_char) -> c_int;\n\n\n\n pub fn botan_mp_set_from_radix_str(\n\n dest: botan_mp_t,\n", "file_path": "botan-sys/src/mp.rs", "rank": 82, "score": 10.772521303301318 }, { "content": "use std::os::raw::{c_int, c_char};\n\n\n\nuse rng::botan_rng_t;\n\nuse pubkey::{botan_pubkey_t, botan_privkey_t};\n\n\n\npub enum botan_pk_op_encrypt_struct {}\n\npub type botan_pk_op_encrypt_t = *mut botan_pk_op_encrypt_struct;\n\n\n\npub enum botan_pk_op_decrypt_struct {}\n\npub type botan_pk_op_decrypt_t = *mut botan_pk_op_decrypt_struct;\n\n\n\npub enum botan_pk_op_sign_struct {}\n\npub type botan_pk_op_sign_t = *mut botan_pk_op_sign_struct;\n\n\n\npub enum botan_pk_op_verify_struct {}\n\npub type botan_pk_op_verify_t = *mut botan_pk_op_verify_struct;\n\n\n\npub enum botan_pk_op_ka_struct {}\n\npub type botan_pk_op_ka_t = *mut botan_pk_op_ka_struct;\n\n\n", "file_path": "botan-sys/src/pk_ops.rs", "rank": 83, "score": 10.726821519047908 }, { "content": "use std::os::raw::{c_int, c_char};\n\n\n\nuse mp::botan_mp_t;\n\nuse rng::botan_rng_t;\n\n\n\npub enum botan_pubkey_struct {}\n\npub type botan_pubkey_t = *mut botan_pubkey_struct;\n\n\n\npub enum botan_privkey_struct {}\n\npub type botan_privkey_t = *mut botan_privkey_struct;\n\n\n\nextern \"C\" {\n\n pub fn botan_privkey_create(\n\n key: *mut botan_privkey_t,\n\n algo_name: *const c_char,\n\n algo_params: *const c_char,\n\n rng: botan_rng_t,\n\n ) -> c_int;\n\n pub fn botan_privkey_check_key(\n\n key: botan_privkey_t,\n", "file_path": "botan-sys/src/pubkey.rs", "rank": 84, "score": 10.591805513839242 }, { "content": "}\n\n\n\nimpl Pubkey {\n\n\n\n pub(crate) fn from_handle(obj: botan_pubkey_t) -> Pubkey { Pubkey { obj } }\n\n\n\n pub(crate) fn handle(&self) -> botan_pubkey_t { self.obj }\n\n\n\n /// Load a DER encoded public 
key\n\n pub fn load_der(der: &[u8]) -> Result<Pubkey> {\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_pubkey_load(&mut obj, der.as_ptr(), der.len()) }\n\n Ok(Pubkey { obj })\n\n }\n\n\n\n /// Load a PEM encoded public key\n\n pub fn load_pem(pem: &str) -> Result<Pubkey> {\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_pubkey_load(&mut obj, make_cstr(pem)?.as_ptr() as *const u8, pem.len()) }\n\n Ok(Pubkey { obj })\n", "file_path": "botan/src/pubkey.rs", "rank": 85, "score": 10.356740666389557 }, { "content": "}\n\n\n\nimpl BlockCipher {\n\n /// Create a new block cipher instance, failing if the cipher is unknown\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let cipher = botan::BlockCipher::new(\"AES-128\");\n\n /// assert!(cipher.is_ok());\n\n /// let no_such_cipher = botan::BlockCipher::new(\"SuperCipher9000\");\n\n /// assert!(no_such_cipher.is_err());\n\n /// ```\n\n pub fn new(name: &str) -> Result<BlockCipher> {\n\n let mut obj = ptr::null_mut();\n\n call_botan! 
{ botan_block_cipher_init(&mut obj, make_cstr(name)?.as_ptr()) };\n\n\n\n let block_size = unsafe { botan_block_cipher_block_size(obj) };\n\n\n\n if block_size < 0 {\n", "file_path": "botan/src/block.rs", "rank": 86, "score": 10.273390460802773 }, { "content": "\n\nuse botan_sys::*;\n\nuse utils::*;\n\n\n\n#[derive(Debug)]\n\n/// A symmetric cipher\n\npub struct Cipher {\n\n obj: botan_cipher_t,\n\n direction: CipherDirection,\n\n tag_length: usize,\n\n default_nonce_length: usize,\n\n min_keylen: usize,\n\n max_keylen: usize,\n\n mod_keylen: usize\n\n}\n\n\n\n#[derive(PartialEq, Debug, Copy, Clone)]\n\n/// Which direction the cipher processes in\n\npub enum CipherDirection {\n\n /// Encrypt\n", "file_path": "botan/src/cipher.rs", "rank": 87, "score": 10.162472685131542 }, { "content": "impl Certificate {\n\n\n\n pub(crate) fn handle(&self) -> botan_x509_cert_t { self.obj }\n\n\n\n /// Load a X.509 certificate from DER or PEM representation\n\n pub fn load(data: &[u8]) -> Result<Certificate> {\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_x509_cert_load(&mut obj, data.as_ptr(), data.len()) };\n\n Ok(Certificate { obj })\n\n }\n\n\n\n /// Read an X.509 certificate from a file\n\n pub fn from_file(fsname: &str) -> Result<Certificate> {\n\n let fsname = make_cstr(fsname)?;\n\n\n\n let mut obj = ptr::null_mut();\n\n call_botan! 
{ botan_x509_cert_load_file(&mut obj, fsname.as_ptr()) };\n\n Ok(Certificate { obj })\n\n }\n\n\n", "file_path": "botan/src/x509.rs", "rank": 88, "score": 10.11703545131154 }, { "content": " let as_str_len = 4096;\n\n call_botan_ffi_returning_string(as_str_len, &|out_buf, out_len| {\n\n unsafe { botan_x509_cert_to_string(self.obj, out_buf as *mut c_char, out_len) }\n\n })\n\n }\n\n\n\n /// Test if the certificate is allowed for a particular usage\n\n pub fn allows_usage(&self, usage: CertUsage) -> Result<bool> {\n\n\n\n let usage_bit : X509KeyConstraints = X509KeyConstraints::from(usage);\n\n\n\n let rc = unsafe { botan_x509_cert_allowed_usage(self.obj, usage_bit as u32) };\n\n\n\n if rc == 0 {\n\n Ok(true)\n\n }\n\n else if rc == 1 {\n\n Ok(false)\n\n }\n\n else {\n", "file_path": "botan/src/x509.rs", "rank": 89, "score": 10.054332023533073 }, { "content": "\n\nimpl Version {\n\n\n\n /// Read the version information\n\n pub fn new() -> Result<Version> {\n\n\n\n unsafe {\n\n let version_str = CStr::from_ptr(botan_version_string()).to_str().map_err(|_| Error::ConversionError)?;\n\n\n\n Ok(Version {\n\n major: botan_version_major(),\n\n minor: botan_version_minor(),\n\n patch: botan_version_patch(),\n\n release_date: botan_version_datestamp(),\n\n ffi_api: botan_ffi_api_version(),\n\n string: version_str.to_string(),\n\n })\n\n }\n\n }\n\n\n\n /// Return true if the specified API version is supported by this version of the library\n\n pub fn supports_version(version: u32) -> bool {\n\n let rc = unsafe { botan_ffi_supports_api(version) };\n\n return rc == 0;\n\n }\n\n\n\n}\n", "file_path": "botan/src/version.rs", "rank": 90, "score": 10.031190626040761 }, { "content": " let bn_digits = 1 + (bit_count / log_base) as usize;\n\n\n\n call_botan_ffi_returning_string(bn_digits, &|out_buf, out_len| {\n\n unsafe { botan_mp_to_str(self.obj, 10, out_buf as *mut c_char, out_len) }\n\n })\n\n }\n\n\n\n /// Return value of self as hex string\n\n pub fn to_hex(&self) -> 
Result<String> {\n\n let byte_count = self.byte_count()?;\n\n\n\n call_botan_ffi_returning_string(byte_count*2 + 1, &|out_buf, out_len| {\n\n unsafe { botan_mp_to_str(self.obj, 16, out_buf as *mut c_char, out_len) }\n\n })\n\n }\n\n\n\n /// Return value of self as a byte array (big endian)\n\n pub fn to_bin(&self) -> Result<Vec<u8>> {\n\n let bytes = self.byte_count()?;\n\n let mut output = vec![0; bytes];\n", "file_path": "botan/src/mp.rs", "rank": 91, "score": 9.97786168495331 }, { "content": " /// Create a new public key encryptor object\n\n pub fn new(key: &Pubkey, padding: &str) -> Result<Encryptor> {\n\n let padding = make_cstr(padding)?;\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_pk_op_encrypt_create(&mut obj, key.handle(), padding.as_ptr(), 0u32) }\n\n Ok(Encryptor { obj })\n\n }\n\n\n\n /// Encrypt a message using the provided public key\n\n pub fn encrypt(&self, ptext: &[u8], rng: &RandomNumberGenerator) -> Result<Vec<u8>> {\n\n let mut ctext_len = 0;\n\n\n\n call_botan! 
{ botan_pk_op_encrypt_output_length(self.obj, ptext.len(), &mut ctext_len) };\n\n\n\n call_botan_ffi_returning_vec_u8(ctext_len, &|out_buf, out_len| {\n\n unsafe { botan_pk_op_encrypt(self.obj, rng.handle(), out_buf, out_len, ptext.as_ptr(), ptext.len()) }\n\n })\n\n }\n\n}\n\n\n", "file_path": "botan/src/pk_ops.rs", "rank": 92, "score": 9.78948268903262 }, { "content": "use botan_sys::*;\n\nuse utils::*;\n\n\n\nuse mp::MPI;\n\n\n\n#[derive(Debug)]\n\n/// Represents an instance of format preserving encryption\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::str::FromStr;\n\n/// let modulus = botan::MPI::from_str(\"1000000000\").unwrap();\n\n/// let key = vec![0; 32];\n\n/// let rounds = 16;\n\n/// let compat_mode = false;\n\n/// let fpe = botan::FPE::new_fe1(&modulus, &key, rounds, compat_mode).unwrap();\n\n/// let input = botan::MPI::from_str(\"9392024\").unwrap();\n\n/// let tweak = vec![1,2,3,4,5];\n\n/// let ctext = fpe.encrypt(&input, &tweak).unwrap();\n", "file_path": "botan/src/fpe.rs", "rank": 93, "score": 9.721236818481977 }, { "content": "}\n\n\n\nimpl Verifier {\n\n\n\n /// Create a new verifier object\n\n pub fn new(key: &Pubkey, padding: &str) -> Result<Verifier> {\n\n let padding = make_cstr(padding)?;\n\n let mut obj = ptr::null_mut();\n\n call_botan! { botan_pk_op_verify_create(&mut obj, key.handle(), padding.as_ptr(), 0u32) }\n\n Ok(Verifier { obj })\n\n }\n\n\n\n /// Add more bytes of the message that will be verified\n\n pub fn update(&self, data: &[u8]) -> Result<()> {\n\n call_botan! 
{ botan_pk_op_verify_update(self.obj, data.as_ptr(), data.len()) };\n\n Ok(())\n\n }\n\n\n\n /// Verify the provided signature and return true if valid\n\n pub fn finish(&self, signature: &[u8]) -> Result<bool> {\n", "file_path": "botan/src/pk_ops.rs", "rank": 94, "score": 9.585456995810869 }, { "content": "\n\n /// Return true if self is even\n\n pub fn is_even(&self) -> Result<bool> {\n\n match unsafe { botan_mp_is_even(self.obj) } {\n\n 0 => Ok(false),\n\n 1 => Ok(true),\n\n e => Err(Error::from(e))\n\n }\n\n }\n\n\n\n /// Return true if self equals other\n\n pub fn equals(&self, other: &MPI) -> Result<bool> {\n\n match unsafe { botan_mp_equal(self.obj, other.obj) } {\n\n 0 => Ok(false),\n\n 1 => Ok(true),\n\n e => Err(Error::from(e))\n\n }\n\n }\n\n\n\n /// Compare self with other\n", "file_path": "botan/src/mp.rs", "rank": 95, "score": 9.583471645952836 }, { "content": "\n\n call_botan! { botan_privkey_create(&mut obj,\n\n make_cstr(alg)?.as_ptr(),\n\n make_cstr(params)?.as_ptr(),\n\n rng.handle()) }\n\n\n\n Ok(Privkey { obj })\n\n }\n\n\n\n /// Load an RSA private key (p,q,e)\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use std::str::FromStr;\n\n /// let p = botan::MPI::from_str(\"289698020102256958291511331409682926199\").unwrap();\n\n /// let q = botan::MPI::from_str(\"293497288893125842977275290547344412783\").unwrap();\n\n /// let e = botan::MPI::from_str(\"65537\").unwrap();\n\n /// let rsa = botan::Privkey::load_rsa(&p, &q, &e).unwrap();\n\n /// ```\n", "file_path": "botan/src/pubkey.rs", "rank": 96, "score": 9.527582463121828 }, { "content": "use std::os::raw::{c_int, c_char, c_uint};\n\n\n\nuse pubkey::{botan_pubkey_t, botan_privkey_t};\n\nuse rng::botan_rng_t;\n\n\n\npub enum botan_x509_cert_struct {}\n\npub type botan_x509_cert_t = *mut botan_x509_cert_struct;\n\n\n\n#[repr(u32)]\n\npub enum X509KeyConstraints {\n\n NO_CONSTRAINTS = 0,\n\n DIGITAL_SIGNATURE = 32768,\n\n NON_REPUDIATION = 16384,\n\n KEY_ENCIPHERMENT = 8192,\n\n 
DATA_ENCIPHERMENT = 4096,\n\n KEY_AGREEMENT = 2048,\n\n KEY_CERT_SIGN = 1024,\n\n CRL_SIGN = 512,\n\n ENCIPHER_ONLY = 256,\n\n DECIPHER_ONLY = 128,\n", "file_path": "botan-sys/src/x509.rs", "rank": 97, "score": 9.462695433162828 }, { "content": " }\n\n else {\n\n Err(Error::from(rc))\n\n }\n\n }\n\n\n\n\n\n}\n\n\n\nimpl TOTP {\n\n\n\n /// Instantiate a new TOTP instance with the given parameters\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let totp = botan::TOTP::new(&[1,2,3,4], \"SHA-1\", 6, 30);\n\n /// ```\n\n pub fn new(key: &[u8], hash_algo: &str, digits: usize, time_step: usize) -> Result<TOTP> {\n\n let mut obj = ptr::null_mut();\n", "file_path": "botan/src/otp.rs", "rank": 98, "score": 9.374344586342703 }, { "content": " }\n\n }\n\n\n\n /// Return true if self is an integer == 0\n\n pub fn is_zero(&self) -> Result<bool> {\n\n match unsafe { botan_mp_is_zero(self.obj) } {\n\n 0 => Ok(false),\n\n 1 => Ok(true),\n\n e => Err(Error::from(e))\n\n }\n\n }\n\n\n\n /// Return true if self is odd\n\n pub fn is_odd(&self) -> Result<bool> {\n\n match unsafe { botan_mp_is_odd(self.obj) } {\n\n 0 => Ok(false),\n\n 1 => Ok(true),\n\n e => Err(Error::from(e))\n\n }\n\n }\n", "file_path": "botan/src/mp.rs", "rank": 99, "score": 9.319982375898277 } ]
Rust
src/tests.rs
metal4people/merkletree
e2d3d22c51eb51a90da3b629ef71fc0423e61018
#![cfg(test)] use crate::merkletree::MerkleTree; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; fn hash_value<T>(value: T) -> u64 where T: Hash, { let mut hasher = DefaultHasher::new(); value.hash(&mut hasher); return hasher.finish(); } #[test] fn test_from_str_vec() { let values = vec!["one", "two", "three", "four"]; let hashes = vec![ hash_value(&values[0]), hash_value(&values[1]), hash_value(&values[2]), hash_value(&values[3]), ]; let count = values.len(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((hashes[0], hashes[1])); let h23 = hash_value((hashes[2], hashes[3])); let root_hash = hash_value((h01, h23)); assert_eq!(tree.count(), count); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree_empty() { let values: Vec<Vec<u8>> = vec![]; let tree = MerkleTree::build_tree(values); let mut hasher = DefaultHasher::new(); "".hash(&mut hasher); let empty_hash = hasher.finish(); let root_hash = tree.root_hash().clone(); assert_eq!(root_hash, empty_hash); } #[test] fn test_build_tree1() { let values = vec!["hello, world".to_string()]; let root_hash = hash_value(&values[0]); let tree = MerkleTree::build_tree(values); assert_eq!(tree.count(), 1); assert_eq!(tree.height(), 0); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree3() { let values = vec![vec![1], vec![2], vec![3]]; let tree = MerkleTree::build_tree(values); let hashes = vec![ hash_value(&vec![1]), hash_value(&vec![2]), hash_value(&vec![3]), ]; let h01 = hash_value((&hashes[0], &hashes[1])); let h2 = &hashes[2]; let root_hash = hash_value((&h01, h2)); assert_eq!(tree.count(), 3); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree9() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let hashes = values.iter().map(|v| hash_value(v)).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((&hashes[0], 
&hashes[1])); let h23 = hash_value((&hashes[2], &hashes[3])); let h45 = hash_value((&hashes[4], &hashes[5])); let h67 = hash_value((&hashes[6], &hashes[7])); let h8 = &hashes[8]; let h0123 = hash_value((&h01, &h23)); let h4567 = hash_value((&h45, &h67)); let h1to7 = hash_value((&h0123, &h4567)); let root_hash = hash_value((&h1to7, h8)); assert_eq!(tree.count(), 9); assert_eq!(tree.height(), 4); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_valid_proof() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for value in values { let proof = tree.gen_proof(value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } } #[test] fn test_valid_proof_str() { let values = vec!["Hello", "my", "name", "is", "Rusty"]; let tree = MerkleTree::build_tree(values); let value = "Rusty"; let proof = tree.gen_proof(&value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } #[test] fn test_wrong_proof() { let values1 = vec![vec![1], vec![2], vec![3], vec![4]]; let tree1 = MerkleTree::build_tree(values1.clone()); let values2 = vec![vec![4], vec![5], vec![6], vec![7]]; let tree2 = MerkleTree::build_tree(values2); for value in values1 { let proof = tree1.gen_proof(value); assert!(proof.is_some()); let is_valid = tree2.validate_proof(&proof.unwrap()); assert_eq!(is_valid, false); } } #[test] fn test_nth_proof() { for &count in &[1, 2, 3, 10, 15, 16, 17, 22] { let values = (1..=count).map(|x| vec![x as u8]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for i in 0..count { let proof = tree.gen_nth_proof(i); assert!(proof.is_some()); assert_eq!(vec![i as u8 + 1], proof.as_ref().unwrap().value); assert!(tree.validate_proof(&proof.unwrap())); } assert!(tree.gen_nth_proof(count).is_none()); assert!(tree.gen_nth_proof(count + 1000).is_none()); } }
#![cfg(test)] use crate::merkletree::MerkleTree; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; fn hash_value<T>(value: T) -> u64 where T: Hash, { let mut hasher = DefaultHasher::new(); value.hash(&mut hasher); return hasher.finish(); } #[test] fn test_from_str_vec() { let values = vec!["one", "two", "three", "four"]; let hashes = vec![ hash_value(&values[0]), hash_value(&values[1]), hash_value(&values[2]), hash_value(&values[3]), ]; let count = values.len(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((hashes[0], hashes[1])); let h23 = hash_value((hashes[2], hashes[3])); let root_hash = hash_value((h01, h23)); assert_eq!(tree.count(), count); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree_empty() { let values: Vec<Vec<u8>> = vec![]; let tree = MerkleTree::build_tree(values); let mut hasher = DefaultHasher::new(); "".hash(&mut hasher); let empty_hash = hasher.finish(); let root_hash = tree.root_hash().clone(); assert_eq!(root_hash, empty_hash); } #[test] fn test_build_tree1() { let values = vec!["hello, world".to_string()]; let root_hash = hash_value(&values[0]); let tree = MerkleTree::build_tree(values); assert_eq!(tree.count(), 1); assert_eq!(tree.height(), 0); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree3() { let values = vec![vec![1], vec![2], vec![3]]; let tree = MerkleTree::build_tree(values); let hashes = vec![ hash_value(&vec![1]), hash_value(&vec![2]), hash_value(&vec![3]), ]; let h01 = hash_value((&hashes[0], &hashes[1])); let h2 = &hashes[2]; let root_hash = hash_value((&h01, h2)); assert_eq!(tree.count(), 3); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree9() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let hashes = values.iter().map(|v| hash_value(v)).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((&hashes[0], 
&hashes[1])); let h23 = hash_value((&hashes[2], &hashes[3])); let h45 = hash_value((&hashes[4], &hashes[5])); let h67 = hash_value((&hashes[6], &hashes[7])); let h8 = &hashes[8]; let h0123 = hash_value((&h01, &h23)); let h4567 = hash_value((&h45, &h67)); let h1to7 = hash_value((&h0123, &h4567)); let root_hash = hash_value((&h1to7, h8)); assert_eq!(tree.count(), 9); assert_eq!(tree.height(), 4); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_va
#[test] fn test_valid_proof_str() { let values = vec!["Hello", "my", "name", "is", "Rusty"]; let tree = MerkleTree::build_tree(values); let value = "Rusty"; let proof = tree.gen_proof(&value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } #[test] fn test_wrong_proof() { let values1 = vec![vec![1], vec![2], vec![3], vec![4]]; let tree1 = MerkleTree::build_tree(values1.clone()); let values2 = vec![vec![4], vec![5], vec![6], vec![7]]; let tree2 = MerkleTree::build_tree(values2); for value in values1 { let proof = tree1.gen_proof(value); assert!(proof.is_some()); let is_valid = tree2.validate_proof(&proof.unwrap()); assert_eq!(is_valid, false); } } #[test] fn test_nth_proof() { for &count in &[1, 2, 3, 10, 15, 16, 17, 22] { let values = (1..=count).map(|x| vec![x as u8]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for i in 0..count { let proof = tree.gen_nth_proof(i); assert!(proof.is_some()); assert_eq!(vec![i as u8 + 1], proof.as_ref().unwrap().value); assert!(tree.validate_proof(&proof.unwrap())); } assert!(tree.gen_nth_proof(count).is_none()); assert!(tree.gen_nth_proof(count + 1000).is_none()); } }
lid_proof() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for value in values { let proof = tree.gen_proof(value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } }
function_block-function_prefixed
[ { "content": "fn bench_big_rnd_tree(c: &mut Criterion) {\n\n c.bench_function(\"MerkleTree::build_tree - big\", |b| {\n\n let mut values = vec![vec![0u8; 256]; 160];\n\n let mut rng = rand::thread_rng();\n\n\n\n for mut v in &mut values {\n\n rng.fill_bytes(&mut v);\n\n }\n\n\n\n b.iter(|| MerkleTree::build_tree(black_box(values.clone())))\n\n });\n\n}\n\n\n", "file_path": "benches/proof.rs", "rank": 6, "score": 83358.2134746375 }, { "content": "fn bench_small_str_tree(c: &mut Criterion) {\n\n c.bench_function(\"MerkleTree::build_tree - small\", |b| {\n\n let values = vec![\"one\", \"two\", \"three\", \"four\"];\n\n b.iter(|| MerkleTree::build_tree(black_box(values.clone())))\n\n });\n\n}\n\n\n", "file_path": "benches/proof.rs", "rank": 7, "score": 83358.21347463751 }, { "content": "fn bench_small_str_proof_gen(c: &mut Criterion) {\n\n c.bench_function(\"MerkleTree::gen_proof - small\", |b| {\n\n let values = vec![\"one\", \"two\", \"three\", \"four\"];\n\n let tree = MerkleTree::build_tree(values.clone());\n\n\n\n b.iter(|| {\n\n for value in &values {\n\n tree.gen_proof(black_box(value));\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/proof.rs", "rank": 12, "score": 60608.72320135165 }, { "content": "fn bench_big_rnd_proof_gen(c: &mut Criterion) {\n\n c.bench_function(\"MerkleTree::gen_proof - big\", |b| {\n\n let mut values = vec![vec![0u8; 256]; 160];\n\n let mut rng = rand::thread_rng();\n\n\n\n for mut v in &mut values {\n\n rng.fill_bytes(&mut v);\n\n }\n\n\n\n let tree = MerkleTree::build_tree(values.clone());\n\n\n\n b.iter(|| {\n\n for value in &values {\n\n tree.gen_proof(black_box(value.clone()));\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/proof.rs", "rank": 13, "score": 60608.72320135165 }, { "content": "fn bench_big_rnd_proof_check(c: &mut Criterion) {\n\n c.bench_function(\"MerkleTree::validate_proof - big\", |b| {\n\n let mut values = vec![vec![0u8; 256]; 160];\n\n let mut rng = rand::thread_rng();\n\n\n\n for mut v in &mut 
values {\n\n rng.fill_bytes(&mut v);\n\n }\n\n\n\n let tree = MerkleTree::build_tree(values.clone());\n\n let proofs = values\n\n .into_iter()\n\n .map(|v| tree.gen_proof(v).unwrap())\n\n .collect::<Vec<_>>();\n\n\n\n b.iter(|| {\n\n for proof in &proofs {\n\n tree.validate_proof(proof);\n\n }\n\n })\n", "file_path": "benches/proof.rs", "rank": 14, "score": 60608.72320135165 }, { "content": "fn bench_small_str_proof_check(c: &mut Criterion) {\n\n c.bench_function(\"MerkleTree::validate_proof - small\", |b| {\n\n let values = vec![\"one\", \"two\", \"three\", \"four\"];\n\n let tree = MerkleTree::build_tree(values.clone());\n\n let proofs = values\n\n .iter()\n\n .map(|v| tree.gen_proof(v).unwrap())\n\n .collect::<Vec<_>>();\n\n\n\n b.iter(|| {\n\n for proof in &proofs {\n\n tree.validate_proof(proof);\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/proof.rs", "rank": 15, "score": 60608.72320135165 }, { "content": "use std::collections::hash_map::DefaultHasher;\n\nuse std::fmt;\n\nuse std::hash::{Hash, Hasher};\n\n\n\n/// Binary tree that's used for Merkle tree implementation\n\n#[derive(Clone)]\n\npub enum Tree<T> {\n\n /// Empty node\n\n Empty {\n\n /// Hash for the empty value\n\n hash: u64,\n\n },\n\n\n\n /// Leaf node\n\n Leaf {\n\n /// Value hash\n\n hash: u64,\n\n /// Hashed value\n\n value: T,\n\n /// Index in the array: the left most leaf has 0 index\n", "file_path": "src/tree.rs", "rank": 16, "score": 26755.391706207265 }, { "content": "\n\n Tree::Empty { hash }\n\n }\n\n\n\n /// Create new leaf with predefined T value and index\n\n pub fn new_leaf(value: T, index: usize) -> Tree<T>\n\n where\n\n T: Hash,\n\n {\n\n let mut hasher = DefaultHasher::new();\n\n value.hash(&mut hasher);\n\n let hash = hasher.finish();\n\n\n\n Tree::Leaf {\n\n hash: hash.clone(),\n\n value: value,\n\n index: index,\n\n }\n\n }\n\n\n", "file_path": "src/tree.rs", "rank": 17, "score": 26754.0248180196 }, { "content": " match *self {\n\n Tree::Empty { ref hash } => {\n\n 
return f.debug_struct(\"Tree::Empty\").field(\"hash\", hash).finish()\n\n }\n\n Tree::Leaf { ref hash, .. } => {\n\n return f.debug_struct(\"Tree::Leaf\").field(\"hash\", hash).finish()\n\n }\n\n Tree::Node { ref hash, .. } => {\n\n return f.debug_struct(\"Tree::Node\").field(\"hash\", hash).finish()\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Tree<T> {\n\n /// Create an empty tree\n\n pub fn empty() -> Self {\n\n let mut hasher = DefaultHasher::new();\n\n \"\".hash(&mut hasher);\n\n let hash = hasher.finish();\n", "file_path": "src/tree.rs", "rank": 18, "score": 26751.579854721356 }, { "content": " /// Returns a hash from the tree\n\n pub fn hash(&self) -> u64 {\n\n match *self {\n\n Tree::Empty { ref hash } => hash.clone(),\n\n Tree::Leaf { ref hash, .. } => hash.clone(),\n\n Tree::Node { ref hash, .. } => hash.clone(),\n\n }\n\n }\n\n\n\n /// Returns a left index of the covered tree segment (inclusively)\n\n pub fn get_left_index(&self) -> Option<usize> {\n\n match *self {\n\n Tree::Empty { .. } => None,\n\n Tree::Leaf { ref index, .. } => Some(index.clone()),\n\n Tree::Node { ref left_index, .. 
} => Some(left_index.clone()),\n\n }\n\n }\n\n\n\n /// Returns a right index of the covered tree segment (inclusively)\n\n pub fn get_right_index(&self) -> Option<usize> {\n", "file_path": "src/tree.rs", "rank": 19, "score": 26750.412496515703 }, { "content": " index: usize,\n\n },\n\n\n\n /// Node that has both right and left nodes\n\n Node {\n\n /// Hash of (right sub-tree hash + left sub-tree hash)\n\n hash: u64,\n\n /// Left sub-tree\n\n left: Box<Tree<T>>,\n\n /// Right sub-tree\n\n right: Box<Tree<T>>,\n\n /// Index of the left most leaf covered by the current tree\n\n left_index: usize,\n\n /// Index of the right most leaf covered by the current tree\n\n right_index: usize,\n\n },\n\n}\n\n\n\nimpl<T> fmt::Debug for Tree<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/tree.rs", "rank": 20, "score": 26749.29573458123 }, { "content": " match *self {\n\n Tree::Empty { .. } => None,\n\n Tree::Leaf { ref index, .. } => Some(index.clone()),\n\n Tree::Node {\n\n ref right_index, ..\n\n } => Some(right_index.clone()),\n\n }\n\n }\n\n}\n", "file_path": "src/tree.rs", "rank": 21, "score": 26745.04076985662 }, { "content": "use crate::tree::Tree;\n\nuse std::collections::hash_map::DefaultHasher;\n\nuse std::hash::{Hash, Hasher};\n\n\n\n/// A Merkle tree is a binary tree that has two main types of nodes:\n\n/// - leafs that contain a value (usually block) and it's hash;\n\n/// - nodes that contain a hash of a left and right nodes hashes concatenation;\n\n/// h0123\n\n/// / \\\n\n/// h01 h23\n\n/// / \\ / \\\n\n/// h0 h1 h2 h3\n\n#[derive(Debug)]\n\npub struct MerkleTree<T> {\n\n /// The root of the inner binary tree\n\n root: Tree<T>,\n\n\n\n /// The height of the tree\n\n height: usize,\n\n\n", "file_path": "src/merkletree.rs", "rank": 23, "score": 15.194800241441921 }, { "content": " /// Returns `None` if `values` is empty.\n\n pub fn build_tree(values: Vec<T>) -> Self\n\n where\n\n T: Hash,\n\n {\n\n if values.is_empty() {\n\n 
return MerkleTree {\n\n root: Tree::empty(),\n\n height: 0,\n\n count: 0,\n\n map: std::collections::HashMap::new(),\n\n };\n\n }\n\n\n\n let count = values.len();\n\n let mut cur = Vec::with_capacity(count);\n\n let mut next = Vec::with_capacity(count / 2 + 1);\n\n\n\n let mut index = 0usize;\n\n let mut map: std::collections::HashMap<u64, usize> = std::collections::HashMap::new();\n", "file_path": "src/merkletree.rs", "rank": 24, "score": 15.169487384657959 }, { "content": " pub fn gen_nth_proof(&self, n: usize) -> Option<Proof<T>> {\n\n let mut path: Vec<u64> = Vec::new();\n\n return self.get_nth_proof_impl(&mut path, &self.root, n);\n\n }\n\n\n\n /// Generate hash path starting from the root hash and ending with leaf hash.\n\n fn get_nth_proof_impl(\n\n &self,\n\n path: &mut Vec<u64>,\n\n tree: &Tree<T>,\n\n leaf_index: usize,\n\n ) -> Option<Proof<T>> {\n\n match tree {\n\n Tree::Empty { .. } => return None,\n\n Tree::Leaf { value, .. } => {\n\n debug_assert!(tree.get_left_index() == tree.get_right_index());\n\n\n\n if tree.get_left_index().unwrap() != leaf_index {\n\n return None;\n\n }\n", "file_path": "src/merkletree.rs", "rank": 25, "score": 10.206138125285372 }, { "content": " /// The number of leaf nodes in the tree\n\n count: usize,\n\n\n\n /// Hashmap value hash -> leaf indices\n\n map: std::collections::HashMap<u64, usize>,\n\n}\n\n\n\n/// Proof generated from the Merkel tree\n\n/// Proof contains the hash path and the target value\n\n#[derive(Debug, PartialEq)]\n\npub struct Proof<T> {\n\n /// The path to the value\n\n pub path: Vec<u64>,\n\n\n\n /// The target value\n\n pub value: T,\n\n}\n\n\n\nimpl<T: Clone> MerkleTree<T> {\n\n /// Constructs a Merkle Tree from a vector of data blocks.\n", "file_path": "src/merkletree.rs", "rank": 26, "score": 10.14221751946085 }, { "content": " pub fn height(&self) -> usize {\n\n self.height\n\n }\n\n\n\n /// Returns the number of leaves in the Merkle tree\n\n pub fn count(&self) -> usize {\n\n self.count\n\n 
}\n\n\n\n /// Returns whether the Merkle tree is empty or not\n\n pub fn is_empty(&self) -> bool {\n\n self.count() == 0\n\n }\n\n\n\n /// Generate an inclusion proof for the given value.\n\n /// Returns `None` if the given value is not found in the tree.\n\n pub fn gen_proof(&self, value: T) -> Option<Proof<T>>\n\n where\n\n T: Hash + PartialEq,\n\n {\n", "file_path": "src/merkletree.rs", "rank": 27, "score": 9.693716916488611 }, { "content": "\n\n for value in values {\n\n let leaf = Tree::new_leaf(value, index);\n\n map.insert(leaf.hash(), index);\n\n cur.push(leaf);\n\n index = index + 1;\n\n }\n\n\n\n let mut height = 0;\n\n\n\n while cur.len() > 1 {\n\n while cur.len() > 0 {\n\n if cur.len() == 1 {\n\n next.push(cur.remove(0));\n\n } else {\n\n let left = cur.remove(0);\n\n let right = cur.remove(0);\n\n\n\n let mut hasher = DefaultHasher::new();\n\n (left.hash(), right.hash()).hash(&mut hasher);\n", "file_path": "src/merkletree.rs", "rank": 28, "score": 9.589909227059483 }, { "content": " let mut hasher = DefaultHasher::new();\n\n value.hash(&mut hasher);\n\n let value_hash = hasher.finish();\n\n\n\n if !self.map.contains_key(&value_hash) {\n\n return None;\n\n }\n\n\n\n let leaf_index = self.map.get(&value_hash).unwrap();\n\n let proof = self.gen_nth_proof(*leaf_index);\n\n if !proof.is_some() {\n\n return None;\n\n }\n\n if value != proof.as_ref().unwrap().value {\n\n return None;\n\n }\n\n return proof;\n\n }\n\n\n\n /// Generate an inclusion proof for the `n`-th leaf value.\n", "file_path": "src/merkletree.rs", "rank": 29, "score": 9.032531927016361 }, { "content": " debug_assert!(cur.len() == 1);\n\n\n\n let root = cur.remove(0);\n\n debug_assert!(root.get_left_index().unwrap() == 0usize);\n\n debug_assert!(root.get_right_index().unwrap() == count - 1);\n\n\n\n MerkleTree {\n\n root,\n\n height,\n\n count,\n\n map,\n\n }\n\n }\n\n\n\n /// Returns the root hash of Merkle tree\n\n pub fn root_hash(&self) -> u64 {\n\n self.root.hash().clone()\n\n }\n\n\n\n 
/// Returns the height of Merkle tree\n", "file_path": "src/merkletree.rs", "rank": 30, "score": 8.978103584700762 }, { "content": "# Merkle Proof and Verification\n\n\n\n## Introduction\n\n\n\nMerkle trees are hash-based data structures used to prove the integrity of transaction data stored in the block.\n\n\n\n![Merkle Tree](img/merkle_tree.jpeg \"Merkle Tree\")\n\n\n\nAbove you can see what this tree would look like. The eight transactions in the block (A-H) are lined up in the bottom row. The second row contains four hashes (S(X) = sha3 hash) of the child transactions. The third row contains hashes of the child hashes, and the root contains a hash of the hashes of the hashes of the transactions. Generically, this is how the transaction part of an Ethereum block is laid out and the root here is what we know of as a transaction header (one of the 15 pieces of information that goes into the block header).\n\n\n\n## The Problem\n\n\n\nThe reason we use Merkle trees to store block data (i.e. transactions) is that verification is very efficient. This verification is called a Merkle proof.\n\n\n\nSuppose we want to prove that transaction C was indeed in the block that formed the header shown above.\n\n\n\n![Merkle Proof](img/merkle_proof.jpeg \"Merkle Proof\")\n\n\n\nIn addition to the transaction hash C , we also need D, S(A,B), and S(S(E,F),S(G,H)) to form the proof. The verification itself performs the following steps on the proof:\n\n\n\n* Hash C and D to produce S(C,D).\n\n* Hash S(A,B) and S(C,D) to produce S(S(A,B),S(C,D)).\n\n* Hash S(S(A,B),S(C,D)) and S(S(E,F),S(G,H)) to produce the root.\n\n* Check that the root is the same as what has been stored previously.\n\n\n\nThe efficiency here is that we proved a transaction belonged in a block with only 3 accompanying pieces of information (instead of the 7 other transactions that were stored in the block). 
This efficiency becomes exponentially more pronounced with larger trees.\n\n\n\n## Running Tests and benchmarking\n\n\n\n- Use `cargo test` to run all the tests.\n\n\n\n- Use `cargo bench` to run all the benchmarks.\n\n\n\n## Documentation\n", "file_path": "README.md", "rank": 31, "score": 6.226007122047646 }, { "content": "## Testing and benchmarking\n\n\n\nSince I wanted to compare the current implementation with initial one, the tests and benchmarking I left as it is, so benchmarks between two projects can be compared.\n\n\n\nBelow is the comparison of the results from cargo bench:\n\n\n\n| Benchmark test name | Original impl, time| Re-written impl, time |\n\n|------------------------------------------|---------------|-----------------------|\n\n| MerkleTree::build_tree - small | 2.4838 us | 457 ns |\n\n| MerkleTree::gen_proof - small | 1.5685 us | 349 ns |\n\n| MerkleTree::validate_proof - small | 3.6923 us | 352 ns |\n\n| MerkleTree::build_tree - big | 219.33 us | 56.775 us |\n\n| MerkleTree::gen_proof - big | 315.60 us | 51.514 us |\n\n| MerkleTree::validate_proof - big | 530.70 us | 53.869 us |\n\n\n\nFor more details there is benchmark files for both implementations inside\n\nbenches directory.\n\n\n\n## Thoughts about possible improvements \n\n- Theoretically it is possible to store nodes of the tree contiguously in the array, it would be nice to experiment with it;\n\n- Would be interesting to support updating the tree along with making it thread-safe; \n\n- Currently 64-bit hashes are used, but it may not be sufficient;\n", "file_path": "how_this_was_made.md", "rank": 32, "score": 6.170994995155429 }, { "content": " let combined_hash = hasher.finish();\n\n\n\n let left_index = left.get_left_index().unwrap();\n\n let right_index = right.get_right_index().unwrap();\n\n let node = Tree::Node {\n\n hash: combined_hash,\n\n left: Box::new(left),\n\n right: Box::new(right),\n\n left_index: left_index,\n\n right_index: right_index,\n\n };\n\n\n\n 
next.push(node);\n\n }\n\n }\n\n\n\n height += 1;\n\n std::mem::swap(&mut cur, &mut next);\n\n }\n\n\n", "file_path": "src/merkletree.rs", "rank": 33, "score": 5.906763064190836 }, { "content": "//! *merkle* implements a Merkle Tree in Rust.\n\n#![deny(\n\n missing_docs,\n\n missing_debug_implementations,\n\n missing_copy_implementations,\n\n trivial_casts,\n\n trivial_numeric_casts,\n\n unsafe_code,\n\n unstable_features,\n\n unused_import_braces,\n\n unused_qualifications\n\n)]\n\n\n\nmod merkletree;\n\npub use crate::merkletree::MerkleTree;\n\n\n\nmod tree;\n\npub use crate::tree::Tree;\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/lib.rs", "rank": 34, "score": 5.668436481507096 }, { "content": "\n\n path.push(tree.hash());\n\n let proof: Proof<T> = Proof {\n\n path: path.clone(),\n\n value: value.clone(),\n\n };\n\n return Some(proof);\n\n }\n\n Tree::Node {\n\n ref right, left, ..\n\n } => {\n\n if !tree.get_left_index().is_some() {\n\n return None;\n\n }\n\n if !tree.get_right_index().is_some() {\n\n return None;\n\n }\n\n let left_index = tree.get_left_index().unwrap();\n\n let right_index = tree.get_right_index().unwrap();\n\n if leaf_index < left_index {\n", "file_path": "src/merkletree.rs", "rank": 35, "score": 4.977446683429552 }, { "content": " where\n\n T: Hash + PartialEq,\n\n {\n\n // TODO: optimize by traversing through tree instead of comparison\n\n let generated_proof = self.gen_proof(proof_to_verify.value.clone());\n\n if !generated_proof.is_some() {\n\n return false;\n\n }\n\n return generated_proof.unwrap() == *proof_to_verify;\n\n }\n\n}\n", "file_path": "src/merkletree.rs", "rank": 36, "score": 4.350113043322622 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\n\n\nuse criterion::black_box;\n\nuse criterion::Criterion;\n\n\n\nextern crate merkletree;\n\nextern crate rand;\n\n\n\nuse rand::RngCore;\n\nuse merkletree::MerkleTree;\n\n\n", "file_path": "benches/proof.rs", "rank": 37, "score": 4.0896274257442435 }, { "content": 
"# How this was made\n\n\n\nThe purpose of this document is to describe the flow of thoughts during solving the task.\n\n\n\n## Attitude\n\nI've heard about the Merkle tree kind of week before starting on this task. I thought that it would be very nice to learn about it more.\n\nThis task to me is very interesting cause it's a combo of Merkle tree plus rust, the language about which I'd like to learn more.\n\n\n\n## Resources\n\nDidn't have much time during these days cause of the regular working hours, mainly developed it early morning and late evening.\n\n\n\n## Analysis\n\nThe first thing what I did, I read about the Merkle tree and got the idea: if the blockchain was represented as a linked list to verify a transaction, it would be needed to traverse the list from the very beginning that would be O(n) time. Merkle trees solve the problem by doing it for log(n) time.\n\n\n\nAfter understanding its purpose I've started to analyze existing solutions on github, one that looked nice to me was https://github.com/SpinResearch/merkle.rs.\n\n\n\nWhile reading about merkle trees, I've found one more interesting aspect about working with blockchain related code - it should not only be reliable, run fast, consume as less memory as possible, but also be secure.\n\n\n\nFrom security perspective, I've found that guys from SpinResearch rely only on hashes and don't compare the actual value of data (block) that's in the leafs.\n\n\n\nIn addition, it seemed that code that was used for building the proof and proof validation was really complex and hard to understand and also can be faster.\n\n\n\n## Implementation\n\n\n\nI decided to re-write SpinResearch implementation, in order to speed up proof lookup by introducing tree segmentation: each node contains the right and left indices of the right most and left most leaf indices of the current subtree.\n\n\n\nWhile generating the proof it would be simple binary search along the tree, the same could be done when the proof is 
validated.\n\n\n\nI've also simplified interface by removing the hash algorithm and use the default one. \n\n\n", "file_path": "how_this_was_made.md", "rank": 38, "score": 3.670637316842988 }, { "content": " return None;\n\n }\n\n if leaf_index > right_index {\n\n return None;\n\n }\n\n\n\n path.push(tree.hash());\n\n let proof = self.get_nth_proof_impl(path, &*right, leaf_index);\n\n if proof.is_some() {\n\n return proof;\n\n }\n\n\n\n let proof = self.get_nth_proof_impl(path, &*left, leaf_index);\n\n return proof;\n\n }\n\n }\n\n }\n\n\n\n /// Returns true if proof is valid, false otherwise.\n\n pub fn validate_proof(&self, proof_to_verify: &Proof<T>) -> bool\n", "file_path": "src/merkletree.rs", "rank": 39, "score": 3.5538251856519296 }, { "content": "new environment.\n\n\n\n\"Author\" refers to any designer, engineer, programmer, technical\n\nwriter or other person who contributed to the Font Software.\n\n\n\nPERMISSION & CONDITIONS\n\nPermission is hereby granted, free of charge, to any person obtaining\n\na copy of the Font Software, to use, study, copy, merge, embed, modify,\n\nredistribute, and sell modified and unmodified copies of the Font\n\nSoftware, subject to the following conditions:\n\n\n\n1) Neither the Font Software nor any of its individual components,\n\nin Original or Modified Versions, may be sold by itself.\n\n\n\n2) Original or Modified Versions of the Font Software may be bundled,\n\nredistributed and/or sold with any software, provided that each copy\n\ncontains the above copyright notice and this license. These can be\n\nincluded either as stand-alone text files, human-readable headers or\n\nin the appropriate machine-readable metadata fields within text or\n\nbinary files as long as those fields can be easily viewed by the user.\n\n\n\n3) No Modified Version of the Font Software may use the Reserved Font\n\nName(s) unless explicit written permission is granted by the corresponding\n\nCopyright Holder. 
This restriction only applies to the primary font name as\n\npresented to the users.\n\n\n\n4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font\n\nSoftware shall not be used to promote, endorse or advertise any\n\nModified Version, except to acknowledge the contribution(s) of the\n\nCopyright Holder(s) and the Author(s) or with their explicit written\n\npermission.\n\n\n\n5) The Font Software, modified or unmodified, in part or in whole,\n\nmust be distributed entirely under this license, and must not be\n\ndistributed under any other license. The requirement for fonts to\n\nremain under this license does not apply to any document created\n\nusing the Font Software.\n\n\n\nTERMINATION\n\nThis license becomes null and void if any of the above conditions are\n\nnot met.\n\n\n\nDISCLAIMER\n\nTHE FONT SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\n\nOF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE\n\nCOPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n\nINCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL\n\nDAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n\nFROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM\n\nOTHER DEALINGS IN THE FONT SOFTWARE.\n", "file_path": "doc/SourceSerif4-LICENSE.md", "rank": 40, "score": 1.6421451423790832 }, { "content": " });\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n bench_small_str_tree,\n\n bench_small_str_proof_gen,\n\n bench_small_str_proof_check,\n\n bench_big_rnd_tree,\n\n bench_big_rnd_proof_gen,\n\n bench_big_rnd_proof_check,\n\n);\n\n\n\ncriterion_main!(benches);", "file_path": "benches/proof.rs", "rank": 41, "score": 1.6371441103133695 }, { "content": "Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. 
All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.\n\n\n\nThis Font Software is licensed under the SIL Open Font License, Version 1.1.\n\n\n\nThis license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL\n\n\n\n\n\n-----------------------------------------------------------\n\nSIL OPEN FONT LICENSE Version 1.1 - 26 February 2007\n\n-----------------------------------------------------------\n\n\n\nPREAMBLE\n\nThe goals of the Open Font License (OFL) are to stimulate worldwide\n\ndevelopment of collaborative font projects, to support the font creation\n\nefforts of academic and linguistic communities, and to provide a free and\n\nopen framework in which fonts may be shared and improved in partnership\n\nwith others.\n\n\n\nThe OFL allows the licensed fonts to be used, studied, modified and\n\nredistributed freely as long as they are not sold by themselves. The\n\nfonts, including any derivative works, can be bundled, embedded,\n\nredistributed and/or sold with any software provided that any reserved\n\nnames are not used by derivative works. The fonts and derivatives,\n\nhowever, cannot be released under any other type of license. The\n\nrequirement for fonts to remain under this license does not apply\n\nto any document created using the fonts or their derivatives.\n\n\n\nDEFINITIONS\n\n\"Font Software\" refers to the set of files released by the Copyright\n\nHolder(s) under this license and clearly marked as such. 
This may\n\ninclude source files, build scripts and documentation.\n\n\n\n\"Reserved Font Name\" refers to any names specified as such after the\n\ncopyright statement(s).\n\n\n\n\"Original Version\" refers to the collection of Font Software components as\n\ndistributed by the Copyright Holder(s).\n\n\n\n\"Modified Version\" refers to any derivative made by adding to, deleting,\n\nor substituting -- in part or in whole -- any of the components of the\n\nOriginal Version, by changing formats or by porting the Font Software to a\n", "file_path": "doc/SourceSerif4-LICENSE.md", "rank": 42, "score": 1.257126558941279 } ]
Rust
src/routes/balances/handlers_v2.rs
tharsis/safe-client-gateway
98206c5bfcd89f10c3429edccdc108017b179d49
use std::cmp::Ordering; use std::str::FromStr; use bigdecimal::BigDecimal; use rocket::futures::{stream, StreamExt}; use crate::cache::cache_operations::RequestCached; use crate::common::models::backend::balances_v2::Balance as BalanceDto; use crate::common::models::backend::balances_v2::TokenPrice as BackendTokenPrice; use crate::common::models::backend::chains::NativeCurrency; use crate::config::{ balances_core_request_cache_duration, balances_request_timeout, concurrent_balance_token_requests, token_price_cache_duration, }; use crate::providers::fiat::FiatInfoProvider; use crate::providers::info::{DefaultInfoProvider, InfoProvider}; use crate::routes::balances::models::{Balance, Balances, TokenPrice}; use crate::utils::context::RequestContext; use crate::utils::errors::ApiResult; pub async fn balances( context: &RequestContext, chain_id: &str, safe_address: &str, fiat: &str, trusted: bool, exclude_spam: bool, ) -> ApiResult<Balances> { let info_provider = DefaultInfoProvider::new(chain_id, context); let fiat_info_provider = FiatInfoProvider::new(context); let url = core_uri!( info_provider, "/v1/safes/{}/balances/?trusted={}&exclude_spam={}", safe_address, trusted, exclude_spam )?; let body = RequestCached::new_from_context(url, context) .cache_duration(balances_core_request_cache_duration()) .request_timeout(balances_request_timeout()) .execute() .await?; let backend_balances: Vec<BalanceDto> = serde_json::from_str(&body)?; let usd_to_fiat = fiat_info_provider .exchange_usd_to(fiat) .await .unwrap_or(BigDecimal::from(0)); let native_currency: NativeCurrency = info_provider.chain_info().await?.native_currency; let mut total_fiat = 0.0; let token_prices: Vec<TokenPrice> = get_token_prices(context, &info_provider, &backend_balances).await; let mut service_balances: Vec<Balance> = backend_balances .iter() .map(|it| { let token_address: String = it .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()); let token_price: 
Option<&TokenPrice> = token_prices .iter() .find(|&token_price| token_price.address == token_address); let token_to_usd: BigDecimal = token_price .and_then(|t| Some(t.fiat_price.to_owned())) .unwrap_or(BigDecimal::from(0)); let balance = it.to_balance_v2(&token_to_usd, &usd_to_fiat, &native_currency); total_fiat += balance.fiat_balance.parse::<f64>().unwrap_or(0.0); balance }) .collect::<Vec<Balance>>(); service_balances.sort_by(|b1, b2| { BigDecimal::from_str(&b2.fiat_balance) .unwrap() .partial_cmp(&BigDecimal::from_str(&b1.fiat_balance).unwrap()) .unwrap_or(Ordering::Equal) }); Ok(Balances { fiat_total: total_fiat.to_string(), items: service_balances, }) } async fn get_token_prices( context: &RequestContext, info_provider: &impl InfoProvider, backend_balances: &Vec<BalanceDto>, ) -> Vec<TokenPrice> { let token_addresses: Vec<String> = backend_balances .iter() .map(|balance| { balance .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()) }) .collect(); return stream::iter(token_addresses) .map(|token_address| get_token_usd_rate(context, token_address, info_provider)) .buffer_unordered(concurrent_balance_token_requests()) .filter_map(|t| async move { match t { Ok(token_price) => Some(token_price), Err(_) => None, } }) .collect() .await; } async fn get_token_usd_rate( context: &RequestContext, token_address: String, info_provider: &impl InfoProvider, ) -> ApiResult<TokenPrice> { let url = core_uri!(info_provider, "/v1/tokens/{}/prices/usd/", token_address)?; let body = RequestCached::new_from_context(url, context) .cache_duration(token_price_cache_duration()) .execute() .await?; let response: BackendTokenPrice = serde_json::from_str(&body)?; return Ok(TokenPrice { address: token_address.to_string(), fiat_code: response.fiat_code, fiat_price: response.fiat_price, timestamp: response.timestamp, }); }
use std::cmp::Ordering; use std::str::FromStr; use bigdecimal::BigDecimal; use rocket::futures::{stream, StreamExt}; use crate::cache::cache_operations::RequestCached; use crate::common::models::backend::balances_v2::Balance as BalanceDto; use crate::common::models::backend::balances_v2::TokenPrice as BackendTokenPrice; use crate::common::models::backend::chains::NativeCurrency; use crate::config::{ balances_core_request_cache_duration, balances_request_timeout, concurrent_balance_token_requests, token_price_cache_duration, }; use crate::providers::fiat::FiatInfoProvider; use crate::providers::info::{DefaultInfoProvider, InfoProvider}; use crate::routes::balances::models::{Balance, Balances, TokenPrice}; use crate::utils::context::RequestContext; use crate::utils::errors::ApiResult; pub async fn balances( context: &RequestContext, chain_id: &str, safe_address: &str, fiat: &str, trusted: bool, exclude_spam: bool, ) -> ApiResult<Balances> { let info_provider = DefaultInfoProvider::new(chain_id, context); let fiat_info_provider = FiatInfoProvider::new(context); let url = core_uri!( info_provider, "/v1/safes/{}/balances/?trusted={}&exclude_spam={}", safe_address, trusted, exclude_spam )?; let body = RequestCached::new_from_context(url, context) .cache_duration(balances_core_request_cache_duration()) .request_timeout(balances_request_timeout()) .execute() .await?; let backend_balances: Vec<BalanceDto> = serde_json::from_str(&body)?; let usd_to_fiat = fiat_info_provider .exchange_usd_to(fiat) .await .unwrap_or(BigDecimal::from(0)); let native_currency: NativeCurrency = info_provider.chain_info().await?.native_currency; let mut total_fiat = 0.0; let token_prices: Vec<TokenPrice> = get_token_prices(context, &info_provider, &backend_balances).await; let mut service_balances: Vec<Balance> = backend_balances .iter() .map(|it| { let token_address: String = it .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()); let token_price: 
Option<&TokenPrice> = token_prices .iter() .find(|&token_price| token_price.address == token_address); let token_to_usd: BigDecimal = token_price .and_then(|t| Some(t.fiat_price.to_owned())) .unwrap_or(BigDecimal::from(0)); let balance = it.to_balance_v2(&token_to_usd, &usd_to_fiat, &native_currency); total_fiat += balance.fiat_balance.parse::<f64>().unwrap_or(0.0); balance }) .collect::<Vec<Balance>>(); service_balances.sort_by(|b1, b2| { BigDecimal::from_str(&b2.fiat_balance) .unwrap() .partial_cmp(&BigDecimal::from_str(&b1.fiat_balance).unwrap()) .unwrap_or(Ordering::Equal) }); Ok(Balances { fiat_total: total_fiat.to_string(), items: service_balances, }) } async fn get_token_prices( context: &RequestContext, info_provider: &impl InfoProvider, backend_balances: &Vec<BalanceDto>, ) -> Vec<TokenPrice> { let token_addresses: Vec<String> = backend_balances .iter() .map(|balance| { balance .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()) }) .collect(); return stream::iter(token_addresses) .map(|token_address| get_token_usd_rate(context, token_address, info_provider)) .buffer_unordered(concurrent_balance_token_requests()) .filter_map(|t| async move { match t { Ok(token_price) => Some(token_price), Err(_) => None, } }) .collect() .await; } async fn get_token_usd_rate( context: &RequestContext, token_address: String, info_provider: &impl InfoProvider, ) -> ApiResult<TokenPrice> { let url = core_uri!(info_provider, "/v1/tokens/{}/prices/usd/", token_address)?; let body = RequestCached::new_from_context(url, context) .cache_duration(token_price_cache_duration()) .execute() .await?; let response: BackendTokenPrice = serde_json::from_str(&body)?; return
; }
Ok(TokenPrice { address: token_address.to_string(), fiat_code: response.fiat_code, fiat_price: response.fiat_price, timestamp: response.timestamp, })
call_expression
[ { "content": "pub fn build_manifest_url(url: &str) -> ApiResult<String> {\n\n let mut url_parts = Url::parse(url).or(Err(api_error!(\"Not a valid Url\")))?;\n\n\n\n if !url_parts.scheme().starts_with(\"http\") {\n\n Err(api_error!(\"Invalid scheme\"))\n\n } else if url_parts.host_str().is_none() {\n\n Err(api_error!(\"Invalid host\"))\n\n } else if url_parts.host_str() == Some(\"localhost\") {\n\n Err(api_error!(\"Localhost not accepted\"))\n\n } else if IP_ADDRESS.captures(url_parts.host_str().unwrap()).is_some() {\n\n Err(api_error!(\"IP address not accepted\"))\n\n } else {\n\n url_parts\n\n .path_segments_mut()\n\n .unwrap()\n\n .pop_if_empty()\n\n .push(\"manifest.json\");\n\n url_parts.set_query(None);\n\n Ok(url_parts.to_string())\n\n }\n\n}\n", "file_path": "src/utils/urls.rs", "rank": 0, "score": 335142.9728092004 }, { "content": "#[doc(hidden)]\n\n#[get(\"/about/redis/<token>\")]\n\npub fn redis(context: RequestContext, token: String) -> ApiResult<String> {\n\n if token != webhook_token() {\n\n bail!(\"Invalid token\");\n\n }\n\n Ok(context.cache().info().unwrap_or(String::new()))\n\n}\n", "file_path": "src/routes/about/routes.rs", "rank": 1, "score": 292982.4643488075 }, { "content": "pub fn generate_token_key(chain_id: &str) -> String {\n\n format!(\"{}_{}\", TOKENS_KEY_BASE, chain_id)\n\n}\n", "file_path": "src/providers/info.rs", "rank": 2, "score": 287433.75641370355 }, { "content": "#[post(\"/v1/hook/update/<token>\", format = \"json\", data = \"<update>\")]\n\npub fn update(context: RequestContext, token: String, update: Json<Payload>) -> ApiResult<()> {\n\n if token != webhook_token() {\n\n bail!(\"Invalid token\");\n\n }\n\n invalidate_caches(context.cache(), &update)\n\n}\n\n\n\n#[post(\n\n \"/v1/chains/<chain_id>/hook/update/<token>\",\n\n format = \"json\",\n\n data = \"<payload>\"\n\n)]\n", "file_path": "src/routes/hooks/routes.rs", "rank": 3, "score": 255077.83391566123 }, { "content": "pub fn feature_flag_balances_rate_implementation() -> 
bool {\n\n env_with_default(\"FEATURE_FLAG_BALANCES_RATE_IMPLEMENTATION\", false)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 4, "score": 251431.92596238962 }, { "content": "fn pipeline_delete(con: &mut redis::Connection, keys: Iter<String>) {\n\n let pipeline = &mut pipe();\n\n for key in keys {\n\n pipeline.del(key);\n\n }\n\n pipeline.execute(con);\n\n}\n\n\n", "file_path": "src/cache/redis.rs", "rank": 5, "score": 244716.01182630134 }, { "content": "pub fn scheme() -> String {\n\n env_with_default(\"SCHEME\", \"https\".into())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 6, "score": 216613.184475829 }, { "content": "pub fn version() -> String {\n\n option_env!(\"VERSION\")\n\n .unwrap_or(env!(\"CARGO_PKG_VERSION\"))\n\n .to_string()\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 7, "score": 216613.184475829 }, { "content": "pub fn redis_uri() -> String {\n\n env::var(\"REDIS_URI\").expect(\"REDIS_URI missing in env\")\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 8, "score": 214135.25122056578 }, { "content": "pub fn webhook_token() -> String {\n\n env::var(\"WEBHOOK_TOKEN\").expect(\"WEBHOOK_TOKEN missing in env\")\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 9, "score": 214135.25122056578 }, { "content": "pub fn log_all_error_responses() -> bool {\n\n env_with_default(\"LOG_ALL_ERROR_RESPONSES\", false)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 10, "score": 212158.87896581469 }, { "content": "pub fn balances_request_timeout() -> u64 {\n\n env_with_default(\"BALANCES_REQUEST_TIMEOUT\", 20000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 11, "score": 212025.9007481099 }, { "content": "pub fn feature_flag_nested_decoding() -> bool {\n\n env_with_default(\"FEATURE_FLAG_NESTED_DECODING\", true)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 12, "score": 209846.91428974617 }, { "content": "pub fn vpc_transaction_service_uri() -> bool {\n\n 
env_with_default(\"VPC_TRANSACTION_SERVICE_URI\", true)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 13, "score": 209846.91428974614 }, { "content": "pub fn base_config_service_uri() -> String {\n\n format!(\n\n \"{}{}\",\n\n env::var(\"CONFIG_SERVICE_URI\").expect(\"CONFIG_SERVICE_URI missing in env\"),\n\n \"/api\"\n\n )\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 14, "score": 209434.11592504708 }, { "content": "pub fn base_exchange_api_uri() -> String {\n\n format!(\n\n \"{}?access_key={}\",\n\n env::var(\"EXCHANGE_API_BASE_URI\").unwrap(),\n\n env::var(\"EXCHANGE_API_KEY\").unwrap()\n\n )\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 15, "score": 209434.11592504708 }, { "content": "pub fn transaction_service_auth_token() -> String {\n\n let token = env::var(\"TRANSACTION_SERVICE_AUTH_TOKEN\").unwrap_or_else(|_| {\n\n log::warn!(\"TRANSACTION_SERVICE_AUTH_TOKEN missing in env\");\n\n String::new()\n\n });\n\n format!(\"Token {}\", token)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 16, "score": 209434.11592504708 }, { "content": "pub fn build_number() -> Option<String> {\n\n option_env!(\"BUILD_NUMBER\").map(|it| it.to_string())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 17, "score": 205796.43790810107 }, { "content": "#[cfg(not(test))]\n\npub fn setup_http_client() -> impl HttpClient {\n\n reqwest::Client::builder()\n\n .connect_timeout(Duration::from_millis(internal_client_connect_timeout()))\n\n .build()\n\n .unwrap()\n\n}\n", "file_path": "src/utils/http_client.rs", "rank": 18, "score": 199546.3659685058 }, { "content": "pub fn get_transaction_service_host(chain_info: ChainInfo) -> String {\n\n if vpc_transaction_service_uri() {\n\n chain_info.vpc_transaction_service\n\n } else {\n\n chain_info.transaction_service\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! 
core_uri {\n\n ($info_provider:tt, $path:expr) => {{\n\n let result: ApiResult<String> =\n\n match $info_provider.chain_info().await {\n\n Ok(chain_info) => Ok(format!(\"{}/api{}\", crate::macros::get_transaction_service_host(chain_info), $path)),\n\n Err(error) => Err(error,)\n\n };\n\n result\n\n }};\n\n ($info_provider:tt, $path:literal, $($arg:tt)*) => {{\n", "file_path": "src/macros.rs", "rank": 19, "score": 191915.85850802768 }, { "content": "fn info(con: &mut redis::Connection) -> Option<String> {\n\n redis::cmd(\"INFO\").query(con).ok()\n\n}\n", "file_path": "src/cache/redis.rs", "rank": 20, "score": 189157.73713990595 }, { "content": "pub fn hex_hash<T: Hash>(t: &T) -> String {\n\n let mut s = DefaultHasher::new();\n\n t.hash(&mut s);\n\n format!(\"{:#x}\", s.finish())\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 21, "score": 187891.21354540318 }, { "content": "pub fn collectibles_request_timeout() -> u64 {\n\n env_with_default(\"COLLECTIBLES_REQUEST_TIMEOUT\", 20000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 22, "score": 182825.37826657516 }, { "content": "pub fn balances_cache_duration() -> usize {\n\n env_with_default(\"BALANCES_REQUEST_CACHE_DURATION\", 60 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 23, "score": 182695.6874337159 }, { "content": "pub fn concurrent_balance_token_requests() -> usize {\n\n env_with_default(\"CONCURRENT_BALANCE_TOKEN_REQUESTS\", 5)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 24, "score": 180387.21129420598 }, { "content": "fn get_to_param(data_decoded: &Option<DataDecoded>, fallback: &str) -> String {\n\n data_decoded\n\n .as_ref()\n\n .and_then(|it| match it.get_parameter_single_value(\"to\") {\n\n Some(e) => Some(e),\n\n None => it.get_parameter_single_value(\"_to\"),\n\n })\n\n .unwrap_or(String::from(fallback))\n\n}\n\n\n", "file_path": "src/routes/transactions/converters/mod.rs", "rank": 25, "score": 178927.16856731442 }, { "content": "fn get_from_param(data_decoded: 
&Option<DataDecoded>, fallback: &str) -> String {\n\n data_decoded\n\n .as_ref()\n\n .and_then(|it| match it.get_parameter_single_value(\"from\") {\n\n Some(e) => Some(e),\n\n None => it.get_parameter_single_value(\"_from\"),\n\n })\n\n .unwrap_or(String::from(fallback))\n\n}\n\n\n", "file_path": "src/routes/transactions/converters/mod.rs", "rank": 26, "score": 178927.16856731442 }, { "content": "pub fn balances_core_request_cache_duration() -> usize {\n\n env_with_default(\"BALANCES_CORE_REQUEST_CACHE_DURATION\", indefinite_timeout())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 27, "score": 178156.50635831384 }, { "content": "fn check_sender_or_receiver(data_decoded: &Option<DataDecoded>, expected: &str) -> bool {\n\n if data_decoded.is_none() {\n\n return false;\n\n };\n\n let data = data_decoded.as_ref().unwrap();\n\n data.method == TRANSFER_METHOD\n\n || &get_from_param(data_decoded, \"\") == expected\n\n || &get_to_param(data_decoded, \"\") == expected\n\n}\n", "file_path": "src/routes/transactions/converters/mod.rs", "rank": 28, "score": 177413.72494762207 }, { "content": "#[test]\n\nfn erc20_token_balance_fiat_is_twice_usd() {\n\n std::env::set_var(\"FEATURE_FLAG_BALANCES_RATE_IMPLEMENTATION\", \"false\");\n\n std::env::set_var(\"VPC_TRANSACTION_SERVICE_URI\", \"false\");\n\n let balance_dto = serde_json::from_str::<BalanceDto>(BALANCE_COMPOUND_ETHER).unwrap();\n\n\n\n let expected = Balance {\n\n token_info: TokenInfo {\n\n token_type: TokenType::Erc20,\n\n address: \"0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e\".to_string(),\n\n decimals: 8,\n\n symbol: \"cETH\".to_string(),\n\n name: \"Compound Ether 📈\".to_string(),\n\n logo_uri: Some(\"https://gnosis-safe-token-logos.s3.amazonaws.com/0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e.png\".to_string()),\n\n },\n\n balance: \"5002\".to_string(),\n\n fiat_balance: \"0.0028\".to_string(),\n\n fiat_conversion: \"57.0924\".to_string(),\n\n };\n\n\n\n let usd_to_fiat = 2.0;\n", "file_path": 
"src/common/converters/tests/balances.rs", "rank": 29, "score": 162854.03565581908 }, { "content": "#[test]\n\nfn erc20_token_balance_fiat_is_twice_usd() {\n\n std::env::set_var(\"FEATURE_FLAG_BALANCES_RATE_IMPLEMENTATION\", \"true\");\n\n let balance_dto = serde_json::from_str::<BalanceDto>(BALANCE_COMPOUND_ETHER).unwrap();\n\n\n\n let expected = Balance {\n\n token_info: TokenInfo {\n\n token_type: TokenType::Erc20,\n\n address: \"0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e\".to_string(),\n\n decimals: 8,\n\n symbol: \"cETH\".to_string(),\n\n name: \"Compound Ether 📈\".to_string(),\n\n logo_uri: Some(\"https://gnosis-safe-token-logos.s3.amazonaws.com/0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e.png\".to_string()),\n\n },\n\n balance: \"5002\".to_string(),\n\n fiat_balance: \"0.00285\".to_string(),\n\n fiat_conversion: \"57.09240\".to_string(),\n\n };\n\n\n\n let token_to_usd = BigDecimal::from_str(\"28.5462\").unwrap();\n\n let usd_to_fiat = BigDecimal::from_str(\"2.0\").unwrap();\n", "file_path": "src/common/converters/tests/balances_v2.rs", "rank": 30, "score": 160916.16750798252 }, { "content": "#[test]\n\nfn invalidation_pattern_collectibles_string() {\n\n let invalidation_pattern =\n\n InvalidationPattern::Collectibles(InvalidationScope::Both, \"some_address\".to_string());\n\n let expected = format!(\"{}*/some_address/collectibles*\", CACHE_REQS_RESP_PREFIX);\n\n\n\n let actual = invalidation_pattern.to_pattern_string();\n\n\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "src/cache/tests/cache_operations.rs", "rank": 31, "score": 156646.3069882547 }, { "content": "#[test]\n\nfn invalidation_pattern_balances_string() {\n\n let invalidation_pattern =\n\n InvalidationPattern::Balances(InvalidationScope::Both, \"some_address\".to_string());\n\n let expected = format!(\"{}*/some_address/balances*\", CACHE_REQS_RESP_PREFIX);\n\n\n\n let actual = invalidation_pattern.to_pattern_string();\n\n\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": 
"src/cache/tests/cache_operations.rs", "rank": 32, "score": 156518.83818884206 }, { "content": "#[test]\n\nfn map_status_awaiting_execution() {\n\n let tx = serde_json::from_str::<MultisigTransaction>(\n\n crate::tests::json::MULTISIG_TX_AWAITING_EXECUTION,\n\n )\n\n .unwrap();\n\n let safe_info =\n\n serde_json::from_str::<SafeInfo>(crate::tests::json::SAFE_WITH_MODULES).unwrap();\n\n let actual = tx.map_status(&safe_info);\n\n\n\n assert_eq!(TransactionStatus::AwaitingExecution, actual);\n\n}\n\n\n", "file_path": "src/routes/transactions/converters/tests/map_status.rs", "rank": 33, "score": 152441.0607565546 }, { "content": "#[test]\n\nfn map_status_awaiting_confirmations_required_field_none() {\n\n let tx = serde_json::from_str::<MultisigTransaction>(\n\n crate::tests::json::MULTISIG_TX_AWAITING_CONFIRMATIONS_REQUIRED_NULL,\n\n )\n\n .unwrap();\n\n let safe_info =\n\n serde_json::from_str::<SafeInfo>(crate::tests::json::SAFE_WITH_THRESHOLD_TWO).unwrap();\n\n let actual = tx.map_status(&safe_info);\n\n\n\n assert_eq!(TransactionStatus::AwaitingConfirmations, actual);\n\n}\n\n\n", "file_path": "src/routes/transactions/converters/tests/map_status.rs", "rank": 34, "score": 146364.95727285664 }, { "content": "#[post(\"/v1/flush/<token>\", format = \"json\", data = \"<invalidation_pattern>\")]\n\npub fn flush(\n\n context: RequestContext,\n\n token: String,\n\n invalidation_pattern: Json<InvalidationPattern>,\n\n) -> ApiResult<()> {\n\n if token != webhook_token() {\n\n bail!(\"Invalid token\");\n\n }\n\n Invalidate::new(invalidation_pattern.0, context.cache()).execute();\n\n Ok(())\n\n}\n", "file_path": "src/routes/hooks/routes.rs", "rank": 35, "score": 144962.83697061517 }, { "content": "#[cfg(test)]\n\npub fn setup_rocket(\n\n mock_http_client: MockHttpClient,\n\n routes: impl Into<Vec<Route>>,\n\n) -> Rocket<Build> {\n\n dotenv().ok();\n\n let cache = create_service_cache();\n\n cache.invalidate_pattern(\"*\"); // Clearing cache for test\n\n\n\n 
rocket::build()\n\n .mount(\"/\", routes)\n\n .manage(Arc::new(mock_http_client) as Arc<dyn HttpClient>)\n\n .manage(Arc::new(cache) as Arc<dyn Cache>)\n\n}\n\n\n", "file_path": "src/tests/main.rs", "rank": 36, "score": 144962.83697061517 }, { "content": "pub fn about() -> About {\n\n About {\n\n name: env!(\"CARGO_PKG_NAME\").to_string(),\n\n version: version(),\n\n build_number: build_number(),\n\n }\n\n}\n\n\n\npub async fn get_master_copies(\n\n context: &RequestContext,\n\n chain_id: &str,\n\n) -> ApiResult<Vec<Implementation>> {\n\n let info_provider = DefaultInfoProvider::new(chain_id, &context);\n\n Ok(info_provider\n\n .master_copies()\n\n .await?\n\n .into_iter()\n\n .map(|master_copy| master_copy.into())\n\n .collect())\n\n}\n", "file_path": "src/routes/about/handlers.rs", "rank": 37, "score": 144049.01050908747 }, { "content": "#[cfg(test)]\n\npub fn setup_rocket_with_mock_cache(\n\n mock_http_client: MockHttpClient,\n\n mock_cache: MockCache,\n\n routes: impl Into<Vec<Route>>,\n\n) -> Rocket<Build> {\n\n dotenv().ok();\n\n\n\n rocket::build()\n\n .mount(\"/\", routes)\n\n .manage(Arc::new(mock_http_client) as Arc<dyn HttpClient>)\n\n .manage(Arc::new(mock_cache) as Arc<dyn Cache>)\n\n}\n\n\n", "file_path": "src/tests/main.rs", "rank": 38, "score": 141733.7935884893 }, { "content": "pub fn build_backend_request(\n\n device_data: &DeviceData,\n\n safe_registration: &SafeRegistration,\n\n) -> BackendRegistrationRequest {\n\n BackendRegistrationRequest {\n\n notification_device_data: device_data.clone(),\n\n safes: safe_registration.safes.to_owned(),\n\n signatures: safe_registration.signatures.to_owned(),\n\n }\n\n}\n", "file_path": "src/routes/notifications/handlers.rs", "rank": 39, "score": 141733.7935884893 }, { "content": "pub fn post_hook_update(\n\n context: RequestContext,\n\n chain_id: String,\n\n token: String,\n\n payload: Json<Payload>,\n\n) -> ApiResult<()> {\n\n update(context, token, payload)\n\n}\n\n\n", "file_path": 
"src/routes/hooks/routes.rs", "rank": 40, "score": 141733.7935884893 }, { "content": "fn map_headers(headers_input: &HashMap<String, String>) -> HeaderMap {\n\n let mut headers = HeaderMap::new();\n\n for (name, value) in headers_input {\n\n headers.insert(\n\n HeaderName::from_bytes(name.as_bytes())\n\n .expect(&format!(\"Header name '{}' is not supported\", &name)),\n\n HeaderValue::from_str(value).expect(&format!(\"Invalid header value for '{}'\", &name)),\n\n );\n\n }\n\n headers\n\n}\n\n\n", "file_path": "src/utils/http_client.rs", "rank": 41, "score": 140974.91160833792 }, { "content": "#[doc(hidden)]\n\n#[get(\"/\")]\n\npub fn root() -> Redirect {\n\n Redirect::temporary(\"https://gnosis.github.io/safe-client-gateway/\")\n\n}\n", "file_path": "src/routes/mod.rs", "rank": 42, "score": 140577.93635887103 }, { "content": "#[test]\n\npub fn main_produces_valid_rocket_instance() {\n\n crate::rocket();\n\n}\n", "file_path": "src/tests/main.rs", "rank": 43, "score": 140202.25820691377 }, { "content": "pub fn about_cache_duration() -> usize {\n\n env_with_default(\"ABOUT_CACHE_DURATION\", 60 * 15 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 44, "score": 138934.7710443971 }, { "content": "pub fn log_threshold() -> f32 {\n\n env_with_default(\"LOG_THRESHOLD\", 1.0)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 45, "score": 138934.7710443971 }, { "content": "pub fn transaction_request_timeout() -> u64 {\n\n env_with_default(\"TRANSACTION_REQUEST_TIMEOUT\", 30000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 46, "score": 137348.89297674515 }, { "content": "pub fn default_request_timeout() -> u64 {\n\n env_with_default(\"DEFAULT_REQUEST_TIMEOUT\", 10000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 47, "score": 137348.89297674515 }, { "content": "// OTHERS\n\npub fn redis_scan_count() -> usize {\n\n env_with_default(\"REDIS_SCAN_COUNT\", 300)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 48, "score": 
137348.89297674515 }, { "content": "pub fn request_cache_duration() -> usize {\n\n env_with_default(\"REQUEST_CACHE_DURATION\", indefinite_timeout())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 49, "score": 137348.89297674515 }, { "content": "pub fn short_error_duration() -> usize {\n\n env_with_default(\"SHORT_ERROR_DURATION\", 60 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 50, "score": 137348.89297674515 }, { "content": "pub fn long_error_duration() -> usize {\n\n env_with_default(\"LONG_ERROR_DURATION\", 60 * 15 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 51, "score": 137348.89297674515 }, { "content": "pub fn token_price_cache_duration() -> usize {\n\n env_with_default(\"TOKEN_PRICE_CACHE_DURATION\", 10 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 52, "score": 135817.35759516963 }, { "content": "pub fn contract_info_request_timeout() -> u64 {\n\n env_with_default(\"CONTRACT_INFO_REQUEST_TIMEOUT\", 3000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 53, "score": 135817.35759516963 }, { "content": "// ERRORS\n\npub fn request_error_cache_duration() -> usize {\n\n env_with_default(\"REQS_ERROR_CACHE_DURATION\", short_error_duration())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 54, "score": 135817.35759516963 }, { "content": "pub fn tx_queued_cache_duration() -> usize {\n\n env_with_default(\"TX_QUEUED_CACHE_DURATION\", request_cache_duration())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 55, "score": 135817.35759516963 }, { "content": "pub fn safe_apps_cache_duration() -> usize {\n\n env_with_default(\"SAFE_APPS_CACHE_DURATION\", indefinite_timeout())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 56, "score": 135817.35759516963 }, { "content": "// FUNCTIONAL TIMEOUTS\n\npub fn safe_info_cache_duration() -> usize {\n\n env_with_default(\"SAFE_INFO_CACHE_DURATION\", indefinite_timeout())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 57, "score": 
135817.35759516963 }, { "content": "pub fn safe_info_request_timeout() -> u64 {\n\n env_with_default(\"SAFE_INFO_REQUEST_TIMEOUT\", 10000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 58, "score": 135817.35759516963 }, { "content": "pub fn chain_info_request_timeout() -> u64 {\n\n env_with_default(\"CHAIN_INFO_REQUEST_TIMEOUT\", 15000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 59, "score": 135817.35759516963 }, { "content": "pub fn token_info_request_timeout() -> u64 {\n\n env_with_default(\"TOKEN_INFO_REQUEST_TIMEOUT\", 15000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 60, "score": 135817.35759516963 }, { "content": "pub fn token_info_cache_duration() -> usize {\n\n env_with_default(\"TOKEN_INFO_CACHE_DURATION\", 60 * 60 * 24 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 61, "score": 135817.35759516963 }, { "content": "pub fn create_service_cache() -> ServiceCache {\n\n ServiceCache(create_pool())\n\n}\n\n\n", "file_path": "src/cache/redis.rs", "rank": 62, "score": 135817.35759516963 }, { "content": "// REQUEST TIMEOUTS\n\npub fn internal_client_connect_timeout() -> u64 {\n\n env_with_default(\"INTERNAL_CLIENT_CONNECT_TIMEOUT\", 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 63, "score": 135817.35759516963 }, { "content": "pub fn address_info_cache_duration() -> usize {\n\n env_with_default(\"ADDRESS_INFO_CACHE_DURATION\", indefinite_timeout())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 64, "score": 135817.35759516963 }, { "content": "pub fn chain_info_cache_duration() -> usize {\n\n env_with_default(\"CHAIN_INFO_CACHE_DURATION\", indefinite_timeout())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 65, "score": 135817.35759516963 }, { "content": "pub fn owners_for_safes_cache_duration() -> usize {\n\n env_with_default(\"OWNERS_FOR_SAFES_CACHE_DURATION\", 60 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 66, "score": 135817.35759516963 }, { "content": "pub fn 
exchange_api_cache_duration() -> usize {\n\n env_with_default(\"EXCHANGE_API_CACHE_DURATION\", 60 * 60 * 12 * 1000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 67, "score": 135817.35759516963 }, { "content": "pub fn safe_app_manifest_cache_duration() -> usize {\n\n env_with_default(\"SAFE_APP_MANIFEST_CACHE_DURATION\", indefinite_timeout())\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 68, "score": 134337.41873996478 }, { "content": "pub fn safe_app_info_request_timeout() -> u64 {\n\n env_with_default(\"SAFE_APP_INFO_REQUEST_TIMEOUT\", 3000)\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 69, "score": 134337.41873996478 }, { "content": "pub fn chain_info_response_cache_duration() -> usize {\n\n env_with_default(\"CHAIN_INFO_RESPONSE_CACHE_DURATION\", 1) // set to negligible value\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 70, "score": 134337.41873996478 }, { "content": "#[doc(hidden)]\n\npub fn error_catchers() -> Vec<Catcher> {\n\n catchers![not_found, panic]\n\n}\n\n\n", "file_path": "src/routes/mod.rs", "rank": 71, "score": 133641.85492934467 }, { "content": "#[doc(hidden)]\n\npub fn active_routes() -> Vec<Route> {\n\n routes![\n\n root,\n\n about::routes::backbone,\n\n about::routes::get_about,\n\n about::routes::get_chains_about,\n\n about::routes::redis,\n\n about::routes::get_master_copies,\n\n balances::routes::get_balances,\n\n balances::routes::get_supported_fiat,\n\n chains::routes::get_chain,\n\n chains::routes::get_chains,\n\n collectibles::routes::get_collectibles,\n\n contracts::routes::post_data_decoder,\n\n contracts::routes::get_contract,\n\n delegates::routes::delete_delegate,\n\n delegates::routes::delete_safe_delegate,\n\n delegates::routes::get_delegates,\n\n delegates::routes::post_delegate,\n\n notifications::routes::post_notification_registration,\n", "file_path": "src/routes/mod.rs", "rank": 72, "score": 133641.85492934467 }, { "content": "fn env_with_default<T: FromStr>(key: &str, default: T) -> 
T\n\nwhere\n\n <T as FromStr>::Err: std::fmt::Debug,\n\n{\n\n match env::var(key) {\n\n Ok(value) => value\n\n .parse()\n\n .expect(&format!(\"Parsing of {} env var key failed\", &key)),\n\n Err(_) => default,\n\n }\n\n}\n", "file_path": "src/config/mod.rs", "rank": 73, "score": 129332.85236563745 }, { "content": "fn data_size(data: &Option<String>) -> usize {\n\n match data {\n\n Some(actual_data) => {\n\n let length = actual_data.len();\n\n match length {\n\n 0 => 0,\n\n _ => (length - 2) / 2,\n\n }\n\n }\n\n None => 0,\n\n }\n\n}\n\n\n", "file_path": "src/routes/transactions/converters/mod.rs", "rank": 74, "score": 126993.34305144924 }, { "content": "fn get_multisig_tx(source: &str) -> MultisigTransaction {\n\n serde_json::from_str::<MultisigTransaction>(source).unwrap()\n\n}\n", "file_path": "src/routes/transactions/handlers/tests/transactions_queued.rs", "rank": 75, "score": 126540.32298691606 }, { "content": "#[test]\n\nfn erc20_token_balance_usd_balance() {\n\n std::env::set_var(\"FEATURE_FLAG_BALANCES_RATE_IMPLEMENTATION\", \"false\");\n\n std::env::set_var(\"VPC_TRANSACTION_SERVICE_URI\", \"false\");\n\n let balance_dto = serde_json::from_str::<BalanceDto>(BALANCE_COMPOUND_ETHER).unwrap();\n\n\n\n let expected = Balance {\n\n token_info: TokenInfo {\n\n token_type: TokenType::Erc20,\n\n address: \"0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e\".to_string(),\n\n decimals: 8,\n\n symbol: \"cETH\".to_string(),\n\n name: \"Compound Ether 📈\".to_string(),\n\n logo_uri: Some(\"https://gnosis-safe-token-logos.s3.amazonaws.com/0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e.png\".to_string()),\n\n },\n\n balance: \"5002\".to_string(),\n\n fiat_balance: \"0.0014\".to_string(),\n\n fiat_conversion: \"28.5462\".to_string(),\n\n };\n\n\n\n let usd_to_fiat = 1.0;\n", "file_path": "src/common/converters/tests/balances.rs", "rank": 76, "score": 125600.04909289813 }, { "content": "#[test]\n\nfn erc20_token_balance_usd_balance() {\n\n 
std::env::set_var(\"FEATURE_FLAG_BALANCES_RATE_IMPLEMENTATION\", \"true\");\n\n let balance_dto = serde_json::from_str::<BalanceDto>(BALANCE_COMPOUND_ETHER).unwrap();\n\n\n\n let expected = Balance {\n\n token_info: TokenInfo {\n\n token_type: TokenType::Erc20,\n\n address: \"0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e\".to_string(),\n\n decimals: 8,\n\n symbol: \"cETH\".to_string(),\n\n name: \"Compound Ether 📈\".to_string(),\n\n logo_uri: Some(\"https://gnosis-safe-token-logos.s3.amazonaws.com/0xd6801a1DfFCd0a410336Ef88DeF4320D6DF1883e.png\".to_string()),\n\n },\n\n balance: \"5002\".to_string(),\n\n fiat_balance: \"0.00142\".to_string(),\n\n fiat_conversion: \"28.54620\".to_string(),\n\n };\n\n\n\n let token_to_usd = BigDecimal::from_str(\"28.5462\").unwrap();\n\n let usd_to_fiat = BigDecimal::from_str(\"1.0\").unwrap();\n", "file_path": "src/common/converters/tests/balances_v2.rs", "rank": 77, "score": 124450.22021187429 }, { "content": "#[test]\n\nfn valid_url_with_query_params() {\n\n let input_url = \"https://cloudflare-ipfs.com/ipfs/QmQs6CUbMUyKe3Sa3tU3HcnWWzsuCk8oJEk8CZKhRcJfEh/?foo=bar&bla=blubb\";\n\n\n\n let actual = build_manifest_url(input_url).unwrap();\n\n assert_eq!(actual, \"https://cloudflare-ipfs.com/ipfs/QmQs6CUbMUyKe3Sa3tU3HcnWWzsuCk8oJEk8CZKhRcJfEh/manifest.json\")\n\n}\n", "file_path": "src/utils/tests/urls.rs", "rank": 78, "score": 123665.42591131284 }, { "content": "#[test]\n\nfn valid_url_with_trailing_slash() {\n\n let input_url = \"https://happy.path/\";\n\n\n\n let actual = build_manifest_url(input_url).unwrap();\n\n assert_eq!(actual, \"https://happy.path/manifest.json\")\n\n}\n\n\n", "file_path": "src/utils/tests/urls.rs", "rank": 79, "score": 123665.42591131284 }, { "content": "#[test]\n\nfn valid_url_with_longer_path() {\n\n let input_url =\n\n \"https://cloudflare-ipfs.com/ipfs/QmQs6CUbMUyKe3Sa3tU3HcnWWzsuCk8oJEk8CZKhRcJfEh\";\n\n\n\n let actual = build_manifest_url(input_url).unwrap();\n\n assert_eq!(actual, 
\"https://cloudflare-ipfs.com/ipfs/QmQs6CUbMUyKe3Sa3tU3HcnWWzsuCk8oJEk8CZKhRcJfEh/manifest.json\")\n\n}\n\n\n", "file_path": "src/utils/tests/urls.rs", "rank": 80, "score": 123665.42591131284 }, { "content": "#[test]\n\nfn valid_url_no_trailing_slash() {\n\n let input_url = \"https://happy.path\";\n\n\n\n let actual = build_manifest_url(input_url).unwrap();\n\n assert_eq!(actual, \"https://happy.path/manifest.json\")\n\n}\n\n\n", "file_path": "src/utils/tests/urls.rs", "rank": 81, "score": 123665.42591131284 }, { "content": "#[test]\n\nfn native_token_balance() {\n\n std::env::set_var(\"FEATURE_FLAG_BALANCES_RATE_IMPLEMENTATION\", \"false\");\n\n let balance_dto = serde_json::from_str::<BalanceDto>(BALANCE_ETHER).unwrap();\n\n\n\n let expected = Balance {\n\n token_info: TokenInfo {\n\n token_type: TokenType::NativeToken,\n\n address: \"0x0000000000000000000000000000000000000000\".to_string(),\n\n decimals: 18,\n\n symbol: \"ETH\".to_string(),\n\n name: \"Ether\".to_string(),\n\n logo_uri: Some(\"https://test.token.image.url\".to_string()),\n\n },\n\n balance: \"7457594371050000001\".to_string(),\n\n fiat_balance: \"2523.7991\".to_string(),\n\n fiat_conversion: \"338.42\".to_string(),\n\n };\n\n\n\n let usd_to_fiat = 1.0;\n\n let native_currency = NativeCurrency {\n\n name: \"Ether\".to_string(),\n\n symbol: \"ETH\".to_string(),\n\n decimals: 18,\n\n logo_uri: \"https://test.token.image.url\".to_string(),\n\n };\n\n let actual = balance_dto.to_balance(usd_to_fiat, &native_currency);\n\n\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "src/common/converters/tests/balances.rs", "rank": 82, "score": 123537.06444798996 }, { "content": "#[test]\n\nfn valid_url_with_trailing_slash_and_port() {\n\n let input_url = \"https://happy.path:8000/\";\n\n\n\n let actual = build_manifest_url(input_url).unwrap();\n\n assert_eq!(actual, \"https://happy.path:8000/manifest.json\")\n\n}\n\n\n", "file_path": "src/utils/tests/urls.rs", "rank": 83, "score": 122322.4026806426 
}, { "content": "#[test]\n\nfn native_token_balance() {\n\n std::env::set_var(\"FEATURE_FLAG_BALANCES_RATE_IMPLEMENTATION\", \"true\");\n\n let balance_dto = serde_json::from_str::<BalanceDto>(BALANCE_ETHER).unwrap();\n\n\n\n let expected = Balance {\n\n token_info: TokenInfo {\n\n token_type: TokenType::NativeToken,\n\n address: \"0x0000000000000000000000000000000000000000\".to_string(),\n\n decimals: 18,\n\n symbol: \"ETH\".to_string(),\n\n name: \"Ether\".to_string(),\n\n logo_uri: Some(\"https://test.token.image.url\".to_string()),\n\n },\n\n balance: \"7457594371050000001\".to_string(),\n\n fiat_balance: \"2523.79908\".to_string(),\n\n fiat_conversion: \"338.420\".to_string(),\n\n };\n\n\n\n let token_to_usd = BigDecimal::from_str(\"338.42\").unwrap();\n\n let usd_to_fiat = BigDecimal::from_str(\"1.0\").unwrap();\n", "file_path": "src/common/converters/tests/balances_v2.rs", "rank": 84, "score": 122195.39522564062 }, { "content": "#[test]\n\nfn disallow_localhost() {\n\n let input_url = \"https://localhost\";\n\n\n\n match build_manifest_url(input_url) {\n\n Err(error) => {\n\n assert_eq!(error.details.message.unwrap(), \"Localhost not accepted\");\n\n }\n\n _ => {\n\n panic!(\"Test didn't error as expected\")\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/utils/tests/urls.rs", "rank": 85, "score": 116760.47926269751 }, { "content": "fn setup_exchange_env() {\n\n std::env::set_var(\"EXCHANGE_API_BASE_URI\", EXCHANGE_API_BASE_URI);\n\n std::env::set_var(\"EXCHANGE_API_KEY\", EXCHANGE_API_KEY);\n\n}\n\n\n\n#[rocket::async_test]\n\nasync fn available_currency_codes() {\n\n setup_exchange_env();\n\n let cache = Arc::new(create_service_cache()) as Arc<dyn Cache>;\n\n cache.invalidate_pattern(\"*\");\n\n\n\n let mut mock_http_client = MockHttpClient::new();\n\n let request = Request::new(format!(\n\n \"{}?access_key={}\",\n\n EXCHANGE_API_BASE_URI, EXCHANGE_API_KEY\n\n ));\n\n\n\n mock_http_client\n\n .expect_get()\n\n .times(1)\n", "file_path": 
"src/providers/tests/fiat.rs", "rank": 86, "score": 115153.49173639584 }, { "content": "#[test]\n\nfn disallow_ip_address() {\n\n let input_url = \"http://127.0.0.1\";\n\n\n\n match build_manifest_url(input_url) {\n\n Err(error) => {\n\n assert_eq!(error.details.message.unwrap(), \"IP address not accepted\");\n\n }\n\n _ => {\n\n panic!(\"Test didn't error as expected\")\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/utils/tests/urls.rs", "rank": 87, "score": 115113.23537518248 }, { "content": "#[test]\n\nfn disallow_non_http_schemes() {\n\n let input_url = \"ipfs://localhost\";\n\n\n\n match build_manifest_url(input_url) {\n\n Err(error) => {\n\n assert_eq!(error.details.message.unwrap(), \"Invalid scheme\");\n\n }\n\n _ => {\n\n panic!(\"Test didn't error as expected\")\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/utils/tests/urls.rs", "rank": 88, "score": 113523.42092968948 }, { "content": "#[test]\n\nfn invalidation_scope_both_to_string() {\n\n assert_eq!(\n\n CACHE_REQS_RESP_PREFIX,\n\n InvalidationScope::Both.invalidation_scope_string()\n\n )\n\n}\n\n\n", "file_path": "src/cache/tests/cache_operations.rs", "rank": 89, "score": 113483.6397800145 }, { "content": "#[test]\n\nfn deserialise_params_value_as_string() {\n\n let json = r#\"\n\n {\n\n \"name\": \"_threshold\",\n\n \"type\": \"uint256\",\n\n \"value\": \"2\"\n\n }\n\n \"#;\n\n\n\n let actual = serde_json::from_str::<Parameter>(json);\n\n\n\n let expected = Parameter {\n\n name: \"_threshold\".to_string(),\n\n param_type: \"uint256\".to_string(),\n\n value: \"2\".to_string().into(),\n\n value_decoded: None,\n\n };\n\n\n\n assert!(actual.is_ok());\n\n assert_eq!(expected, actual.unwrap());\n\n}\n\n\n", "file_path": "src/common/tests/common.rs", "rank": 90, "score": 113483.6397800145 }, { "content": "#[test]\n\nfn invalidation_pattern_any_string() {\n\n let invalidation_pattern =\n\n InvalidationPattern::Any(InvalidationScope::Both, \"some_address\".to_string());\n\n let expected = 
format!(\"{}*some_address*\", CACHE_REQS_RESP_PREFIX);\n\n\n\n let actual = invalidation_pattern.to_pattern_string();\n\n\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "src/cache/tests/cache_operations.rs", "rank": 91, "score": 113483.6397800145 }, { "content": "#[test]\n\nfn cache_with_code_unwrap_err() {\n\n let cached_with_code = CachedWithCode {\n\n code: 418,\n\n data: \"teapot\".to_string(),\n\n };\n\n let expected = ApiError {\n\n status: 418,\n\n details: ErrorDetails {\n\n code: 42,\n\n message: Some(String::from(\"teapot\")),\n\n arguments: None,\n\n debug: None,\n\n },\n\n };\n\n\n\n assert_eq!(cached_with_code.to_result().expect_err(\"\"), expected);\n\n}\n", "file_path": "src/cache/tests/cache_inner.rs", "rank": 92, "score": 112099.94854501737 }, { "content": "#[test]\n\nfn use_legacy_domain_separator_v130() {\n\n let version = Version::parse(\"1.3.0\").ok();\n\n\n\n assert_eq!(false, use_legacy_domain_separator(version));\n\n}\n\n\n", "file_path": "src/utils/tests/transactions.rs", "rank": 93, "score": 112099.94854501737 }, { "content": "#[test]\n\nfn cache_with_code_unwrap_ok() {\n\n let cached_with_code = CachedWithCode {\n\n code: 200,\n\n data: \"not a teapot\".to_string(),\n\n };\n\n\n\n assert_eq!(cached_with_code.to_result().unwrap(), \"not a teapot\");\n\n}\n\n\n", "file_path": "src/cache/tests/cache_inner.rs", "rank": 94, "score": 112099.94854501737 }, { "content": "#[test]\n\nfn use_legacy_domain_separator_legacy() {\n\n let version = Version::parse(\"1.1.1\").ok();\n\n\n\n assert_eq!(true, use_legacy_domain_separator(version));\n\n}\n", "file_path": "src/utils/tests/transactions.rs", "rank": 95, "score": 112099.94854501737 }, { "content": "#[test]\n\nfn invalidation_pattern_transfers_string() {\n\n let invalidation_pattern =\n\n InvalidationPattern::Transfers(InvalidationScope::Requests, \"some_address\".to_string());\n\n let expected = format!(\"{}*/some_address/*transfer*\", CACHE_REQS_PREFIX);\n\n\n\n let actual = 
invalidation_pattern.to_pattern_string();\n\n\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "src/cache/tests/cache_operations.rs", "rank": 96, "score": 111948.98448980568 }, { "content": "#[test]\n\nfn invalidation_pattern_tokens_string() {\n\n let invalidation_pattern = InvalidationPattern::Tokens {\n\n chain_id: \"4\".to_string(),\n\n };\n\n let expected = format!(\"{}_{}\", TOKENS_KEY_BASE.to_string(), \"4\");\n\n\n\n let actual = invalidation_pattern.to_pattern_string();\n\n\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "src/cache/tests/cache_operations.rs", "rank": 97, "score": 111948.98448980568 }, { "content": "#[test]\n\nfn invalidation_pattern_transactions_string() {\n\n let invalidation_pattern =\n\n InvalidationPattern::Transactions(InvalidationScope::Both, \"some_address\".to_string());\n\n let expected = format!(\"{}*/some_address/*transactions/*\", CACHE_REQS_RESP_PREFIX);\n\n\n\n let actual = invalidation_pattern.to_pattern_string();\n\n\n\n assert_eq!(expected, actual);\n\n}\n\n\n", "file_path": "src/cache/tests/cache_operations.rs", "rank": 98, "score": 111948.98448980568 }, { "content": " trusted.unwrap_or(false),\n\n exclude_spam.unwrap_or(true),\n\n )\n\n .right_future()\n\n }\n\n })\n\n .execute()\n\n .await\n\n}\n\n\n\n/**\n\n * `/v1/balances/supported-fiat-codes` <br/>\n\n * Returns [Vec] of [String]\n\n *\n\n * Supported fiat codes for balances\n\n * `/v1/balances/supported-fiat-codes` : returns the supported fiat codes to be included int the `<fiat>` segment of the balance endpoint.\n\n * The entries are sorted alphabetically, with the exception of `USD` and `EUR` being placed in the top of the list in that order.\n\n */\n\n#[get(\"/v1/balances/supported-fiat-codes\")]\n\npub async fn get_supported_fiat(context: RequestContext) -> ApiResult<content::Json<String>> {\n\n CacheResponse::new(&context)\n\n .resp_generator(|| fiat_codes(&context))\n\n .execute()\n\n .await\n\n}\n", "file_path": 
"src/routes/balances/routes.rs", "rank": 99, "score": 48.469286860679496 } ]
Rust
editor/src/inspector/handlers/collider.rs
thomasmatecki/rg3d
e1958b6615ae0a826e83614e8df45a1a6f821f82
use crate::{make_command, physics::Collider, scene::commands::physics::*, SceneCommand}; use rg3d::{ core::pool::Handle, gui::inspector::{FieldKind, PropertyChanged}, physics3d::desc::*, }; use std::any::TypeId; pub fn handle_collider_property_changed( args: &PropertyChanged, handle: Handle<Collider>, collider: &Collider, ) -> Option<SceneCommand> { match args.value { FieldKind::Object(ref value) => match args.name.as_ref() { Collider::FRICTION => { make_command!(SetColliderFrictionCommand, handle, value) } Collider::RESTITUTION => { make_command!(SetColliderRestitutionCommand, handle, value) } Collider::IS_SENSOR => { make_command!(SetColliderIsSensorCommand, handle, value) } Collider::DENSITY => { make_command!(SetColliderDensityCommand, handle, value) } Collider::TRANSLATION => { make_command!(SetColliderPositionCommand, handle, value) } Collider::ROTATION => { make_command!(SetColliderRotationCommand, handle, value) } _ => None, }, FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() { Collider::COLLISION_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderCollisionGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderCollisionGroupsFilterCommand, handle, value) } _ => None, }, _ => None, }, Collider::SOLVER_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderSolverGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderSolverGroupsFilterCommand, handle, value) } _ => None, }, _ => None, }, Collider::SHAPE => { if inner_property.owner_type_id == TypeId::of::<CuboidDesc>() { handle_cuboid_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<BallDesc>() { 
handle_ball_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CylinderDesc>() { handle_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<RoundCylinderDesc>() { handle_round_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<ConeDesc>() { handle_cone_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CapsuleDesc>() { handle_capsule_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<SegmentDesc>() { handle_segment_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<TriangleDesc>() { handle_triangle_desc_property_changed(handle, collider, inner_property) } else { None } } _ => None, }, _ => None, } } fn handle_ball_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Ball(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { BallDesc::RADIUS => make_command!(SetBallRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_cuboid_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cuboid(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CuboidDesc::HALF_EXTENTS => { make_command!(SetCuboidHalfExtentsCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> 
{ if let ColliderShapeDesc::Cylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CylinderDesc::HALF_HEIGHT => { make_command!(SetCylinderHalfHeightCommand, handle, value) } CylinderDesc::RADIUS => { make_command!(SetCylinderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_round_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::RoundCylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { RoundCylinderDesc::HALF_HEIGHT => { make_command!(SetRoundCylinderHalfHeightCommand, handle, value) } RoundCylinderDesc::RADIUS => { make_command!(SetRoundCylinderRadiusCommand, handle, value) } RoundCylinderDesc::BORDER_RADIUS => { make_command!(SetRoundCylinderBorderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cone_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cone(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { ConeDesc::HALF_HEIGHT => { make_command!(SetConeHalfHeightCommand, handle, value) } ConeDesc::RADIUS => make_command!(SetConeRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_capsule_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Capsule(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CapsuleDesc::BEGIN => make_command!(SetCapsuleBeginCommand, handle, value), CapsuleDesc::END => make_command!(SetCapsuleEndCommand, 
handle, value), CapsuleDesc::RADIUS => { make_command!(SetCapsuleRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_segment_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Segment(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { SegmentDesc::BEGIN => make_command!(SetSegmentBeginCommand, handle, value), SegmentDesc::END => make_command!(SetSegmentEndCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_triangle_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Triangle(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { TriangleDesc::A => make_command!(SetTriangleACommand, handle, value), TriangleDesc::B => make_command!(SetTriangleBCommand, handle, value), TriangleDesc::C => make_command!(SetTriangleCCommand, handle, value), _ => None, }, _ => None, } } else { None } }
use crate::{make_command, physics::Collider, scene::commands::physics::*, SceneCommand}; use rg3d::{ core::pool::Handle, gui::inspector::{FieldKind, PropertyChanged}, physics3d::desc::*, }; use std::any::TypeId; pub fn handle_collider_property_changed( args: &PropertyChanged, handle: Handle<Collider>, collider: &Collider, ) -> Option<SceneCommand> { match args.value { FieldKind::Object(ref value) => match args.name.as_ref() { Collider::FRICTION => { make_command!(SetColliderFrictionCommand, handle, value) } Collider::RESTITUTION => { make_command!(SetColliderRestitutionCommand, handle, value) } Collider::IS_SENSOR => { make_command!(SetColliderIsSensorCommand, handle, value) } Collider::DENSITY => { make_command!(SetColliderDensityCommand, handle, value) } Collider::TRANSLATION => { make_command!(SetColliderPositionCommand, handle, value) } Collider::ROTATION => { make_command!(SetColliderRotationCommand, handle, value) } _ => None, }, FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() { Collider::COLLISION_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderCollisionGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderCollisionGroupsFilterCommand, handle, value) } _ => None, }, _ => None, }, Collider::SOLVER_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderSolverGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderSolverGroupsFilterCommand, handle, value) } _ =>
fn handle_ball_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Ball(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { BallDesc::RADIUS => make_command!(SetBallRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_cuboid_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cuboid(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CuboidDesc::HALF_EXTENTS => { make_command!(SetCuboidHalfExtentsCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CylinderDesc::HALF_HEIGHT => { make_command!(SetCylinderHalfHeightCommand, handle, value) } CylinderDesc::RADIUS => { make_command!(SetCylinderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_round_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::RoundCylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { RoundCylinderDesc::HALF_HEIGHT => { make_command!(SetRoundCylinderHalfHeightCommand, handle, value) } RoundCylinderDesc::RADIUS => { make_command!(SetRoundCylinderRadiusCommand, handle, value) } RoundCylinderDesc::BORDER_RADIUS => { 
make_command!(SetRoundCylinderBorderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cone_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cone(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { ConeDesc::HALF_HEIGHT => { make_command!(SetConeHalfHeightCommand, handle, value) } ConeDesc::RADIUS => make_command!(SetConeRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_capsule_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Capsule(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CapsuleDesc::BEGIN => make_command!(SetCapsuleBeginCommand, handle, value), CapsuleDesc::END => make_command!(SetCapsuleEndCommand, handle, value), CapsuleDesc::RADIUS => { make_command!(SetCapsuleRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_segment_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Segment(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { SegmentDesc::BEGIN => make_command!(SetSegmentBeginCommand, handle, value), SegmentDesc::END => make_command!(SetSegmentEndCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_triangle_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Triangle(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match 
property_changed.name.as_ref() { TriangleDesc::A => make_command!(SetTriangleACommand, handle, value), TriangleDesc::B => make_command!(SetTriangleBCommand, handle, value), TriangleDesc::C => make_command!(SetTriangleCCommand, handle, value), _ => None, }, _ => None, } } else { None } }
None, }, _ => None, }, Collider::SHAPE => { if inner_property.owner_type_id == TypeId::of::<CuboidDesc>() { handle_cuboid_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<BallDesc>() { handle_ball_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CylinderDesc>() { handle_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<RoundCylinderDesc>() { handle_round_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<ConeDesc>() { handle_cone_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CapsuleDesc>() { handle_capsule_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<SegmentDesc>() { handle_segment_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<TriangleDesc>() { handle_triangle_desc_property_changed(handle, collider, inner_property) } else { None } } _ => None, }, _ => None, } }
function_block-function_prefix_line
[ { "content": "/// Returns a list of `pub const [VARIANT_]FIELD: &'static str = \"key_value\"`;\n\npub fn quote_prop_keys(ty_args: &args::TypeArgs) -> TokenStream2 {\n\n let mut prop_idents = Vec::new();\n\n let mut prop_names = Vec::new();\n\n\n\n match &ty_args.data {\n\n ast::Data::Struct(field_args) => {\n\n for (nth, field) in field_args.fields.iter().enumerate() {\n\n // don't expose uninspectable fields' properties\n\n if field.expand || field.skip {\n\n continue;\n\n }\n\n\n\n let prop_ident = self::struct_field_prop(ty_args, nth, field);\n\n let prop_name = utils::prop_name(nth, field);\n\n\n\n prop_idents.push(prop_ident);\n\n prop_names.push(prop_name);\n\n }\n\n }\n\n ast::Data::Enum(variants) => {\n", "file_path": "rg3d-core-derive/src/inspect/utils/prop_keys.rs", "rank": 1, "score": 239962.6997199605 }, { "content": "pub fn prop_name(nth: usize, field: &args::FieldArgs) -> String {\n\n field.name.clone().unwrap_or_else(|| {\n\n let field_ident = match &field.ident {\n\n Some(ident) => quote!(#ident),\n\n None => {\n\n let nth_field = Index::from(nth);\n\n quote!(#nth_field)\n\n }\n\n };\n\n\n\n field_ident.to_string()\n\n })\n\n}\n", "file_path": "rg3d-core-derive/src/inspect/utils.rs", "rank": 2, "score": 236645.65977210304 }, { "content": "#[inline]\n\npub fn type_name_of<T>(_: T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\n#[cfg(feature = \"enable_profiler\")]\n\n#[macro_export]\n\nmacro_rules! scope_profile {\n\n () => {\n\n let function_name = {\n\n fn scope() {}\n\n $crate::profiler::type_name_of(scope)\n\n };\n\n let _scope_guard = $crate::profiler::ScopeDefinition::new(function_name, line!());\n\n };\n\n}\n\n\n\n#[cfg(not(feature = \"enable_profiler\"))]\n\n#[macro_export]\n\nmacro_rules! 
scope_profile {\n\n () => {};\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 3, "score": 228016.5778100115 }, { "content": "/// \"Transmutes\" value of any sized type to a slice of bytes.\n\npub fn value_as_u8_slice<T: Sized>(v: &T) -> &'_ [u8] {\n\n // SAFETY: It is safe to reinterpret data to read it.\n\n unsafe { std::slice::from_raw_parts(v as *const T as *const u8, std::mem::size_of::<T>()) }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 4, "score": 226859.75531821547 }, { "content": "pub fn gen_inspect_fn_body(\n\n field_prefix: FieldPrefix,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n // `inspect` function body, consisting of a sequence of quotes\n\n let mut quotes = Vec::new();\n\n\n\n // 1. collect non-expanible field properties\n\n let props = field_args\n\n .fields\n\n .iter()\n\n .enumerate()\n\n .filter(|(_i, f)| !(f.skip || f.expand || f.expand_subtree))\n\n .map(|(i, field)| self::quote_field_prop(field_prefix, i, field, field_args.style));\n\n\n\n quotes.push(quote! 
{\n\n let mut props = Vec::new();\n\n #(props.push(#props);)*\n\n });\n\n\n", "file_path": "rg3d-core-derive/src/inspect/utils.rs", "rank": 5, "score": 225556.03478712638 }, { "content": "pub fn make_arrow(\n\n ctx: &mut BuildContext,\n\n orientation: ArrowDirection,\n\n size: f32,\n\n) -> Handle<UiNode> {\n\n VectorImageBuilder::new(\n\n WidgetBuilder::new()\n\n .with_foreground(BRUSH_BRIGHT)\n\n .with_horizontal_alignment(HorizontalAlignment::Center)\n\n .with_vertical_alignment(VerticalAlignment::Center),\n\n )\n\n .with_primitives(vec![match orientation {\n\n ArrowDirection::Top => Primitive::Triangle {\n\n points: [\n\n Vector2::new(size * 0.5, 0.0),\n\n Vector2::new(size, size),\n\n Vector2::new(0.0, size),\n\n ],\n\n },\n\n ArrowDirection::Bottom => Primitive::Triangle {\n", "file_path": "rg3d-ui/src/utils.rs", "rank": 6, "score": 225385.98437442986 }, { "content": "#[inline]\n\npub fn barycentric_to_world(\n\n bary: (f32, f32, f32),\n\n pa: Vector3<f32>,\n\n pb: Vector3<f32>,\n\n pc: Vector3<f32>,\n\n) -> Vector3<f32> {\n\n pa.scale(bary.0) + pb.scale(bary.1) + pc.scale(bary.2)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 7, "score": 221743.19922418456 }, { "content": "pub fn make_mark(\n\n ctx: &mut BuildContext,\n\n text: &str,\n\n column: usize,\n\n color: Color,\n\n) -> Handle<UiNode> {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(0)\n\n .on_column(column)\n\n .with_background(Brush::Solid(color))\n\n .with_foreground(Brush::Solid(Color::TRANSPARENT))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n .build(ctx)\n\n}\n", "file_path": "rg3d-ui/src/vec/mod.rs", "rank": 8, "score": 221743.19922418456 }, { "content": "#[inline]\n\npub fn get_barycentric_coords(\n\n p: &Vector3<f32>,\n\n a: &Vector3<f32>,\n\n b: &Vector3<f32>,\n\n c: &Vector3<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = *b - *a;\n\n 
let v1 = *c - *a;\n\n let v2 = *p - *a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let denom = d00 * d11 - d01.powi(2);\n\n\n\n let v = (d11 * d20 - d01 * d21) / denom;\n\n let w = (d00 * d21 - d01 * d20) / denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 9, "score": 218270.6210475561 }, { "content": "#[inline]\n\npub fn ray_rect_intersection(\n\n rect: Rect<f32>,\n\n origin: Vector2<f32>,\n\n dir: Vector2<f32>,\n\n) -> Option<IntersectionResult> {\n\n let min = rect.left_top_corner();\n\n let max = rect.right_bottom_corner();\n\n\n\n let (mut tmin, mut tmax) = if dir.x >= 0.0 {\n\n ((min.x - origin.x) / dir.x, (max.x - origin.x) / dir.x)\n\n } else {\n\n ((max.x - origin.x) / dir.x, (min.x - origin.x) / dir.x)\n\n };\n\n\n\n let (tymin, tymax) = if dir.y >= 0.0 {\n\n ((min.y - origin.y) / dir.y, (max.y - origin.y) / dir.y)\n\n } else {\n\n ((max.y - origin.y) / dir.y, (min.y - origin.y) / dir.y)\n\n };\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 10, "score": 218270.6210475561 }, { "content": "pub fn create_impl(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::create_impl_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! 
{\n\n impl #impl_generics Visit for #ty_ident #ty_generics #where_clause {\n\n fn visit(\n\n &mut self,\n\n name: &str,\n\n visitor: &mut Visitor,\n\n ) -> VisitResult {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/visit/utils.rs", "rank": 11, "score": 218270.6210475561 }, { "content": "pub fn make_expander_container(\n\n layer_index: usize,\n\n property_name: &str,\n\n header: Handle<UiNode>,\n\n content: Handle<UiNode>,\n\n ctx: &mut BuildContext,\n\n) -> Handle<UiNode> {\n\n ExpanderBuilder::new(WidgetBuilder::new())\n\n .with_checkbox(make_expander_check_box(layer_index, property_name, ctx))\n\n .with_expander_column(Column::strict(NAME_COLUMN_WIDTH))\n\n .with_expanded(true)\n\n .with_header(header)\n\n .with_content(content)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "rg3d-ui/src/inspector/mod.rs", "rank": 12, "score": 218270.6210475561 }, { "content": "#[inline]\n\npub fn is_point_inside_2d_triangle(\n\n point: Vector2<f32>,\n\n pt_a: Vector2<f32>,\n\n pt_b: Vector2<f32>,\n\n pt_c: Vector2<f32>,\n\n) -> bool {\n\n let ba = pt_b - pt_a;\n\n let ca = pt_c - pt_a;\n\n\n\n let vp = point - pt_a;\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot_02 = ca.dot(&vp);\n\n let dot_12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot_02 - ca_dot_ba * dot_12) * inv_denom;\n\n let v = (ca_dot_ca * dot_12 - ca_dot_ba * dot_02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 13, "score": 218270.6210475561 }, { "content": "#[inline]\n\npub fn vec3_to_vec2_by_plane(\n\n plane_class: PlaneClass,\n\n normal: Vector3<f32>,\n\n point: Vector3<f32>,\n\n) -> Vector2<f32> {\n\n match plane_class {\n\n PlaneClass::XY => {\n\n if normal.z < 0.0 {\n\n Vector2::new(point.y, point.x)\n\n } else 
{\n\n Vector2::new(point.x, point.y)\n\n }\n\n }\n\n PlaneClass::XZ => {\n\n if normal.y < 0.0 {\n\n Vector2::new(point.x, point.z)\n\n } else {\n\n Vector2::new(point.z, point.x)\n\n }\n\n }\n\n PlaneClass::YZ => {\n\n if normal.x < 0.0 {\n\n Vector2::new(point.z, point.y)\n\n } else {\n\n Vector2::new(point.y, point.z)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 14, "score": 218270.6210475561 }, { "content": "#[inline]\n\npub fn get_barycentric_coords_2d(\n\n p: Vector2<f32>,\n\n a: Vector2<f32>,\n\n b: Vector2<f32>,\n\n c: Vector2<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = b - a;\n\n let v1 = c - a;\n\n let v2 = p - a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let inv_denom = 1.0 / (d00 * d11 - d01.powi(2));\n\n\n\n let v = (d11 * d20 - d01 * d21) * inv_denom;\n\n let w = (d00 * d21 - d01 * d20) * inv_denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 15, "score": 218270.6210475561 }, { "content": "pub fn handle_joint_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Joint>,\n\n joint: &Joint,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Joint::BODY_1 => {\n\n make_command!(SetJointBody1Command, handle, value)\n\n }\n\n Joint::BODY_2 => {\n\n make_command!(SetJointBody2Command, handle, value)\n\n }\n\n _ => None,\n\n },\n\n FieldKind::Inspectable(ref inner) => {\n\n if let Joint::PARAMS = args.name.as_ref() {\n\n let params = &joint.params;\n\n if inner.owner_type_id == TypeId::of::<BallJointDesc>() {\n\n handle_ball_joint_property_changed(inner, handle, params)\n", "file_path": "editor/src/inspector/handlers/joint.rs", "rank": 16, "score": 217489.13511729683 }, { "content": "pub fn make_simple_tooltip(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n 
BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(false)\n\n .with_foreground(Brush::Solid(Color::opaque(160, 160, 160)))\n\n .with_max_size(Vector2::new(250.0, f32::INFINITY))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_wrap(WrapMode::Word)\n\n .with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n .build(ctx)\n\n}\n", "file_path": "rg3d-ui/src/utils.rs", "rank": 17, "score": 216599.09488857238 }, { "content": "pub fn handle_transform_property_changed(\n\n args: &PropertyChanged,\n\n node_handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n \"local_position\" => Some(SceneCommand::new(MoveNodeCommand::new(\n\n node_handle,\n\n **node.local_transform().position(),\n\n *value.cast_value()?,\n\n ))),\n\n \"local_rotation\" => Some(SceneCommand::new(RotateNodeCommand::new(\n\n node_handle,\n\n **node.local_transform().rotation(),\n\n *value.cast_value()?,\n\n ))),\n\n \"local_scale\" => Some(SceneCommand::new(ScaleNodeCommand::new(\n\n node_handle,\n\n **node.local_transform().scale(),\n", "file_path": "editor/src/inspector/handlers/node/transform.rs", "rank": 18, "score": 214266.51620618318 }, { "content": "pub fn handle_ball_joint_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Joint>,\n\n params: &JointParamsDesc,\n\n) -> Option<SceneCommand> {\n\n if let JointParamsDesc::BallJoint(_) = params {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n BallJointDesc::LOCAL_ANCHOR_1 => Some(SceneCommand::new(\n\n SetBallJointAnchor1Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n BallJointDesc::LOCAL_ANCHOR_2 => Some(SceneCommand::new(\n\n SetBallJointAnchor2Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/joint.rs", "rank": 
19, "score": 214266.51620618318 }, { "content": "pub fn handle_mesh_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if let Node::Mesh(_) = node {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Mesh::CAST_SHADOWS => {\n\n make_command!(SetMeshCastShadowsCommand, handle, value)\n\n }\n\n Mesh::RENDER_PATH => {\n\n make_command!(SetMeshRenderPathCommand, handle, value)\n\n }\n\n Mesh::DECAL_LAYER_INDEX => {\n\n make_command!(SetMeshDecalLayerIndexCommand, handle, value)\n\n }\n\n _ => None,\n\n },\n\n FieldKind::Collection(ref args) => match **args {\n", "file_path": "editor/src/inspector/handlers/node/mesh.rs", "rank": 20, "score": 214266.51620618318 }, { "content": "pub fn handle_sprite_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if let Node::Sprite(_) = node {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Sprite::TEXTURE => {\n\n make_command!(SetSpriteTextureCommand, handle, value)\n\n }\n\n Sprite::COLOR => {\n\n make_command!(SetSpriteColorCommand, handle, value)\n\n }\n\n Sprite::SIZE => {\n\n make_command!(SetSpriteSizeCommand, handle, value)\n\n }\n\n Sprite::ROTATION => {\n\n make_command!(SetSpriteRotationCommand, handle, value)\n\n }\n", "file_path": "editor/src/inspector/handlers/node/sprite.rs", "rank": 21, "score": 214266.51620618318 }, { "content": "pub fn handle_decal_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if let Node::Decal(_) = node {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Decal::DIFFUSE_TEXTURE => {\n\n make_command!(SetDecalDiffuseTextureCommand, handle, value)\n\n }\n\n Decal::NORMAL_TEXTURE => {\n\n make_command!(SetDecalNormalTextureCommand, handle, value)\n\n }\n\n 
Decal::COLOR => {\n\n make_command!(SetDecalColorCommand, handle, value)\n\n }\n\n Decal::LAYER => {\n\n make_command!(SetDecalLayerIndexCommand, handle, value)\n\n }\n", "file_path": "editor/src/inspector/handlers/node/decal.rs", "rank": 22, "score": 214266.51620618318 }, { "content": "pub fn handle_generic_source_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<SoundSource>,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n match args.name.as_ref() {\n\n GenericSource::NAME => {\n\n make_command!(SetSoundSourceNameCommand, handle, value)\n\n }\n\n GenericSource::GAIN => {\n\n make_command!(SetSoundSourceGainCommand, handle, value)\n\n }\n\n GenericSource::BUFFER => {\n\n make_command!(SetSoundSourceBufferCommand, handle, value)\n\n }\n\n GenericSource::PANNING => {\n\n make_command!(SetSoundSourcePanningCommand, handle, value)\n\n }\n\n GenericSource::PITCH => {\n", "file_path": "editor/src/inspector/handlers/sound.rs", "rank": 23, "score": 214266.51620618318 }, { "content": "pub fn handle_terrain_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n graph: &Graph,\n\n) -> Option<SceneCommand> {\n\n if let Node::Terrain(_) = node {\n\n match args.value {\n\n FieldKind::Collection(ref collection_changed) => match args.name.as_ref() {\n\n Terrain::LAYERS => match &**collection_changed {\n\n CollectionChanged::Add => Some(SceneCommand::new(AddTerrainLayerCommand::new(\n\n handle, graph,\n\n ))),\n\n CollectionChanged::Remove(index) => Some(SceneCommand::new(\n\n DeleteTerrainLayerCommand::new(handle, *index),\n\n )),\n\n CollectionChanged::ItemChanged { index, property } => {\n\n assert_eq!(property.owner_type_id, TypeId::of::<Layer>());\n\n match property.value {\n\n FieldKind::Object(ref args) => match property.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/terrain.rs", "rank": 24, "score": 214266.51620618318 }, { "content": "pub fn 
handle_prismatic_joint_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Joint>,\n\n params: &JointParamsDesc,\n\n) -> Option<SceneCommand> {\n\n if let JointParamsDesc::PrismaticJoint(_) = params {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n PrismaticJointDesc::LOCAL_ANCHOR_1 => Some(SceneCommand::new(\n\n SetPrismaticJointAnchor1Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n PrismaticJointDesc::LOCAL_ANCHOR_2 => Some(SceneCommand::new(\n\n SetPrismaticJointAnchor2Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n PrismaticJointDesc::LOCAL_AXIS_1 => Some(SceneCommand::new(\n\n SetPrismaticJointAxis1Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n PrismaticJointDesc::LOCAL_AXIS_2 => Some(SceneCommand::new(\n\n SetPrismaticJointAxis2Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/joint.rs", "rank": 25, "score": 214266.51620618318 }, { "content": "pub fn handle_fixed_joint_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Joint>,\n\n params: &JointParamsDesc,\n\n) -> Option<SceneCommand> {\n\n if let JointParamsDesc::FixedJoint(_) = params {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n FixedJointDesc::LOCAL_ANCHOR_1_TRANSLATION => Some(SceneCommand::new(\n\n SetFixedJointAnchor1TranslationCommand::new(\n\n handle,\n\n value.cast_value().cloned()?,\n\n ),\n\n )),\n\n FixedJointDesc::LOCAL_ANCHOR_2_TRANSLATION => Some(SceneCommand::new(\n\n SetFixedJointAnchor2TranslationCommand::new(\n\n handle,\n\n value.cast_value().cloned()?,\n\n ),\n\n )),\n", "file_path": "editor/src/inspector/handlers/joint.rs", "rank": 26, "score": 214266.51620618318 }, { "content": "pub fn handle_camera_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: 
&Node,\n\n) -> Option<SceneCommand> {\n\n if let Node::Camera(camera) = node {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Camera::EXPOSURE => Some(SceneCommand::new(SetExposureCommand::new(\n\n handle,\n\n *value.cast_value::<Exposure>()?,\n\n ))),\n\n Camera::Z_NEAR => {\n\n make_command!(SetZNearCommand, handle, value)\n\n }\n\n Camera::Z_FAR => {\n\n make_command!(SetZFarCommand, handle, value)\n\n }\n\n Camera::FOV => {\n\n make_command!(SetFovCommand, handle, value)\n", "file_path": "editor/src/inspector/handlers/node/camera.rs", "rank": 27, "score": 214266.51620618318 }, { "content": "pub fn handle_spatial_source_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<SoundSource>,\n\n source: &SoundSource,\n\n) -> Option<SceneCommand> {\n\n if let SoundSource::Spatial(_) = source {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n SpatialSource::RADIUS => {\n\n make_command!(SetSpatialSoundSourceRadiusCommand, handle, value)\n\n }\n\n SpatialSource::POSITION => {\n\n make_command!(SetSpatialSoundSourcePositionCommand, handle, value)\n\n }\n\n SpatialSource::MAX_DISTANCE => {\n\n make_command!(SetMaxDistanceCommand, handle, value)\n\n }\n\n SpatialSource::ROLLOFF_FACTOR => {\n\n make_command!(SetRolloffFactorCommand, handle, value)\n\n }\n", "file_path": "editor/src/inspector/handlers/sound.rs", "rank": 28, "score": 214266.51620618318 }, { "content": "pub fn handle_revolute_joint_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Joint>,\n\n params: &JointParamsDesc,\n\n) -> Option<SceneCommand> {\n\n if let JointParamsDesc::RevoluteJoint(_) = params {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n RevoluteJointDesc::LOCAL_ANCHOR_1 => Some(SceneCommand::new(\n\n SetRevoluteJointAnchor1Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n RevoluteJointDesc::LOCAL_ANCHOR_2 => 
Some(SceneCommand::new(\n\n SetRevoluteJointAnchor2Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n RevoluteJointDesc::LOCAL_AXIS_1 => Some(SceneCommand::new(\n\n SetRevoluteJointAxis1Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n RevoluteJointDesc::LOCAL_AXIS_2 => Some(SceneCommand::new(\n\n SetRevoluteJointAxis2Command::new(handle, value.cast_value().cloned()?),\n\n )),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/joint.rs", "rank": 29, "score": 214266.51620618318 }, { "content": "pub fn handle_base_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Base::NAME => {\n\n make_command!(SetNameCommand, handle, value)\n\n }\n\n Base::TAG => {\n\n make_command!(SetTagCommand, handle, value)\n\n }\n\n Base::VISIBILITY => {\n\n make_command!(SetVisibleCommand, handle, value)\n\n }\n\n Base::MOBILITY => {\n\n make_command!(SetMobilityCommand, handle, value)\n\n }\n\n Base::PHYSICS_BINDING => {\n", "file_path": "editor/src/inspector/handlers/node/base.rs", "rank": 30, "score": 214266.51620618318 }, { "content": "pub fn handle_base_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n BaseLight::COLOR => {\n\n make_command!(SetLightColorCommand, handle, value)\n\n }\n\n BaseLight::CAST_SHADOWS => {\n\n make_command!(SetLightCastShadowsCommand, handle, value)\n\n }\n\n BaseLight::SCATTER => {\n\n make_command!(SetLightScatterCommand, handle, value)\n\n }\n\n BaseLight::SCATTER_ENABLED => {\n\n make_command!(SetLightScatterEnabledCommand, handle, value)\n\n }\n\n BaseLight::INTENSITY => {\n", "file_path": 
"editor/src/inspector/handlers/node/light.rs", "rank": 31, "score": 211184.61016277847 }, { "content": "pub fn handle_spot_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if let Node::Light(Light::Spot(_)) = node {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n SpotLight::HOTSPOT_CONE_ANGLE => {\n\n make_command!(SetSpotLightHotspotCommand, handle, value)\n\n }\n\n SpotLight::FALLOFF_ANGLE_DELTA => {\n\n make_command!(SetSpotLightFalloffAngleDeltaCommand, handle, value)\n\n }\n\n SpotLight::SHADOW_BIAS => {\n\n make_command!(SetSpotLightShadowBiasCommand, handle, value)\n\n }\n\n SpotLight::DISTANCE => {\n\n make_command!(SetSpotLightDistanceCommand, handle, value)\n\n }\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 32, "score": 211184.61016277847 }, { "content": "pub fn handle_directional_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if let Node::Light(Light::Directional(_)) = node {\n\n match args.value {\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n DirectionalLight::BASE_LIGHT => {\n\n handle_base_light_property_changed(inner, handle, node)\n\n }\n\n DirectionalLight::CSM_OPTIONS => match inner.name.as_ref() {\n\n CsmOptions::SPLIT_OPTIONS => match inner.value {\n\n FieldKind::Inspectable(ref split_options_value) => {\n\n if let FieldKind::Collection(ref collection_changed) =\n\n split_options_value.value\n\n {\n\n if let CollectionChanged::ItemChanged { .. 
} = **collection_changed\n\n {\n\n match split_options_value.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 33, "score": 211184.61016277847 }, { "content": "pub fn handle_point_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if let Node::Light(Light::Point(_)) = node {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n PointLight::SHADOW_BIAS => {\n\n make_command!(SetPointLightShadowBiasCommand, handle, value)\n\n }\n\n PointLight::RADIUS => {\n\n make_command!(SetPointLightRadiusCommand, handle, value)\n\n }\n\n _ => None,\n\n },\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n PointLight::BASE_LIGHT => handle_base_light_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 34, "score": 211184.61016277847 }, { "content": "pub fn handle_rigid_body_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<RigidBody>,\n\n rigid_body: &RigidBody,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n RigidBody::MASS => {\n\n make_command!(SetBodyMassCommand, handle, value)\n\n }\n\n RigidBody::POSITION => {\n\n make_command!(SetBodyPositionCommand, handle, value)\n\n }\n\n RigidBody::ROTATION => {\n\n make_command!(SetBodyRotationCommand, handle, value)\n\n }\n\n RigidBody::LIN_VEL => {\n\n make_command!(SetBodyLinVelCommand, handle, value)\n\n }\n\n RigidBody::ANG_VEL => {\n", "file_path": "editor/src/inspector/handlers/rigid_body.rs", "rank": 35, "score": 211184.61016277847 }, { "content": "/// `<prefix>field.visit(\"name\", visitor);`\n\npub fn create_field_visits<'a>(\n\n // None or `f` when bindings tuple variants. 
NOTE: We can't use `prefix: Ident`\n\n prefix: Option<Ident>,\n\n fields: impl Iterator<Item = &'a args::FieldArgs>,\n\n field_style: ast::Style,\n\n) -> Vec<TokenStream2> {\n\n if field_style == ast::Style::Unit {\n\n // `Unit` (struct/enum variant) has no field to visit.\n\n // We won't even enter this region:\n\n return vec![];\n\n }\n\n\n\n let visit_args = fields\n\n .filter(|field| !field.skip)\n\n .enumerate()\n\n .map(|(field_index, field)| {\n\n let (ident, name) = match field_style {\n\n // `NamedFields { a: f32, .. }`\n\n ast::Style::Struct => {\n\n let ident = field.ident.as_ref().unwrap_or_else(|| unreachable!());\n", "file_path": "rg3d-core-derive/src/visit/utils.rs", "rank": 36, "score": 211121.0120942582 }, { "content": "/// Creates `Inspect` trait impl and field prop keys\n\npub fn create_inspect_impl<'f>(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = &'f args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let prop_keys_impl = self::prop_keys_impl(ty_args);\n\n let trait_impl = self::inspect_trait_impl(ty_args, field_args, impl_body);\n\n\n\n quote! 
{\n\n #prop_keys_impl\n\n #trait_impl\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/inspect/utils.rs", "rank": 37, "score": 211121.0120942582 }, { "content": "pub fn make_default_anchor(ctx: &mut BuildContext, row: usize, column: usize) -> Handle<UiNode> {\n\n let default_anchor_size = 30.0;\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(default_anchor_size)\n\n .with_height(default_anchor_size)\n\n .with_visibility(false)\n\n .on_row(row)\n\n .on_column(column)\n\n .with_draw_on_top(true)\n\n .with_background(Brush::Solid(DEFAULT_ANCHOR_COLOR)),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl TileBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n\n content: TileContent::Empty,\n", "file_path": "rg3d-ui/src/dock.rs", "rank": 38, "score": 206496.74874501443 }, { "content": "pub fn make_button(ctx: &mut BuildContext, arrow: ArrowDirection, row: usize) -> Handle<UiNode> {\n\n ButtonBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::right(1.0))\n\n .on_row(row),\n\n )\n\n .with_back(\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_foreground(Brush::Solid(Color::opaque(90, 90, 90))),\n\n ))\n\n .with_normal_brush(Brush::Solid(Color::opaque(60, 60, 60)))\n\n .with_hover_brush(Brush::Solid(Color::opaque(80, 80, 80)))\n\n .with_pressed_brush(Brush::Solid(Color::opaque(80, 118, 178)))\n\n .build(ctx),\n\n )\n\n .with_content(make_arrow(ctx, arrow, 6.0))\n\n .build(ctx)\n\n}\n\n\n\nimpl<T: NumericType> NumericUpDownBuilder<T> {\n", "file_path": "rg3d-ui/src/numeric.rs", "rank": 39, "score": 206496.74874501437 }, { "content": "pub fn print() -> Result<String, fmt::Error> {\n\n #[cfg(feature = \"enable_profiler\")]\n\n {\n\n let mut buffer = String::new();\n\n PROFILER.lock().unwrap().print(&mut buffer)?;\n\n Ok(buffer)\n\n }\n\n\n\n #[cfg(not(feature = \"enable_profiler\"))]\n\n {\n\n Ok(\"Performance profiling results are not available, because feature 
'enable_profiler' wasn't defined!\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 40, "score": 201700.8089540713 }, { "content": "pub fn make_numeric_input<T: NumericType>(\n\n ctx: &mut BuildContext,\n\n column: usize,\n\n value: T,\n\n) -> Handle<UiNode> {\n\n NumericUpDownBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(0)\n\n .on_column(column)\n\n .with_margin(Thickness {\n\n left: 1.0,\n\n top: 0.0,\n\n right: 1.0,\n\n bottom: 0.0,\n\n }),\n\n )\n\n .with_precision(3)\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "rg3d-ui/src/vec/mod.rs", "rank": 41, "score": 201473.91749372287 }, { "content": "#[inline]\n\npub fn wrap_angle(angle: f32) -> f32 {\n\n let two_pi = 2.0 * std::f32::consts::PI;\n\n\n\n if angle > 0.0 {\n\n angle % two_pi\n\n } else {\n\n (angle + two_pi) % two_pi\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 42, "score": 201376.56619806966 }, { "content": "#[inline]\n\npub fn get_closest_point_triangles<P: PositionProvider>(\n\n points: &[P],\n\n triangles: &[TriangleDefinition],\n\n triangle_indices: &[u32],\n\n point: Vector3<f32>,\n\n) -> Option<usize> {\n\n let mut closest_sqr_distance = f32::MAX;\n\n let mut closest_index = None;\n\n for triangle_index in triangle_indices {\n\n let triangle = triangles.get(*triangle_index as usize).unwrap();\n\n for point_index in triangle.0.iter() {\n\n let vertex = points.get(*point_index as usize).unwrap();\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(*point_index as usize);\n\n }\n\n }\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 43, "score": 198575.45174564765 }, { "content": "#[proc_macro_derive(Inspect, attributes(inspect))]\n\npub fn inspect(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n 
TokenStream::from(inspect::impl_inspect(ast))\n\n}\n", "file_path": "rg3d-core-derive/src/lib.rs", "rank": 44, "score": 198348.71673581772 }, { "content": "#[proc_macro_derive(Visit, attributes(visit))]\n\npub fn visit(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(visit::impl_visit(ast))\n\n}\n\n\n\n/// Implements `Inspect` trait\n\n///\n\n/// User has to import `Inspect` and `PropertyInfo` to use this macro.\n", "file_path": "rg3d-core-derive/src/lib.rs", "rank": 45, "score": 198348.7167358177 }, { "content": "#[inline]\n\npub fn ieee_remainder(x: f32, y: f32) -> f32 {\n\n x - (x / y).round() * y\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 46, "score": 195939.34849798662 }, { "content": "#[inline]\n\npub fn get_closest_point_triangle_set<P: PositionProvider>(\n\n points: &[P],\n\n triangles: &[TriangleDefinition],\n\n point: Vector3<f32>,\n\n) -> Option<usize> {\n\n let mut closest_sqr_distance = f32::MAX;\n\n let mut closest_index = None;\n\n for triangle in triangles {\n\n for point_index in triangle.0.iter() {\n\n let vertex = points.get(*point_index as usize).unwrap();\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(*point_index as usize);\n\n }\n\n }\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 47, "score": 195798.25010536454 }, { "content": "pub fn print_hot_path() -> Result<String, fmt::Error> {\n\n #[cfg(feature = \"enable_profiler\")]\n\n {\n\n let mut buffer = String::new();\n\n PROFILER.lock().unwrap().print_hot_path(&mut buffer)?;\n\n Ok(buffer)\n\n }\n\n\n\n #[cfg(not(feature = \"enable_profiler\"))]\n\n {\n\n Ok(\"Performance profiling results are not available, because feature 'enable_profiler' wasn't defined!\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 
48, "score": 195506.86526323648 }, { "content": "pub fn err_code_to_string(err_code: c_int) -> String {\n\n unsafe {\n\n let message = CStr::from_ptr(snd_strerror(err_code) as *const _)\n\n .to_bytes()\n\n .to_vec();\n\n String::from_utf8(message).unwrap()\n\n }\n\n}\n\n\n", "file_path": "rg3d-sound/src/device/alsa.rs", "rank": 49, "score": 195450.2509877425 }, { "content": "// impl `#[derive(Visit)]` for `struct` or `enum`\n\npub fn impl_visit(ast: DeriveInput) -> TokenStream2 {\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_visit_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variants) => self::impl_visit_enum(&ty_args, variants),\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/visit.rs", "rank": 50, "score": 195450.2509877425 }, { "content": "pub fn make_property_margin(layer_index: usize) -> Thickness {\n\n let mut margin = HEADER_MARGIN;\n\n margin.left += 10.0 + layer_index as f32 * 10.0;\n\n margin\n\n}\n\n\n", "file_path": "rg3d-ui/src/inspector/mod.rs", "rank": 51, "score": 195450.25098774247 }, { "content": "pub fn impl_inspect(ast: DeriveInput) -> TokenStream2 {\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_inspect_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variant_args) => self::impl_inspect_enum(&ty_args, variant_args),\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/inspect.rs", "rank": 52, "score": 195450.2509877425 }, { "content": "#[inline]\n\npub fn hash_combine(lhs: u64, rhs: u64) -> u64 {\n\n lhs ^ (rhs\n\n .wrapping_add(0x9e3779b9)\n\n .wrapping_add(lhs << 6)\n\n .wrapping_add(lhs >> 2))\n\n}\n", "file_path": "rg3d-core/src/lib.rs", "rank": 53, "score": 192911.49903573468 }, { "content": "#[inline]\n\npub fn round_to_step(x: f32, step: f32) -> f32 {\n\n x - ieee_remainder(x, step)\n\n}\n\n\n", "file_path": 
"rg3d-core/src/math/mod.rs", "rank": 54, "score": 192911.49903573465 }, { "content": "pub fn check(err_code: c_int) -> Result<(), SoundError> {\n\n if err_code < 0 {\n\n Err(SoundError::FailedToInitializeDevice(err_code_to_string(\n\n err_code,\n\n )))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl AlsaSoundDevice {\n\n pub fn new<F: FnMut(&mut [(f32, f32)]) + Send + 'static>(\n\n buffer_len_bytes: u32,\n\n callback: F,\n\n ) -> Result<Self, SoundError> {\n\n unsafe {\n\n let name = CString::new(\"default\").unwrap();\n\n // 16-bit stereo is 4 bytes, so frame count is bufferHalfSize / 4\n\n let frame_count = buffer_len_bytes / 4;\n\n let mut playback_device = std::ptr::null_mut();\n", "file_path": "rg3d-sound/src/device/alsa.rs", "rank": 55, "score": 192608.39951516123 }, { "content": "#[inline]\n\n#[allow(clippy::useless_let_if_seq)]\n\npub fn classify_plane(normal: Vector3<f32>) -> PlaneClass {\n\n let mut longest = 0.0f32;\n\n let mut class = PlaneClass::XY;\n\n\n\n if normal.x.abs() > longest {\n\n longest = normal.x.abs();\n\n class = PlaneClass::YZ;\n\n }\n\n\n\n if normal.y.abs() > longest {\n\n longest = normal.y.abs();\n\n class = PlaneClass::XZ;\n\n }\n\n\n\n if normal.z.abs() > longest {\n\n class = PlaneClass::XY;\n\n }\n\n\n\n class\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 56, "score": 192608.39951516123 }, { "content": "#[inline(always)]\n\npub fn lerpf(a: f32, b: f32, t: f32) -> f32 {\n\n a + (b - a) * t\n\n}\n\n\n\n// https://en.wikipedia.org/wiki/Cubic_Hermite_spline\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 57, "score": 191370.03741552794 }, { "content": "pub fn make_property_enum_editor_definition() -> EnumPropertyEditorDefinition<PropertyValue> {\n\n EnumPropertyEditorDefinition {\n\n variant_generator: |i| match i {\n\n 0 => PropertyValue::NodeHandle(Default::default()),\n\n 1 => PropertyValue::Handle(Default::default()),\n\n 2 => PropertyValue::String(\"\".to_owned()),\n\n 3 => PropertyValue::I64(0),\n\n 4 => 
PropertyValue::U64(0),\n\n 5 => PropertyValue::I32(0),\n\n 6 => PropertyValue::U32(0),\n\n 7 => PropertyValue::I16(0),\n\n 8 => PropertyValue::U16(0),\n\n 9 => PropertyValue::I8(0),\n\n 10 => PropertyValue::U8(0),\n\n 11 => PropertyValue::F32(0.0),\n\n 12 => PropertyValue::F64(0.0),\n\n _ => unreachable!(),\n\n },\n\n index_generator: |v| match v {\n\n PropertyValue::NodeHandle(_) => 0,\n", "file_path": "editor/src/inspector/editors/mod.rs", "rank": 58, "score": 190371.3657755349 }, { "content": "/// Calculates single coefficient of Hann window.\n\n/// <https://en.wikipedia.org/wiki/Hann_function>\n\npub fn hann_window(i: usize, sample_count: usize) -> f32 {\n\n 0.5 - 0.5 * (2.0 * std::f32::consts::PI * i as f32 / (sample_count - 1) as f32).cos()\n\n}\n\n\n", "file_path": "rg3d-sound/src/dsp/mod.rs", "rank": 59, "score": 190013.03328765943 }, { "content": "/// Calculates single coefficient of Hamming window.\n\n/// <https://en.wikipedia.org/wiki/Window_function#Hamming_window>\n\npub fn hamming_window(i: usize, sample_count: usize) -> f32 {\n\n 0.54 - 0.46 * (2.0 * std::f32::consts::PI * i as f32 / (sample_count - 1) as f32).cos()\n\n}\n\n\n", "file_path": "rg3d-sound/src/dsp/mod.rs", "rank": 60, "score": 190013.03328765946 }, { "content": "#[inline]\n\npub fn barycentric_is_inside(bary: (f32, f32, f32)) -> bool {\n\n (bary.0 >= 0.0) && (bary.1 >= 0.0) && (bary.0 + bary.1 < 1.0)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 61, "score": 190013.03328765943 }, { "content": "#[inline]\n\npub fn vector_to_quat(vec: Vector3<f32>) -> UnitQuaternion<f32> {\n\n let dot = vec.normalize().dot(&Vector3::y());\n\n\n\n if dot.abs() > 1.0 - 10.0 * f32::EPSILON {\n\n // Handle singularity when vector is collinear with Y axis.\n\n UnitQuaternion::from_axis_angle(&Vector3::x_axis(), -dot.signum() * 90.0f32.to_radians())\n\n } else {\n\n UnitQuaternion::face_towards(&vec, &Vector3::y())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use 
crate::algebra::Vector2;\n\n use crate::math::Rect;\n\n use crate::math::SmoothAngle;\n\n\n\n #[test]\n\n fn ray_rect_intersection() {\n\n let rect = Rect::new(0.0, 0.0, 10.0, 10.0);\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 62, "score": 187235.8316473763 }, { "content": "pub fn make_save_file_selector(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n FileSelectorBuilder::new(\n\n WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0))\n\n .with_title(WindowTitle::Text(\"Save Scene As\".into()))\n\n .open(false),\n\n )\n\n .with_mode(FileBrowserMode::Save {\n\n default_file_name: PathBuf::from(\"unnamed.rgs\"),\n\n })\n\n .with_path(\"./\")\n\n .with_filter(make_scene_file_filter())\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 63, "score": 186845.5226202953 }, { "content": "#[inline]\n\npub fn clampf(v: f32, min: f32, max: f32) -> f32 {\n\n if v < min {\n\n min\n\n } else if v > max {\n\n max\n\n } else {\n\n v\n\n }\n\n}\n\n\n\n/// There are two versions of remainder, the standard `%` operator which does `x - (x/y).trunc()*y` and IEEE remainder which does `x - (x/y).round()*y`.\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 64, "score": 185443.72220520076 }, { "content": "/// Utility function that replaces back slashes \\ to forward /\n\n/// It replaces slashes only on windows!\n\npub fn replace_slashes<P: AsRef<Path>>(path: P) -> PathBuf {\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n if path.as_ref().is_absolute() {\n\n // Absolute Windows paths are incompatible with other operating systems so\n\n // don't bother here and return existing path as owned.\n\n path.as_ref().to_owned()\n\n } else {\n\n // Replace all \\ to /. 
This is needed because on macos or linux \\ is a valid symbol in\n\n // file name, and not separator (except linux which understand both variants).\n\n let mut os_str = std::ffi::OsString::new();\n\n let count = path.as_ref().components().count();\n\n for (i, component) in path.as_ref().components().enumerate() {\n\n os_str.push(component.as_os_str());\n\n if i != count - 1 {\n\n os_str.push(\"/\");\n\n }\n\n }\n\n PathBuf::from(os_str)\n\n }\n", "file_path": "rg3d-core/src/lib.rs", "rank": 65, "score": 184856.21556477182 }, { "content": "/// Creates new window using specified window function.\n\n/// <https://en.wikipedia.org/wiki/Window_function>\n\npub fn make_window<W: Fn(usize, usize) -> f32>(sample_count: usize, func: W) -> Vec<f32> {\n\n (0..sample_count).map(|i| func(i, sample_count)).collect()\n\n}\n", "file_path": "rg3d-sound/src/dsp/mod.rs", "rank": 66, "score": 184699.34566082124 }, { "content": "/// A value of a property.\n\npub trait PropertyValue: Any + Debug {\n\n /// Casts `self` to a `&dyn Any`\n\n fn as_any(&self) -> &dyn Any;\n\n}\n\n\n\nimpl<T: Debug + 'static> PropertyValue for T {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n}\n\n\n\n/// An error that can occur during \"type casting\"\n\n#[derive(Debug)]\n\npub enum CastError {\n\n /// Given type does not match expected.\n\n TypeMismatch {\n\n /// A name of the property.\n\n property_name: String,\n\n\n\n /// Expected type identifier.\n", "file_path": "rg3d-core/src/inspect.rs", "rank": 67, "score": 184065.2683841571 }, { "content": "#[inline]\n\npub fn quat_from_euler<T: SimdRealField + RealField + Copy + Clone>(\n\n euler_radians: Vector3<T>,\n\n order: RotationOrder,\n\n) -> UnitQuaternion<T> {\n\n let qx = UnitQuaternion::from_axis_angle(&Vector3::x_axis(), euler_radians.x);\n\n let qy = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), euler_radians.y);\n\n let qz = UnitQuaternion::from_axis_angle(&Vector3::z_axis(), euler_radians.z);\n\n match order {\n\n RotationOrder::XYZ => 
qz * qy * qx,\n\n RotationOrder::XZY => qy * qz * qx,\n\n RotationOrder::YZX => qx * qz * qy,\n\n RotationOrder::YXZ => qz * qx * qy,\n\n RotationOrder::ZXY => qy * qx * qz,\n\n RotationOrder::ZYX => qx * qy * qz,\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 68, "score": 182016.00287692394 }, { "content": "#[inline]\n\npub fn solve_quadratic(a: f32, b: f32, c: f32) -> Option<[f32; 2]> {\n\n let discriminant = b * b - 4.0 * a * c;\n\n if discriminant < 0.0 {\n\n // No real roots\n\n None\n\n } else {\n\n // Dont care if quadratic equation has only one root (discriminant == 0), this is edge-case\n\n // which requires additional branching instructions which is not good for branch-predictor in CPU.\n\n let _2a = 2.0 * a;\n\n let discr_root = discriminant.sqrt();\n\n let r1 = (-b + discr_root) / _2a;\n\n let r2 = (-b - discr_root) / _2a;\n\n Some([r1, r2])\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 69, "score": 181549.87012750318 }, { "content": "///\n\n/// Triangulates specified polygon.\n\n///\n\npub fn triangulate(vertices: &[Vector3<f32>], out_triangles: &mut Vec<[usize; 3]>) {\n\n out_triangles.clear();\n\n if vertices.len() == 3 {\n\n // Triangulating a triangle?\n\n out_triangles.push([0, 1, 2]);\n\n } else if vertices.len() == 4 {\n\n // Special case for quadrilaterals (much faster than generic)\n\n let mut start_vertex = 0;\n\n for i in 0..4 {\n\n let v = vertices[i];\n\n let v0 = vertices[(i + 3) % 4];\n\n if let Some(left) = (v0 - v).try_normalize(f32::EPSILON) {\n\n let v1 = vertices[(i + 2) % 4];\n\n if let Some(diag) = (v1 - v).try_normalize(f32::EPSILON) {\n\n let v2 = vertices[(i + 1) % 4];\n\n if let Some(right) = (v2 - v).try_normalize(f32::EPSILON) {\n\n // Check for concave vertex\n\n let angle = left.dot(&diag).acos() + right.dot(&diag).acos();\n\n if angle > std::f32::consts::PI {\n\n start_vertex = i;\n", "file_path": "rg3d-core/src/math/triangulator.rs", "rank": 77, "score": 180644.88229090307 }, 
{ "content": "/// \"Transmutes\" array of any sized type to a slice of bytes.\n\npub fn array_as_u8_slice<T: Sized>(v: &[T]) -> &'_ [u8] {\n\n // SAFETY: It is safe to reinterpret data to read it.\n\n unsafe {\n\n std::slice::from_raw_parts(v.as_ptr() as *const u8, std::mem::size_of::<T>() * v.len())\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 78, "score": 179599.49737512588 }, { "content": "/// Generates UV map for given surface data.\n\n///\n\n/// # Performance\n\n///\n\n/// This method utilizes lots of \"brute force\" algorithms, so it is not fast as it\n\n/// could be in ideal case. It also allocates some memory for internal needs.\n\npub fn generate_uvs(\n\n data: &mut SurfaceData,\n\n spacing: f32,\n\n) -> Result<SurfaceDataPatch, VertexFetchError> {\n\n let uv_box = generate_uv_box(data);\n\n\n\n let data_id = data.content_hash();\n\n let mut vertex_buffer_mut = data.vertex_buffer.modify();\n\n let mut geometry_buffer_mut = data.geometry_buffer.modify();\n\n let (mut meshes, mut patch) = generate_uv_meshes(\n\n &uv_box,\n\n data_id,\n\n &mut vertex_buffer_mut,\n\n &mut geometry_buffer_mut,\n\n );\n\n drop(geometry_buffer_mut);\n\n\n\n // Step 4. 
Arrange and scale all meshes on uv map so it fits into [0;1] range.\n\n let area = meshes.iter().fold(0.0, |area, mesh| area + mesh.area());\n\n let square_side = area.sqrt() + spacing * meshes.len() as f32;\n", "file_path": "src/utils/uvgen.rs", "rank": 79, "score": 178892.0998463074 }, { "content": "#[inline]\n\npub fn set_once() {\n\n use std::sync::Once;\n\n static SET_HOOK: Once = Once::new();\n\n SET_HOOK.call_once(|| {\n\n panic::set_hook(Box::new(hook));\n\n });\n\n}\n\n\n", "file_path": "examples/wasm/src/lib.rs", "rank": 80, "score": 178892.0998463074 }, { "content": "/// Performs hashing of a sized value by interpreting it as raw memory.\n\npub fn hash_as_bytes<T: Sized, H: Hasher>(value: &T, hasher: &mut H) {\n\n hasher.write(value_as_u8_slice(value))\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 81, "score": 178456.30672924838 }, { "content": "#[inline]\n\npub fn is_point_inside_triangle(p: &Vector3<f32>, vertices: &[Vector3<f32>; 3]) -> bool {\n\n let ba = vertices[1] - vertices[0];\n\n let ca = vertices[2] - vertices[0];\n\n let vp = *p - vertices[0];\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot02 = ca.dot(&vp);\n\n let dot12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // Calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot02 - ca_dot_ba * dot12) * inv_denom;\n\n let v = (ca_dot_ca * dot12 - ca_dot_ba * dot02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 82, "score": 177981.4906365026 }, { "content": "pub fn make_dropdown_list_option(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_height(26.0).with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n 
.with_horizontal_text_alignment(HorizontalAlignment::Center)\n\n .with_text(name)\n\n .build(ctx),\n\n ),\n\n ))\n\n .build(ctx)\n\n}\n\n\n\nimpl AssetItemMessage {\n\n define_constructor!(AssetItemMessage:Select => fn select(bool), layout: false);\n\n}\n", "file_path": "editor/src/gui.rs", "rank": 84, "score": 177174.4920759422 }, { "content": "#[wasm_bindgen]\n\npub fn main_js() {\n\n set_once();\n\n\n\n let event_loop = EventLoop::new();\n\n\n\n let window_builder = rg3d::window::WindowBuilder::new()\n\n .with_inner_size(LogicalSize::new(800, 600))\n\n .with_title(\"Example - WASM\")\n\n .with_resizable(true);\n\n\n\n let mut engine = Engine::new(window_builder, &event_loop, true).unwrap();\n\n engine\n\n .renderer\n\n .set_backbuffer_clear_color(Color::opaque(150, 150, 255));\n\n\n\n // Configure resource manager.\n\n engine.resource_manager.state().set_textures_import_options(\n\n TextureImportOptions::default().with_compression(CompressionOptions::NoCompression),\n\n );\n\n\n", "file_path": "examples/wasm/src/lib.rs", "rank": 85, "score": 176299.47909413814 }, { "content": "/// Generates a set of UV meshes.\n\npub fn generate_uv_meshes(\n\n uv_box: &UvBox,\n\n data_id: u64,\n\n vertex_buffer_mut: &mut VertexBufferRefMut,\n\n geometry_buffer_mut: &mut TriangleBufferRefMut,\n\n) -> (Vec<UvMesh>, SurfaceDataPatch) {\n\n let mut mesh_patch = SurfaceDataPatch {\n\n data_id,\n\n ..Default::default()\n\n };\n\n\n\n if !vertex_buffer_mut.has_attribute(VertexAttributeUsage::TexCoord1) {\n\n vertex_buffer_mut\n\n .add_attribute(\n\n VertexAttributeDescriptor {\n\n usage: VertexAttributeUsage::TexCoord1,\n\n data_type: VertexAttributeDataType::F32,\n\n size: 2,\n\n divisor: 0,\n\n shader_location: 6, // HACK: GBuffer renderer expects it to be at 6\n", "file_path": "src/utils/uvgen.rs", "rank": 86, "score": 176299.47909413814 }, { "content": "/// Generates UVs for a specified mesh.\n\npub fn generate_uvs_mesh(\n\n mesh: &Mesh,\n\n spacing: f32,\n\n) -> 
Result<Vec<SurfaceDataPatch>, VertexFetchError> {\n\n let last = instant::Instant::now();\n\n\n\n let data_set = mesh.surfaces().iter().map(|s| s.data()).collect::<Vec<_>>();\n\n\n\n let patches = data_set\n\n .into_par_iter()\n\n .map(|data| generate_uvs(&mut data.lock(), spacing))\n\n .collect::<Result<Vec<SurfaceDataPatch>, VertexFetchError>>()?;\n\n\n\n println!(\"Generate UVs: {:?}\", instant::Instant::now() - last);\n\n\n\n Ok(patches)\n\n}\n", "file_path": "src/utils/uvgen.rs", "rank": 87, "score": 176299.47909413814 }, { "content": "/// `pub const [VARIANT_]FIELD: &'static str = \"key\";`\n\nfn prop_keys_impl(ty_args: &args::TypeArgs) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let (impl_generics, ty_generics, where_clause) = ty_args.generics.split_for_impl();\n\n\n\n let prop_keys = prop_keys::quote_prop_keys(ty_args);\n\n quote! {\n\n /// Property key constants\n\n impl #impl_generics #ty_ident #ty_generics #where_clause {\n\n #prop_keys\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/inspect/utils.rs", "rank": 88, "score": 176054.48355811439 }, { "content": "#[inline]\n\npub fn spherical_to_cartesian(azimuth: f32, elevation: f32, radius: f32) -> Vector3<f32> {\n\n let x = radius * elevation.sin() * azimuth.sin();\n\n let y = radius * elevation.cos();\n\n let z = -radius * elevation.sin() * azimuth.cos();\n\n Vector3::new(x, y, z)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 89, "score": 175425.05352045072 }, { "content": "#[inline]\n\npub fn triangle_area(a: Vector3<f32>, b: Vector3<f32>, c: Vector3<f32>) -> f32 {\n\n (b - a).cross(&(c - a)).norm() * 0.5\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 90, "score": 174370.5113979745 }, { "content": "fn impl_inspect_enum(ty_args: &args::TypeArgs, variant_args: &[args::VariantArgs]) -> TokenStream2 {\n\n let variant_matches =\n\n variant_args.iter().map(|variant| {\n\n let variant_ident = &variant.ident;\n\n\n\n let field_prefix = 
utils::FieldPrefix::of_enum_variant(variant.fields.style);\n\n\n\n let field_match_idents =\n\n variant.fields.fields.iter().enumerate().map(|(i, field)| {\n\n field_prefix.field_match_ident(i, field, variant.fields.style)\n\n });\n\n\n\n let variant_match = match variant.fields.style {\n\n ast::Style::Struct => {\n\n quote! {\n\n Self::#variant_ident { #(#field_match_idents),* }\n\n }\n\n }\n\n ast::Style::Tuple => {\n\n quote! {\n", "file_path": "rg3d-core-derive/src/inspect.rs", "rank": 91, "score": 174059.24075638363 }, { "content": "/// impl `Visit` for `enum`\n\nfn impl_visit_enum(ty_args: &args::TypeArgs, variant_args: &[args::VariantArgs]) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let ty_name = format!(\"{}\", ty_ident);\n\n\n\n // variant ID = variant index\n\n let id_type = quote!(u32);\n\n\n\n // `fn id(&self) -> u32`\n\n let fn_id = {\n\n let matchers = variant_args\n\n .iter()\n\n .enumerate()\n\n .map(|(variant_index, variant)| {\n\n let variant_index = variant_index as u32;\n\n let variant_ident = &variant.ident;\n\n\n\n match variant.fields.style {\n\n ast::Style::Struct => quote! {\n\n #ty_ident::#variant_ident { .. 
} => #variant_index,\n\n },\n", "file_path": "rg3d-core-derive/src/visit.rs", "rank": 92, "score": 174059.24075638363 }, { "content": "pub fn create_menu_item(\n\n text: &str,\n\n items: Vec<Handle<UiNode>>,\n\n ctx: &mut BuildContext,\n\n) -> Handle<UiNode> {\n\n MenuItemBuilder::new(WidgetBuilder::new().with_min_size(Vector2::new(120.0, 22.0)))\n\n .with_content(MenuItemContent::text(text))\n\n .with_items(items)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/menu/mod.rs", "rank": 93, "score": 173830.89477178722 }, { "content": "fn impl_inspect_struct(ty_args: &args::TypeArgs, field_args: &args::Fields) -> TokenStream2 {\n\n let field_prefix = utils::FieldPrefix::of_struct(field_args.style);\n\n let body = utils::gen_inspect_fn_body(field_prefix, field_args);\n\n utils::create_inspect_impl(ty_args, field_args.iter(), body)\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/inspect.rs", "rank": 94, "score": 173641.55064641804 }, { "content": "#[inline]\n\npub fn get_farthest_point(points: &[Vector3<f32>], dir: Vector3<f32>) -> Vector3<f32> {\n\n let mut n_farthest = 0;\n\n let mut max_dot = -f32::MAX;\n\n for (i, point) in points.iter().enumerate() {\n\n let dot = dir.dot(point);\n\n if dot > max_dot {\n\n n_farthest = i;\n\n max_dot = dot\n\n }\n\n }\n\n points[n_farthest]\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 95, "score": 172969.25524979617 }, { "content": "#[inline]\n\npub fn get_polygon_normal(polygon: &[Vector3<f32>]) -> Result<Vector3<f32>, &'static str> {\n\n let mut normal = Vector3::default();\n\n\n\n for (i, current) in polygon.iter().enumerate() {\n\n let next = polygon[(i + 1) % polygon.len()];\n\n normal.x += (current.y - next.y) * (current.z + next.z);\n\n normal.y += (current.z - next.z) * (current.x + next.x);\n\n normal.z += (current.x - next.x) * (current.y + next.y);\n\n }\n\n\n\n normal\n\n .try_normalize(f32::EPSILON)\n\n .ok_or(\"Unable to get normal of degenerated polygon!\")\n\n}\n\n\n", "file_path": 
"rg3d-core/src/math/mod.rs", "rank": 96, "score": 172969.25524979617 }, { "content": "pub fn create_menu_item_shortcut(\n\n text: &str,\n\n shortcut: &str,\n\n items: Vec<Handle<UiNode>>,\n\n ctx: &mut BuildContext,\n\n) -> Handle<UiNode> {\n\n MenuItemBuilder::new(WidgetBuilder::new().with_min_size(Vector2::new(120.0, 22.0)))\n\n .with_content(MenuItemContent::text_with_shortcut(text, shortcut))\n\n .with_items(items)\n\n .build(ctx)\n\n}\n\n\n\nimpl Menu {\n\n pub fn new(\n\n engine: &mut GameEngine,\n\n message_sender: Sender<Message>,\n\n settings: &Settings,\n\n ) -> Self {\n\n let file_menu = FileMenu::new(engine, &message_sender, settings);\n\n let ctx = &mut engine.user_interface.build_ctx();\n", "file_path": "editor/src/menu/mod.rs", "rank": 97, "score": 171477.6535864306 }, { "content": "pub fn calculate_gizmo_distance_scaling(\n\n graph: &Graph,\n\n camera: Handle<Node>,\n\n gizmo_origin: Handle<Node>,\n\n) -> Vector3<f32> {\n\n let distance = distance_scale_factor(graph[camera].as_camera().fov())\n\n * graph[gizmo_origin]\n\n .global_position()\n\n .metric_distance(&graph[camera].global_position());\n\n Vector3::new(distance, distance, distance)\n\n}\n\n\n", "file_path": "editor/src/interaction/mod.rs", "rank": 98, "score": 171477.6535864306 }, { "content": "pub fn create_root_menu_item(\n\n text: &str,\n\n items: Vec<Handle<UiNode>>,\n\n ctx: &mut BuildContext,\n\n) -> Handle<UiNode> {\n\n MenuItemBuilder::new(WidgetBuilder::new().with_margin(Thickness::right(10.0)))\n\n .with_content(MenuItemContent::text(text))\n\n .with_items(items)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/menu/mod.rs", "rank": 99, "score": 171477.6535864306 } ]
Rust
der/src/tag.rs
cipepser/utils
e96656b6b9d4e9313816316f4f287a1d2c04efcc
use crate::{Decodable, Decoder, Encodable, Encoder, Error, ErrorKind, Length, Result}; use core::{convert::TryFrom, fmt}; const CONSTRUCTED_FLAG: u8 = 0b100000; const CONTEXT_SPECIFIC_FLAG: u8 = 0b10000000; #[derive(Copy, Clone, Eq, PartialEq)] #[allow(clippy::identity_op)] #[non_exhaustive] #[repr(u8)] pub enum Tag { Boolean = 0x01, Integer = 0x02, BitString = 0x03, OctetString = 0x04, Null = 0x05, ObjectIdentifier = 0x06, Sequence = 0x10 | CONSTRUCTED_FLAG, ContextSpecific0 = 0 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific1 = 1 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific2 = 2 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific3 = 3 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, } impl TryFrom<u8> for Tag { type Error = Error; fn try_from(byte: u8) -> Result<Tag> { match byte { 0x01 => Ok(Tag::Boolean), 0x02 => Ok(Tag::Integer), 0x03 => Ok(Tag::BitString), 0x04 => Ok(Tag::OctetString), 0x05 => Ok(Tag::Null), 0x06 => Ok(Tag::ObjectIdentifier), 0x30 => Ok(Tag::Sequence), 0xA0 => Ok(Tag::ContextSpecific0), 0xA1 => Ok(Tag::ContextSpecific1), 0xA2 => Ok(Tag::ContextSpecific2), 0xA3 => Ok(Tag::ContextSpecific3), _ => Err(ErrorKind::UnknownTag { byte }.into()), } } } impl Tag { pub fn assert_eq(self, expected: Tag) -> Result<Tag> { if self == expected { Ok(self) } else { Err(ErrorKind::UnexpectedTag { expected: Some(expected), actual: self, } .into()) } } pub fn type_name(self) -> &'static str { match self { Self::Boolean => "BOOLEAN", Self::Integer => "INTEGER", Self::BitString => "BIT STRING", Self::OctetString => "OCTET STRING", Self::Null => "NULL", Self::ObjectIdentifier => "OBJECT IDENTIFIER", Self::Sequence => "SEQUENCE", Self::ContextSpecific0 => "Context Specific 0", Self::ContextSpecific1 => "Context Specific 1", Self::ContextSpecific2 => "Context Specific 2", Self::ContextSpecific3 => "Context Specific 3", } } } impl Decodable<'_> for Tag { fn decode(decoder: &mut Decoder<'_>) -> Result<Self> { 
decoder.byte().and_then(Self::try_from) } } impl Encodable for Tag { fn encoded_len(&self) -> Result<Length> { Ok(1u8.into()) } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { encoder.byte(*self as u8) } } impl fmt::Display for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.type_name()) } } impl fmt::Debug for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Tag(0x{:02x}: {})", *self as u8, self.type_name()) } }
use crate::{Decodable, Decoder, Encodable, Encoder, Error, ErrorKind, Length, Result}; use core::{convert::TryFrom, fmt}; const CONSTRUCTED_FLAG: u8 = 0b100000; const CONTEXT_SPECIFIC_FLAG: u8 = 0b10000000; #[derive(Copy, Clone, Eq, PartialEq)] #[allow(clippy::identity_op)] #[non_exhaustive] #[repr(u8)] pub enum Tag { Boolean = 0x01, Integer = 0x02, BitString = 0x03, OctetString = 0x04, Null = 0x05, ObjectIdentifier = 0x06, Sequence = 0x10 | CONSTRUCTED_FLAG, ContextSpecific0 = 0 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific1 = 1 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific2 = 2 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific3 = 3 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, } impl TryFrom<u8> for Tag { type Error = Error; fn try_from(byte: u8) -> Result<Tag> { match byte { 0x01 => Ok(Tag::Boolean), 0x02 => Ok(Tag::Integer), 0x03 => Ok(Tag::BitString), 0x04 => Ok(Tag::OctetString), 0x05 => Ok(Tag::Null), 0x06 => Ok(Tag::ObjectIdentifier), 0x30 => Ok(Tag::Sequence), 0xA0 => Ok(Tag::ContextSpecific0), 0xA1 => Ok(Tag::ContextSpecific1), 0xA2 => Ok(Tag::ContextSpecific2), 0xA3 => Ok(Tag::ContextSpecific3), _ => Err(ErrorKind::UnknownTag { byte }.into()), } } } impl Tag {
pub fn type_name(self) -> &'static str { match self { Self::Boolean => "BOOLEAN", Self::Integer => "INTEGER", Self::BitString => "BIT STRING", Self::OctetString => "OCTET STRING", Self::Null => "NULL", Self::ObjectIdentifier => "OBJECT IDENTIFIER", Self::Sequence => "SEQUENCE", Self::ContextSpecific0 => "Context Specific 0", Self::ContextSpecific1 => "Context Specific 1", Self::ContextSpecific2 => "Context Specific 2", Self::ContextSpecific3 => "Context Specific 3", } } } impl Decodable<'_> for Tag { fn decode(decoder: &mut Decoder<'_>) -> Result<Self> { decoder.byte().and_then(Self::try_from) } } impl Encodable for Tag { fn encoded_len(&self) -> Result<Length> { Ok(1u8.into()) } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { encoder.byte(*self as u8) } } impl fmt::Display for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.type_name()) } } impl fmt::Debug for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Tag(0x{:02x}: {})", *self as u8, self.type_name()) } }
pub fn assert_eq(self, expected: Tag) -> Result<Tag> { if self == expected { Ok(self) } else { Err(ErrorKind::UnexpectedTag { expected: Some(expected), actual: self, } .into()) } }
function_block-full_function
[ { "content": "/// Obtain the length of an ASN.1 `SEQUENCE` of [`Encodable`] values when\n\n/// serialized as ASN.1 DER, including the `SEQUENCE` tag and length prefix.\n\npub fn encoded_len(encodables: &[&dyn Encodable]) -> Result<Length> {\n\n let inner_len = encoded_len_inner(encodables)?;\n\n Header::new(Tag::Sequence, inner_len)?.encoded_len() + inner_len\n\n}\n\n\n\n/// Obtain the inner length of an ASN.1 `SEQUENCE` of [`Encodable`] values\n\n/// excluding the tag and length.\n\npub(crate) fn encoded_len_inner(encodables: &[&dyn Encodable]) -> Result<Length> {\n\n encodables\n\n .iter()\n\n .fold(Ok(Length::zero()), |sum, encodable| {\n\n sum + encodable.encoded_len()?\n\n })\n\n}\n\n\n\n/// ASN.1 `SEQUENCE` type.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub struct Sequence<'a> {\n\n /// Inner value\n\n inner: ByteSlice<'a>,\n", "file_path": "der/src/asn1/sequence.rs", "rank": 0, "score": 260337.08778273297 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\nfn decode_vec(input: &str, padded: bool, hi_bytes: (u8, u8)) -> Result<Vec<u8>, Error> {\n\n let mut output = vec![0u8; decoded_len(input.len())];\n\n let len = decode(input, &mut output, padded, hi_bytes)?.len();\n\n\n\n if len <= output.len() {\n\n output.truncate(len);\n\n Ok(output)\n\n } else {\n\n Err(Error::InvalidLength)\n\n }\n\n}\n\n\n\n/// Get the length of the output from decoding the provided *unpadded*\n\n/// Base64-encoded input (use [`unpadded_len_ct`] to compute this value for\n\n/// a padded input)\n\n///\n\n/// Note that this function does not fully validate the Base64 is well-formed\n\n/// and may return incorrect results for malformed Base64.\n", "file_path": "base64ct/src/lib.rs", "rank": 1, "score": 235688.44015385036 }, { "content": "/// Parse a single byte from a slice\n\nfn parse_byte(bytes: &mut &[u8]) -> Result<u8> {\n\n let byte = *bytes.get(0).ok_or(Error)?;\n\n *bytes = &bytes[1..];\n\n Ok(byte)\n\n}\n\n\n", 
"file_path": "const-oid/src/lib.rs", "rank": 2, "score": 231191.74865408585 }, { "content": "/// Validate padding is well-formed and compute unpadded length.\n\n///\n\n/// Returns length-related errors eagerly as a [`Result`], and data-dependent\n\n/// errors (i.e. malformed padding bytes) as `i16` to be combined with other\n\n/// encoding-related errors prior to branching.\n\nfn decode_padding(input: &[u8]) -> Result<(usize, i16), InvalidEncodingError> {\n\n if input.len() % 4 != 0 {\n\n return Err(InvalidEncodingError);\n\n }\n\n\n\n let unpadded_len = match *input {\n\n [.., b0, b1] => {\n\n let pad_len = match_eq_ct(b0, PAD, 1) + match_eq_ct(b1, PAD, 1);\n\n input.len() - pad_len as usize\n\n }\n\n _ => input.len(),\n\n };\n\n\n\n let padding_len = input.len() - unpadded_len;\n\n\n\n let err = match *input {\n\n [.., b0] if padding_len == 1 => match_eq_ct(b0, PAD, 1) ^ 1,\n\n [.., b0, b1] if padding_len == 2 => (match_eq_ct(b0, PAD, 1) & match_eq_ct(b1, PAD, 1)) ^ 1,\n\n _ => {\n\n if padding_len == 0 {\n\n 0\n\n } else {\n\n return Err(InvalidEncodingError);\n\n }\n\n }\n\n };\n\n\n\n Ok((unpadded_len, err))\n\n}\n", "file_path": "base64ct/src/lib.rs", "rank": 3, "score": 221505.66216273332 }, { "content": "/// Parse a base 128 (big endian) integer from a bytestring\n\nfn parse_base128(bytes: &mut &[u8]) -> Result<u32> {\n\n let mut result = 0;\n\n let mut shift = 0;\n\n\n\n loop {\n\n let byte = parse_byte(bytes)?;\n\n\n\n if shift == 28 && byte & 0b11110000 != 0 {\n\n // Overflow\n\n return Err(Error);\n\n }\n\n\n\n result = result << 7 | (byte & 0b1111111) as u32;\n\n\n\n if byte & 0b10000000 == 0 {\n\n return Ok(result);\n\n }\n\n\n\n shift += 7;\n\n }\n\n}\n\n\n", "file_path": "const-oid/src/lib.rs", "rank": 4, "score": 203476.62715225306 }, { "content": "/// Write the given unsigned integer in base 128\n\nfn write_base128(bytes: &mut [u8], mut n: u32) -> Result<usize> {\n\n let nbytes = base128_len(n);\n\n let mut i = 
nbytes.checked_sub(1).expect(\"length underflow\");\n\n let mut mask = 0;\n\n\n\n while n > 0x80 {\n\n let byte = bytes.get_mut(i).ok_or(Error)?;\n\n *byte = (n & 0b1111111 | mask) as u8;\n\n n >>= 7;\n\n i = i.checked_sub(1).unwrap();\n\n mask = 0b10000000;\n\n }\n\n\n\n *bytes.get_mut(0).unwrap() = (n | mask) as u8;\n\n Ok(nbytes)\n\n}\n\n\n", "file_path": "const-oid/src/lib.rs", "rank": 5, "score": 187323.27581307187 }, { "content": "#[inline(always)]\n\nfn encode_3bytes(src: &[u8], dst: &mut [u8], hi_bytes: (u8, u8)) {\n\n debug_assert_eq!(src.len(), 3);\n\n debug_assert!(dst.len() >= 4, \"dst too short: {}\", dst.len());\n\n\n\n let b0 = src[0] as i16;\n\n let b1 = src[1] as i16;\n\n let b2 = src[2] as i16;\n\n\n\n dst[0] = encode_6bits(b0 >> 2, hi_bytes);\n\n dst[1] = encode_6bits(((b0 << 4) | (b1 >> 4)) & 63, hi_bytes);\n\n dst[2] = encode_6bits(((b1 << 2) | (b2 >> 6)) & 63, hi_bytes);\n\n dst[3] = encode_6bits(b2 & 63, hi_bytes);\n\n}\n\n\n", "file_path": "base64ct/src/lib.rs", "rank": 6, "score": 183436.38404533186 }, { "content": "#[inline(always)]\n\nfn encode_3bytes_padded(src: &[u8], dst: &mut [u8], hi_bytes: (u8, u8)) {\n\n let mut tmp = [0u8; 3];\n\n tmp[..src.len()].copy_from_slice(&src);\n\n encode_3bytes(&tmp, dst, hi_bytes);\n\n\n\n dst[3] = PAD;\n\n\n\n if src.len() == 1 {\n\n dst[2] = PAD;\n\n }\n\n}\n\n\n", "file_path": "base64ct/src/lib.rs", "rank": 7, "score": 180954.54830949428 }, { "content": "#[inline(always)]\n\nfn decode_3bytes(src: &[u8], dst: &mut [u8], hi_bytes: (u8, u8)) -> i16 {\n\n debug_assert_eq!(src.len(), 4);\n\n debug_assert!(dst.len() >= 3, \"dst too short: {}\", dst.len());\n\n\n\n let c0 = decode_6bits(src[0], hi_bytes);\n\n let c1 = decode_6bits(src[1], hi_bytes);\n\n let c2 = decode_6bits(src[2], hi_bytes);\n\n let c3 = decode_6bits(src[3], hi_bytes);\n\n\n\n dst[0] = ((c0 << 2) | (c1 >> 4)) as u8;\n\n dst[1] = ((c1 << 4) | (c2 >> 2)) as u8;\n\n dst[2] = ((c2 << 6) | c3) as u8;\n\n\n\n ((c0 | c1 | c2 | c3) >> 8) & 
1\n\n}\n\n\n", "file_path": "base64ct/src/lib.rs", "rank": 8, "score": 179544.2996611609 }, { "content": "#[cfg(all(not(unix), feature = \"std\"))]\n\nfn write_secret_file(path: impl AsRef<Path>, data: &[u8]) -> Result<()> {\n\n fs::write(path, data)?;\n\n Ok(())\n\n}\n", "file_path": "pkcs8/src/document.rs", "rank": 9, "score": 178454.76006709295 }, { "content": "#[inline(always)]\n\nfn encode_6bits(src: i16, hi_bytes: (u8, u8)) -> u8 {\n\n let hi_off = 0x1c + (hi_bytes.0 & 4);\n\n let mut diff = 0x41i16;\n\n\n\n diff += match_gt_ct(src, 25, 6);\n\n diff -= match_gt_ct(src, 51, 75);\n\n diff -= match_gt_ct(src, 61, hi_bytes.0 as i16 - hi_off as i16);\n\n diff += match_gt_ct(src, 62, hi_bytes.1 as i16 - hi_bytes.0 as i16 - 1);\n\n\n\n (src + diff) as u8\n\n}\n\n\n", "file_path": "base64ct/src/lib.rs", "rank": 10, "score": 175841.09616189095 }, { "content": "#[inline(always)]\n\nfn decode_6bits(src: u8, hi_bytes: (u8, u8)) -> i16 {\n\n let mut res: i16 = -1;\n\n res += match_range_ct(src, 0x41..0x5a, src as i16 - 64);\n\n res += match_range_ct(src, 0x61..0x7a, src as i16 - 70);\n\n res += match_range_ct(src, 0x30..0x39, src as i16 + 5);\n\n res += match_eq_ct(src, hi_bytes.0, 63);\n\n res + match_eq_ct(src, hi_bytes.1, 64)\n\n}\n\n\n\n/// Match that the given input is greater than the provided threshold.\n", "file_path": "base64ct/src/lib.rs", "rank": 11, "score": 175818.61865933303 }, { "content": "/// Read a git-flavoured VLQ value from `&data[*pos..]`.\n\n/// Increments `pos` to a number of read bytes.\n\n///\n\n/// This function returns `None` if buffer does not contain enough bytes\n\n/// or if VLQ is bigger than 4 bytes.\n\n///\n\n/// See the test submodule for example values.\n\nfn read_vlq(data: &[u8], pos: &mut usize) -> Result<usize, Error> {\n\n let b = data.get(*pos).ok_or(Error::UnexpectedEnd)?;\n\n *pos += 1;\n\n let mut next = b & NEXT_MASK;\n\n let mut val = (b & VAL_MASK) as usize;\n\n\n\n macro_rules! 
step {\n\n () => {\n\n if next == 0 {\n\n return Ok(val);\n\n }\n\n let b = data.get(*pos).ok_or(Error::UnexpectedEnd)?;\n\n *pos += 1;\n\n next = b & NEXT_MASK;\n\n let t = (b & VAL_MASK) as usize;\n\n val = ((val + 1) << 7) + t;\n\n };\n\n }\n\n\n\n step!();\n", "file_path": "blobby/src/lib.rs", "rank": 12, "score": 174835.7769157015 }, { "content": "#[inline(always)]\n\nfn match_eq_ct(input: u8, expected: u8, ret_on_match: i16) -> i16 {\n\n match_range_ct(input, expected..expected, ret_on_match)\n\n}\n\n\n", "file_path": "base64ct/src/lib.rs", "rank": 13, "score": 169698.1386478881 }, { "content": "fn encode(reader: impl BufRead, mut writer: impl Write) -> io::Result<usize> {\n\n let mut blobs = Vec::new();\n\n for line in reader.lines() {\n\n let blob = hex::decode(line?.as_str())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;\n\n blobs.push(blob);\n\n }\n\n\n\n let mut idx_map = HashMap::new();\n\n for blob in blobs.iter().filter(|b| b.len() != 0) {\n\n let v = idx_map.entry(blob.as_slice()).or_insert(0);\n\n *v += 1;\n\n }\n\n\n\n let mut idx: Vec<&[u8]> = idx_map\n\n .iter()\n\n .filter(|(_, &v)| v > 1)\n\n .map(|(&k, _)| k)\n\n .collect();\n\n idx.sort_by_key(|e| {\n", "file_path": "blobby/examples/convert.rs", "rank": 14, "score": 167799.84068584885 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\nfn encode_string(input: &[u8], padded: bool, hi_bytes: (u8, u8)) -> String {\n\n let elen = encoded_len(input, padded);\n\n let mut dst = vec![0u8; elen];\n\n let res = encode(input, &mut dst, padded, hi_bytes).expect(\"encoding error\");\n\n\n\n debug_assert_eq!(elen, res.len());\n\n debug_assert!(str::from_utf8(&dst).is_ok());\n\n\n\n // SAFETY: `dst` is fully written and contains only valid one-byte UTF-8 chars\n\n unsafe { String::from_utf8_unchecked(dst) }\n\n}\n\n\n\n/// Get the Base64-encoded length of the given byte slice.\n\n///\n\n/// WARNING: this function will return 0 for lengths 
greater than `usize::MAX/4`!\n\n#[inline]\n\nconst fn encoded_len(bytes: &[u8], padded: bool) -> usize {\n\n // TODO: replace with `unwrap_or` on stabilization\n\n match encoded_len_inner(bytes.len(), padded) {\n\n Some(v) => v,\n", "file_path": "base64ct/src/lib.rs", "rank": 15, "score": 166979.0669281027 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let args: Vec<String> = env::args().skip(1).collect();\n\n let is_encode = match args[0].as_str() {\n\n \"encode\" => true,\n\n \"decode\" => false,\n\n _ => Err(\"unknown mode\")?,\n\n };\n\n let in_path = args[1].as_str();\n\n let out_path = args[2].as_str();\n\n let in_file = BufReader::new(File::open(in_path)?);\n\n let out_file = BufWriter::new(File::create(out_path)?);\n\n\n\n let n = if is_encode {\n\n encode(in_file, out_file)?\n\n } else {\n\n decode(in_file, out_file)?\n\n };\n\n\n\n println!(\"Processed {} record(s)\", n);\n\n\n\n Ok(())\n\n}\n", "file_path": "blobby/examples/convert.rs", "rank": 16, "score": 155498.089321266 }, { "content": "#[inline(always)]\n\nfn to_blocks<N: ArrayLength<u8>>(data: &[u8]) -> (&[Block<N>], &[u8]) {\n\n let nb = data.len() / N::USIZE;\n\n let (left, right) = data.split_at(nb * N::USIZE);\n\n let p = left.as_ptr() as *const Block<N>;\n\n // SAFETY: we guarantee that `blocks` does not point outside of `data`\n\n let blocks = unsafe { slice::from_raw_parts(p, nb) };\n\n (blocks, right)\n\n}\n\n\n", "file_path": "block-buffer/src/lib.rs", "rank": 17, "score": 138791.39576929278 }, { "content": "#[inline(always)]\n\nfn match_range_ct(input: u8, range: Range<u8>, ret_on_match: i16) -> i16 {\n\n // Compute exclusive range from inclusive one\n\n let start = range.start as i16 - 1;\n\n let end = range.end as i16 + 1;\n\n\n\n (((start - input as i16) & (input as i16 - end)) >> 8) & ret_on_match\n\n}\n\n\n\n/// Match a a byte equals a specified value.\n", "file_path": "base64ct/src/lib.rs", "rank": 18, "score": 137739.49410838622 }, { "content": "fn encode_vlq(mut 
val: usize, buf: &mut [u8; 4]) -> &[u8] {\n\n macro_rules! step {\n\n ($n:expr) => {\n\n buf[$n] = if $n == 3 {\n\n (val & (VAL_MASK as usize)) as u8\n\n } else {\n\n val -= 1;\n\n NEXT_MASK | (val & (VAL_MASK as usize)) as u8\n\n };\n\n val >>= 7;\n\n if val == 0 {\n\n return &buf[$n..];\n\n }\n\n };\n\n }\n\n\n\n step!(3);\n\n step!(2);\n\n step!(1);\n\n step!(0);\n\n panic!(\"integer is too big\")\n\n}\n\n\n", "file_path": "blobby/examples/convert.rs", "rank": 19, "score": 133369.87361749535 }, { "content": "/// Trait for padding messages divided into blocks\n\npub trait Padding<BlockSize: ArrayLength<u8>> {\n\n /// Pads `block` filled with data up to `pos` (i.e length of a message\n\n /// stored in the block is equal to `pos`).\n\n ///\n\n /// # Panics\n\n /// If `pos` is bigger than `BlockSize`. Most paddin algorithms also\n\n /// panic if they are equal.\n\n fn pad(block: &mut Block<BlockSize>, pos: usize);\n\n\n\n /// Unpad data in the `block`.\n\n ///\n\n /// Returns `Err(UnpadError)` if the block containts malformed padding.\n\n fn unpad(block: &Block<BlockSize>) -> Result<&[u8], UnpadError>;\n\n}\n\n\n\n/// Pad block with zeros.\n\n///\n\n/// ```\n\n/// use block_padding::{ZeroPadding, Padding};\n\n/// use generic_array::{GenericArray, typenum::U8};\n", "file_path": "block-padding/src/lib.rs", "rank": 20, "score": 130797.90616347965 }, { "content": "/// Obtain the length of a collection.\n\npub trait Length {\n\n /// Get the length of this collection.\n\n fn len(&self) -> usize;\n\n\n\n /// Is the collection empty?\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 21, "score": 124002.36173582505 }, { "content": "#[inline(always)]\n\nfn match_gt_ct(input: i16, threshold: u8, ret_on_match: i16) -> i16 {\n\n ((threshold as i16 - input) >> 8) & ret_on_match\n\n}\n\n\n\n/// Match that a byte falls within a provided range.\n", "file_path": "base64ct/src/lib.rs", "rank": 22, "score": 
123630.16567475232 }, { "content": "#[allow(clippy::type_complexity)]\n\n#[inline(always)]\n\nfn to_blocks_mut<N: ArrayLength<u8>, M: ArrayLength<Block<N>>>(\n\n data: &mut [u8],\n\n) -> (&mut [ParBlock<N, M>], &mut [Block<N>], &mut [u8]) {\n\n let b_size = N::USIZE;\n\n let pb_size = N::USIZE * M::USIZE;\n\n let npb = match M::USIZE {\n\n 1 => 0,\n\n _ => data.len() / pb_size,\n\n };\n\n let (pb_slice, data) = data.split_at_mut(npb * pb_size);\n\n let nb = data.len() / b_size;\n\n let (b_slice, data) = data.split_at_mut(nb * b_size);\n\n let pb_ptr = pb_slice.as_mut_ptr() as *mut ParBlock<N, M>;\n\n let b_ptr = b_slice.as_mut_ptr() as *mut Block<N>;\n\n // SAFETY: we guarantee that the resulting values do not overlap and do not\n\n // point outside of the input slice\n\n unsafe {\n\n (\n\n slice::from_raw_parts_mut(pb_ptr, npb),\n\n slice::from_raw_parts_mut(b_ptr, nb),\n\n data,\n\n )\n\n }\n\n}\n\n\n", "file_path": "block-buffer/src/lib.rs", "rank": 23, "score": 121425.20353799977 }, { "content": "fn unreachable<S, B: ArrayLength<u8>>(_: &mut S) -> ParBlock<B, U1> {\n\n unreachable!();\n\n}\n", "file_path": "block-buffer/src/lib.rs", "rank": 24, "score": 119582.23898671243 }, { "content": "#[inline(always)]\n\nfn set(a: &mut [u8], b: &[u8]) {\n\n a.copy_from_slice(b);\n\n}\n\n\n", "file_path": "block-buffer/src/lib.rs", "rank": 25, "score": 115623.22325016848 }, { "content": "#[inline(always)]\n\nfn xor(a: &mut [u8], b: &[u8]) {\n\n debug_assert_eq!(a.len(), b.len());\n\n a.iter_mut().zip(b.iter()).for_each(|(a, &b)| *a ^= b);\n\n}\n\n\n", "file_path": "block-buffer/src/lib.rs", "rank": 26, "score": 115623.22325016848 }, { "content": "#[inline(never)]\n\nfn get_raw_data() -> Vec<u8> {\n\n (0..RAW_LEN).map(|i| i as u8).collect()\n\n}\n\n\n", "file_path": "base64ct/benches/mod.rs", "rank": 27, "score": 106330.83666697139 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nenum Asn1Type {\n\n /// ASN.1 `BIT STRING`\n\n BitString,\n\n\n\n /// ASN.1 
`OCTET STRING`\n\n OctetString,\n\n}\n\n\n\nimpl Asn1Type {\n\n /// Parse ASN.1 type\n\n pub fn new(s: &str) -> Self {\n\n match s {\n\n \"bit-string\" => Self::BitString,\n\n \"octet-string\" => Self::OctetString,\n\n _ => panic!(\"unrecognized ASN.1 type: {}\", s),\n\n }\n\n }\n\n}\n", "file_path": "der/derive/src/lib.rs", "rank": 28, "score": 99647.57468800482 }, { "content": "fn decode<R: BufRead, W: Write>(mut reader: R, mut writer: W) -> io::Result<usize> {\n\n let mut data = Vec::new();\n\n reader.read_to_end(&mut data)?;\n\n let res: Vec<_> = BlobIterator::new(&data)\n\n .map_err(|e| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"invalid blobby data: {:?}\", e),\n\n )\n\n })?\n\n .collect();\n\n for blob in res.iter() {\n\n let blob = blob.map_err(|e| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"invalid blobby data: {:?}\", e),\n\n )\n\n })?;\n\n writer.write_all(hex::encode(blob).as_bytes())?;\n\n writer.write_all(b\"\\n\")?;\n\n }\n\n Ok(res.len())\n\n}\n\n\n", "file_path": "blobby/examples/convert.rs", "rank": 29, "score": 98577.54064174872 }, { "content": "/// Types with an associated ASN.1 [`Tag`].\n\npub trait Tagged {\n\n /// ASN.1 tag\n\n const TAG: Tag;\n\n}\n\n\n", "file_path": "der/src/traits.rs", "rank": 30, "score": 97870.48918209327 }, { "content": "/// Encoding trait.\n\npub trait Encodable {\n\n /// Compute the length of this value in bytes when encoded as ASN.1 DER.\n\n fn encoded_len(&self) -> Result<Length>;\n\n\n\n /// Encode this value as ASN.1 DER using the provided [`Encoder`].\n\n fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()>;\n\n\n\n /// Encode this value to the provided byte slice, returning a sub-slice\n\n /// containing the encoded message.\n\n fn encode_to_slice<'a>(&self, buf: &'a mut [u8]) -> Result<&'a [u8]> {\n\n let mut encoder = Encoder::new(buf);\n\n self.encode(&mut encoder)?;\n\n Ok(encoder.finish()?)\n\n }\n\n\n\n /// Encode this message as ASN.1 DER, appending it 
to the provided\n\n /// byte vector.\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn encode_to_vec(&self, buf: &mut Vec<u8>) -> Result<Length> {\n", "file_path": "der/src/traits.rs", "rank": 31, "score": 97676.39914454054 }, { "content": "#[proc_macro]\n\npub fn hex(input: TokenStream) -> TokenStream {\n\n let ts = TokenStream::from_iter(TokenTreeIter::new(input));\n\n TokenStream::from(TokenTree::Group(Group::new(Delimiter::Bracket, ts)))\n\n}\n", "file_path": "hex-literal/src/lib.rs", "rank": 32, "score": 95831.82743627214 }, { "content": "/// Decode the provided Base64 string into the provided destination buffer.\n\nfn decode(\n\n src: impl AsRef<[u8]>,\n\n dst: &mut [u8],\n\n padded: bool,\n\n hi_bytes: (u8, u8),\n\n) -> Result<&[u8], Error> {\n\n let mut src = src.as_ref();\n\n\n\n let mut err = if padded {\n\n let (unpadded_len, e) = decode_padding(src)?;\n\n src = &src[..unpadded_len];\n\n e\n\n } else {\n\n 0\n\n };\n\n\n\n let dlen = decoded_len(src.len());\n\n\n\n if dlen > dst.len() {\n\n return Err(Error::InvalidLength);\n", "file_path": "base64ct/src/lib.rs", "rank": 33, "score": 95734.14287218897 }, { "content": "#[inline]\n\nfn encode<'a>(\n\n src: &[u8],\n\n dst: &'a mut [u8],\n\n padded: bool,\n\n hi_bytes: (u8, u8),\n\n) -> Result<&'a str, InvalidLengthError> {\n\n let elen = match encoded_len_inner(src.len(), padded) {\n\n Some(v) => v,\n\n None => return Err(InvalidLengthError),\n\n };\n\n\n\n if elen > dst.len() {\n\n return Err(InvalidLengthError);\n\n }\n\n\n\n let dst = &mut dst[..elen];\n\n\n\n if padded {\n\n for (s, d) in src.chunks(3).zip(dst.chunks_mut(4)) {\n\n if s.len() == 3 {\n", "file_path": "base64ct/src/lib.rs", "rank": 34, "score": 93884.67589414795 }, { "content": "/// Decode Base64-encoded string in-place.\n\nfn decode_in_place(\n\n mut buf: &mut [u8],\n\n padded: bool,\n\n hi_bytes: (u8, u8),\n\n) -> Result<&[u8], InvalidEncodingError> {\n\n // TODO: eliminate unsafe code when 
compiler will be smart enough to\n\n // eliminate bound checks, see: https://github.com/rust-lang/rust/issues/80963\n\n let mut err = if padded {\n\n let (unpadded_len, e) = decode_padding(buf)?;\n\n buf = &mut buf[..unpadded_len];\n\n e\n\n } else {\n\n 0\n\n };\n\n\n\n let dlen = decoded_len(buf.len());\n\n let full_chunks = buf.len() / 4;\n\n\n\n for chunk in 0..full_chunks {\n\n // SAFETY: `p3` and `p4` point inside `buf`, while they may overlap,\n", "file_path": "base64ct/src/lib.rs", "rank": 35, "score": 93362.48888665129 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct RootArcs(u8);\n\n\n\nimpl RootArcs {\n\n /// Create [`RootArcs`] from the first and second arc values represented\n\n /// as `u32` integers.\n\n fn new(first_arc: u32, second_arc: u32) -> Result<Self> {\n\n if first_arc > FIRST_ARC_MAX || second_arc > SECOND_ARC_MAX {\n\n return Err(Error);\n\n }\n\n\n\n let byte = (first_arc * (SECOND_ARC_MAX + 1)) as u8 | second_arc as u8;\n\n Ok(Self(byte))\n\n }\n\n\n\n /// Get the value of the first arc\n\n fn first_arc(self) -> u32 {\n\n self.0 as u32 / (SECOND_ARC_MAX + 1)\n\n }\n\n\n\n /// Get the value of the second arc\n", "file_path": "const-oid/src/lib.rs", "rank": 36, "score": 92624.42386060834 }, { "content": "/// Decoding trait.\n\npub trait Decodable<'a>: Sized {\n\n /// Attempt to decode this message using the provided decoder.\n\n fn decode(decoder: &mut Decoder<'a>) -> Result<Self>;\n\n\n\n /// Parse `Self` from the provided byte slice.\n\n fn from_bytes(bytes: &'a [u8]) -> Result<Self> {\n\n let mut decoder = Decoder::new(bytes);\n\n let result = Self::decode(&mut decoder)?;\n\n decoder.finish(result)\n\n }\n\n}\n\n\n\nimpl<'a, T> Decodable<'a> for T\n\nwhere\n\n T: TryFrom<Any<'a>, Error = Error>,\n\n{\n\n fn decode(decoder: &mut Decoder<'a>) -> Result<T> {\n\n Any::decode(decoder)\n\n .and_then(Self::try_from)\n\n .or_else(|e| decoder.error(e.kind()))\n\n }\n\n}\n\n\n", "file_path": "der/src/traits.rs", "rank": 37, 
"score": 91679.5255961385 }, { "content": "/// Messages encoded as an ASN.1 `SEQUENCE`.\n\n///\n\n/// This wraps up a common pattern for ASN.1 encoding.\n\n///\n\n/// Types which impl this trait receive blanket impls for the [`Decodable`],\n\n/// [`Encodable`], and [`Tagged`] traits.\n\npub trait Message<'a>: Decodable<'a> {\n\n /// Call the provided function with a slice of [`Encodable`] trait objects\n\n /// representing the fields of this message.\n\n ///\n\n /// This method uses a callback because structs with fields which aren't\n\n /// directly [`Encodable`] may need to construct temporary values from\n\n /// their fields prior to encoding.\n\n fn fields<F, T>(&self, f: F) -> Result<T>\n\n where\n\n F: FnOnce(&[&dyn Encodable]) -> Result<T>;\n\n}\n\n\n\nimpl<'a, M> Encodable for M\n\nwhere\n\n M: Message<'a>,\n\n{\n\n fn encoded_len(&self) -> Result<Length> {\n\n self.fields(sequence::encoded_len)\n\n }\n\n\n", "file_path": "der/src/traits.rs", "rank": 38, "score": 90224.18895939979 }, { "content": "#[bench]\n\nfn encode_bench(b: &mut Bencher) {\n\n let mut buf = get_b64_data().into_bytes();\n\n let raw_data = get_raw_data();\n\n b.iter(|| {\n\n let out = base64ct::encode(&raw_data, &mut buf).unwrap();\n\n test::black_box(out);\n\n });\n\n b.bytes = RAW_LEN as u64;\n\n}\n", "file_path": "base64ct/benches/mod.rs", "rank": 39, "score": 84267.70577809331 }, { "content": "#[bench]\n\nfn decode_bench(b: &mut Bencher) {\n\n let b64_data = get_b64_data();\n\n let mut buf = get_raw_data();\n\n b.iter(|| {\n\n let out = base64ct::decode(&b64_data, &mut buf).unwrap();\n\n test::black_box(out);\n\n });\n\n b.bytes = RAW_LEN as u64;\n\n}\n\n\n", "file_path": "base64ct/benches/mod.rs", "rank": 40, "score": 84241.84351192454 }, { "content": "#[bench]\n\nfn decode_in_place_bench(b: &mut Bencher) {\n\n let mut b64_data = get_b64_data().into_bytes();\n\n b.iter(|| {\n\n // since it works on the same buffer over and over,\n\n // almost always `out` will be an error\n\n let out 
= base64ct::decode_in_place(&mut b64_data);\n\n let _ = test::black_box(out);\n\n });\n\n b.bytes = RAW_LEN as u64;\n\n}\n\n\n", "file_path": "base64ct/benches/mod.rs", "rank": 41, "score": 82270.2325462416 }, { "content": "/// Compute the length of a value when encoded in base 128\n\nfn base128_len(n: u32) -> usize {\n\n match n {\n\n 0..=0x7f => 1,\n\n 0x80..=0x3fff => 2,\n\n 0x4000..=0x1fffff => 3,\n\n 0x200000..=0x1fffffff => 4,\n\n _ => 5,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::ObjectIdentifier;\n\n use std::{convert::TryFrom, string::ToString};\n\n\n\n /// Example OID value\n\n const EXAMPLE_OID: ObjectIdentifier = ObjectIdentifier::new(&[1, 2, 840, 10045, 2, 1]);\n\n\n\n /// Example OID encoded as ASN.1 BER/DER\n\n const EXAMPLE_OID_BER: &[u8] = &[42, 134, 72, 206, 61, 2, 1];\n", "file_path": "const-oid/src/lib.rs", "rank": 42, "score": 82125.45042629483 }, { "content": "#[inline]\n\nfn decoded_len(input_len: usize) -> usize {\n\n // overflow-proof computation of `(3*n)/4`\n\n let k = input_len / 4;\n\n let l = input_len - 4 * k;\n\n 3 * k + (3 * l) / 4\n\n}\n\n\n", "file_path": "base64ct/src/lib.rs", "rank": 43, "score": 80410.63236986304 }, { "content": "/// Custom derive for `der::Message`\n\nfn derive_der_message(s: Structure<'_>) -> TokenStream {\n\n let ast = s.ast();\n\n\n\n // TODO(tarcieri): enum support\n\n match &ast.data {\n\n syn::Data::Struct(data) => DeriveStruct::derive(s, data, &ast.generics),\n\n other => panic!(\"can't derive `Message` on: {:?}\", other),\n\n }\n\n}\n\n\n", "file_path": "der/derive/src/lib.rs", "rank": 44, "score": 67122.03741389755 }, { "content": "/// Truncate the collection to the provided length.\n\npub trait Truncate {\n\n /// Truncate this buffer to the given number of elements.\n\n ///\n\n /// If `len` is bigger than the current number of elements (or the total\n\n /// capacity of the buffer) no changes are made to the contents.\n\n fn truncate(&mut self, len: usize);\n\n}\n\n\n", "file_path": 
"collectable/src/lib.rs", "rank": 45, "score": 61426.43533724547 }, { "content": "/// Double and inverse double over GF(2^n).\n\n///\n\n/// This trait is implemented for 64, 128 and 256 bit block sizes. Big-endian\n\n/// order is used.\n\npub trait Dbl {\n\n /// Double block. (alternatively: multiply block by x)\n\n ///\n\n /// If most significant bit of the block equals to zero will return\n\n /// `block<<1`, otherwise `(block<<1)^C`, where `C` is the non-leading\n\n /// coefficients of the lexicographically first irreducible degree-b binary\n\n /// polynomial with the minimal number of ones.\n\n fn dbl(self) -> Self;\n\n\n\n /// Reverse double block. (alternatively: divbide block by x)\n\n ///\n\n /// If least significant bit of the block equals to zero will return\n\n /// `block>>1`, otherwise `(block>>1)^(1<<n)^(C>>1)`\n\n fn inv_dbl(self) -> Self;\n\n}\n\n\n\nimpl Dbl for GenericArray<u8, U8> {\n\n fn dbl(self) -> Self {\n\n let mut val: u64 = unsafe { mem::transmute_copy(&self) };\n\n val = val.to_be();\n", "file_path": "dbl/src/lib.rs", "rank": 46, "score": 61426.09016652702 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub trait ToPublicKey {\n\n /// Serialize a [`PublicKeyDocument`] containing a SPKI-encoded public key.\n\n fn to_public_key_der(&self) -> PublicKeyDocument;\n\n\n\n /// Serialize this public key as PEM-encoded SPKI.\n\n #[cfg(feature = \"pem\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"pem\")))]\n\n fn to_public_key_pem(&self) -> String {\n\n self.to_public_key_der().to_pem()\n\n }\n\n\n\n /// Write ASN.1 DER-encoded public key to the given path\n\n #[cfg(feature = \"std\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\n fn write_public_key_der_file(&self, path: impl AsRef<Path>) -> Result<()> {\n\n self.to_public_key_der().write_der_file(path)\n\n }\n\n\n\n /// Write ASN.1 DER-encoded public key to the given path\n\n #[cfg(all(feature = \"pem\", feature = \"std\"))]\n\n 
#[cfg_attr(docsrs, doc(cfg(feature = \"pem\")))]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\n fn write_public_key_pem_file(&self, path: impl AsRef<Path>) -> Result<()> {\n\n self.to_public_key_der().write_pem_file(path)\n\n }\n\n}\n", "file_path": "pkcs8/src/traits.rs", "rank": 47, "score": 60295.698149034186 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub trait ToPrivateKey {\n\n /// Serialize a [`PrivateKeyDocument`] containing a PKCS#8-encoded private key.\n\n fn to_pkcs8_der(&self) -> PrivateKeyDocument;\n\n\n\n /// Serialize this private key as PEM-encoded PKCS#8.\n\n #[cfg(feature = \"pem\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"pem\")))]\n\n fn to_pkcs8_pem(&self) -> Zeroizing<String> {\n\n self.to_pkcs8_der().to_pem()\n\n }\n\n\n\n /// Write ASN.1 DER-encoded PKCS#8 private key to the given path\n\n #[cfg(feature = \"std\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\n fn write_pkcs8_der_file(&self, path: impl AsRef<Path>) -> Result<()> {\n\n self.to_pkcs8_der().write_der_file(path)\n\n }\n\n\n\n /// Write ASN.1 DER-encoded PKCS#8 private key to the given path\n\n #[cfg(all(feature = \"pem\", feature = \"std\"))]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"pem\")))]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\n fn write_pkcs8_pem_file(&self, path: impl AsRef<Path>) -> Result<()> {\n\n self.to_pkcs8_der().write_pem_file(path)\n\n }\n\n}\n\n\n\n/// Serialize a public key object to a SPKI-encoded document.\n", "file_path": "pkcs8/src/traits.rs", "rank": 48, "score": 60295.698149034186 }, { "content": "/// Collection types implement all of the traits in this crate.\n\npub trait Collection<T>:\n\n AsRef<[T]> + AsMut<[T]> + Default + Length + Truncate + TryExtend<T> + TryPush<T>\n\n{\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 49, "score": 58292.55608166884 }, { "content": "/// [`TryCollect`] is an extension to [`Iterator`] which allows for 
performing\n\n/// a fallible collection into a collection type.\n\npub trait TryCollect<A> {\n\n fn try_collect<B>(&mut self) -> Result<B, B::Error>\n\n where\n\n B: TryFromIterator<A>;\n\n}\n\n\n\nimpl<A, T> TryCollect<A> for T\n\nwhere\n\n T: Iterator<Item = A>,\n\n{\n\n fn try_collect<B>(&mut self) -> Result<B, B::Error>\n\n where\n\n B: TryFromIterator<A>,\n\n {\n\n B::try_from_iter(self)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nmod vec_impls {\n", "file_path": "collectable/src/lib.rs", "rank": 50, "score": 58292.33237959203 }, { "content": "/// Fallible equivalent of [`core::iter::Extend`] - extends a collection\n\n/// with the contents of an iterator, but with the option to return an error\n\n/// in the event the container's capacity has been exceeded.\n\n///\n\n/// [`core::iter::Extend`]: https://doc.rust-lang.org/core/iter/trait.Extend.html\n\npub trait TryExtend<A> {\n\n /// Error type.\n\n type Error;\n\n\n\n /// Try to extend the collection from the given iterator.\n\n fn try_extend<T>(&mut self, iter: T) -> Result<(), Self::Error>\n\n where\n\n T: IntoIterator<Item = A>;\n\n\n\n /// Try to extend the collection from the given slice.\n\n fn try_extend_from_slice(&mut self, slice: &[A]) -> Result<(), Self::Error>\n\n where\n\n A: Clone,\n\n {\n\n self.try_extend(slice.iter().cloned())\n\n }\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 51, "score": 58291.4337959033 }, { "content": "/// Parse a private key object from a PKCS#8 encoded document.\n\npub trait FromPrivateKey: Sized {\n\n /// Parse the [`PrivateKeyInfo`] from a PKCS#8-encoded document.\n\n fn from_pkcs8_private_key_info(private_key_info: PrivateKeyInfo<'_>) -> Result<Self>;\n\n\n\n /// Deserialize PKCS#8 private key from ASN.1 DER-encoded data\n\n /// (binary format).\n\n fn from_pkcs8_der(bytes: &[u8]) -> Result<Self> {\n\n PrivateKeyInfo::from_der(bytes).and_then(Self::from_pkcs8_private_key_info)\n\n }\n\n\n\n /// Deserialize PKCS#8 private key from a 
[`PrivateKeyDocument`].\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn from_pkcs8_doc(doc: &PrivateKeyDocument) -> Result<Self> {\n\n Self::from_pkcs8_private_key_info(doc.private_key_info())\n\n }\n\n\n\n /// Deserialize PKCS#8-encoded private key from PEM.\n\n ///\n\n /// Keys in this format begin with the following delimiter:\n", "file_path": "pkcs8/src/traits.rs", "rank": 52, "score": 57233.49655683384 }, { "content": "/// Parse a public key object from an encoded SPKI document.\n\npub trait FromPublicKey: Sized {\n\n /// Parse [`SubjectPublicKeyInfo`] into a public key object.\n\n fn from_spki(spki: SubjectPublicKeyInfo<'_>) -> Result<Self>;\n\n\n\n /// Deserialize object from ASN.1 DER-encoded [`SubjectPublicKeyInfo`]\n\n /// (binary format).\n\n fn from_public_key_der(bytes: &[u8]) -> Result<Self> {\n\n SubjectPublicKeyInfo::from_der(bytes).and_then(Self::from_spki)\n\n }\n\n\n\n /// Deserialize PKCS#8 private key from a [`PrivateKeyDocument`].\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn from_public_key_doc(doc: &PublicKeyDocument) -> Result<Self> {\n\n Self::from_spki(doc.spki())\n\n }\n\n\n\n /// Deserialize PEM-encoded [`SubjectPublicKeyInfo`].\n\n ///\n\n /// Keys in this format begin with the following delimiter:\n", "file_path": "pkcs8/src/traits.rs", "rank": 53, "score": 57233.49655683384 }, { "content": "/// Try to push an element onto a collection\n\npub trait TryPush<T> {\n\n /// Try to push an element onto a collection.\n\n ///\n\n /// Returns the original element if it's full.\n\n fn try_push(&mut self, item: T) -> Result<(), T>;\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 54, "score": 57229.502323807894 }, { "content": "#[test]\n\nfn debug_formatting() {\n\n let s = format!(\"{:?}\", Foo { secret: 42 });\n\n assert_eq!(s, \"Foo { ... 
}\");\n\n}\n", "file_path": "opaque-debug/tests/mod.rs", "rank": 55, "score": 56028.19512272854 }, { "content": "/// Try to build a collection type from an [`Iterator`].\n\n///\n\n/// Fallible in the event the capacity of the underlying container type is\n\n/// exceeded.\n\npub trait TryFromIterator<A>: Sized {\n\n /// Error type.\n\n type Error;\n\n\n\n /// Try to create a new collection from the given iterator, potentially\n\n /// returning an error if the underlying collection's capacity is exceeded.\n\n fn try_from_iter<T>(iter: T) -> Result<Self, Self::Error>\n\n where\n\n T: IntoIterator<Item = A>;\n\n}\n\n\n\nimpl<A, C: Default + TryExtend<A>> TryFromIterator<A> for C {\n\n type Error = <Self as TryExtend<A>>::Error;\n\n\n\n fn try_from_iter<T>(iter: T) -> Result<Self, Self::Error>\n\n where\n\n T: IntoIterator<Item = A>,\n\n {\n\n let mut collection = Self::default();\n\n collection.try_extend(iter)?;\n\n Ok(collection)\n\n }\n\n}\n\n\n", "file_path": "collectable/src/lib.rs", "rank": 56, "score": 55523.70850626438 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn parse_rsa_2048_pem() {\n\n let doc: PublicKeyDocument = RSA_2048_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(doc.as_ref(), RSA_2048_DER_EXAMPLE);\n\n\n\n // Ensure `PublicKeyDocument` parses successfully\n\n let spki = SubjectPublicKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n assert_eq!(doc.spki(), spki);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 57, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"std\")]\n\nfn read_der_file() {\n\n let pkcs8_doc = PublicKeyDocument::read_der_file(\"tests/examples/p256-pub.der\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 58, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"std\")]\n\nfn read_der_file() {\n\n let pkcs8_doc = 
PrivateKeyDocument::read_der_file(\"tests/examples/p256-priv.der\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 59, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn serialize_ed25519_pem() {\n\n let pk = SubjectPublicKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_pem();\n\n assert_eq!(ED25519_PEM_EXAMPLE.trim_end(), pk_encoded);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 60, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn parse_rsa_2048_pem() {\n\n let pkcs8_doc: PrivateKeyDocument = RSA_2048_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), RSA_2048_DER_EXAMPLE);\n\n\n\n // Ensure `PrivateKeyDocument` parses successfully\n\n let pk_info = PrivateKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n assert_eq!(pkcs8_doc.private_key_info().algorithm, pk_info.algorithm);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 61, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn serialize_ed25519_der() {\n\n let pk = PrivateKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_der();\n\n assert_eq!(ED25519_DER_EXAMPLE, pk_encoded.as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 62, "score": 54991.17263530972 }, { "content": "#[test]\n\nfn parse_rsa_2048_der() {\n\n let spki = SubjectPublicKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(spki.algorithm.oid, \"1.2.840.113549.1.1.1\".parse().unwrap());\n\n assert!(spki.algorithm.parameters.unwrap().is_null());\n\n\n\n assert_eq!(spki.subject_public_key, 
&hex!(\"3082010A0282010100B6C42C515F10A6AAF282C63EDBE24243A170F3FA2633BD4833637F47CA4F6F36E03A5D29EFC3191AC80F390D874B39E30F414FCEC1FCA0ED81E547EDC2CD382C76F61C9018973DB9FA537972A7C701F6B77E0982DFC15FC01927EE5E7CD94B4F599FF07013A7C8281BDF22DCBC9AD7CABB7C4311C982F58EDB7213AD4558B332266D743AED8192D1884CADB8B14739A8DADA66DC970806D9C7AC450CB13D0D7C575FB198534FC61BC41BC0F0574E0E0130C7BBBFBDFDC9F6A6E2E3E2AFF1CBEAC89BA57884528D55CFB08327A1E8C89F4E003CF2888E933241D9D695BCBBACDC90B44E3E095FA37058EA25B13F5E295CBEAC6DE838AB8C50AF61E298975B872F0203010001\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 63, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn parse_ed25519_pem() {\n\n let doc: PublicKeyDocument = ED25519_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(doc.as_ref(), ED25519_DER_EXAMPLE);\n\n\n\n // Ensure `PublicKeyDocument` parses successfully\n\n let spki = SubjectPublicKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n assert_eq!(doc.spki(), spki);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 64, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn serialize_ed25519_der() {\n\n let pk = SubjectPublicKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_der();\n\n assert_eq!(ED25519_DER_EXAMPLE, pk_encoded.as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 65, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn serialize_rsa_2048_der() {\n\n let pk = PrivateKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_der();\n\n assert_eq!(RSA_2048_DER_EXAMPLE, pk_encoded.as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 66, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn serialize_rsa_2048_pem() {\n\n let pk = SubjectPublicKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n let pk_encoded = 
pk.to_pem();\n\n assert_eq!(RSA_2048_PEM_EXAMPLE.trim_end(), pk_encoded);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 67, "score": 54991.17263530972 }, { "content": "#[test]\n\nfn parse_ed25519_der() {\n\n let pk = PrivateKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(pk.algorithm.oid, \"1.3.101.112\".parse().unwrap());\n\n assert_eq!(pk.algorithm.parameters, None);\n\n\n\n // Extracted with:\n\n // $ openssl asn1parse -in tests/examples/ed25519-priv.der -inform der\n\n assert_eq!(\n\n pk.private_key,\n\n &hex!(\"042017ED9C73E9DB649EC189A612831C5FC570238207C1AA9DFBD2C53E3FF5E5EA85\")[..]\n\n );\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 68, "score": 54991.17263530972 }, { "content": "#[test]\n\nfn parse_ed25519_der() {\n\n let spki = SubjectPublicKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(spki.algorithm.oid, \"1.3.101.112\".parse().unwrap());\n\n assert_eq!(spki.algorithm.parameters, None);\n\n assert_eq!(\n\n spki.subject_public_key,\n\n &hex!(\"4D29167F3F1912A6F7ADFA293A051A15C05EC67B8F17267B1C5550DCE853BD0D\")[..]\n\n );\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 69, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn parse_ed25519_pem() {\n\n let pkcs8_doc: PrivateKeyDocument = ED25519_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), ED25519_DER_EXAMPLE);\n\n\n\n // Ensure `PrivateKeyDocument` parses successfully\n\n let pk_info = PrivateKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n assert_eq!(pkcs8_doc.private_key_info().algorithm, pk_info.algorithm);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 70, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(all(feature = \"pem\", feature = \"std\"))]\n\nfn read_pem_file() {\n\n let pkcs8_doc = PublicKeyDocument::read_pem_file(\"tests/examples/p256-pub.pem\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), 
EC_P256_DER_EXAMPLE);\n\n}\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 71, "score": 54991.17263530972 }, { "content": "#[test]\n\nfn parse_rsa_2048_der() {\n\n let pk = PrivateKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(pk.algorithm.oid, \"1.2.840.113549.1.1.1\".parse().unwrap());\n\n assert!(pk.algorithm.parameters.unwrap().is_null());\n\n\n\n // Extracted with:\n\n // $ openssl asn1parse -in tests/examples/rsa2048-priv.der -inform der\n\n assert_eq!(pk.private_key, &hex!(\"308204A30201000282010100B6C42C515F10A6AAF282C63EDBE24243A170F3FA2633BD4833637F47CA4F6F36E03A5D29EFC3191AC80F390D874B39E30F414FCEC1FCA0ED81E547EDC2CD382C76F61C9018973DB9FA537972A7C701F6B77E0982DFC15FC01927EE5E7CD94B4F599FF07013A7C8281BDF22DCBC9AD7CABB7C4311C982F58EDB7213AD4558B332266D743AED8192D1884CADB8B14739A8DADA66DC970806D9C7AC450CB13D0D7C575FB198534FC61BC41BC0F0574E0E0130C7BBBFBDFDC9F6A6E2E3E2AFF1CBEAC89BA57884528D55CFB08327A1E8C89F4E003CF2888E933241D9D695BCBBACDC90B44E3E095FA37058EA25B13F5E295CBEAC6DE838AB8C50AF61E298975B872F0203010001028201007ECC8362C0EDB0741164215E22F74AB9D91BA06900700CF63690E5114D8EE6BDCFBB2E3F9614692A677A083F168A5E52E5968E6407B9D97C6E0E4064F82DA0B758A14F17B9B7D41F5F48E28D6551704F56E69E7AA9FA630FC76428C06D25E455DCFC55B7AC2B4F76643FDED3FE15FF78ABB27E65ACC4AAD0BDF6DB27EF60A6910C5C4A085ED43275AB19C1D997A32C6EFFCE7DF2D1935F6E601EEDE161A12B5CC27CA21F81D2C99C3D1EA08E90E3053AB09BEFA724DEF0D0C3A3C1E9740C0D9F76126A149EC0AA7D8078205484254D951DB07C4CF91FB6454C096588FD5924DBABEB359CA2025268D004F9D66EB3D6F7ADC1139BAD40F16DDE639E11647376C102818100DCC061242D4E92AFAEE72AC513CA65B9F77036F9BD7E0E6E61461A7EF7654225EC153C7E5C31A6157A6E5A13FF6E178E8758C1CB33D9D6BBE3179EF18998E422ECDCBED78F4ECFDBE5F4FCD8AEC2C9D0DC86473CA9BD16D9D238D21FB5DDEFBEB143CA61D0BD6AA8D91F33A097790E9640DBC91085DC5F26343BA3138F6B2D6702818100D3F314757E40E954836F92BE24236AF2F0DA04A34653C180AF67E960086D93FDE65CB23EFD9D09374762F5981E361849AF68CDD75394FF6A4E06EB69B209E4228DB2DFA70E40F7F9750
A528176647B788D0E5777A2CB8B22E3CD267FF70B4F3B02D3AAFB0E18C590A564B03188B0AA5FC48156B07622214243BD1227EFA7F2F902818100CE68B7AC1B0D100D636E55488753C5C09843FDB390E2705DF7689457C9BD8D9765E30978617E2EFC8048F4C324206DB86087B654E97BB3D464E7EE3F8CD83FE10436F7DF18E9A963C4E64911D67EDE34042F2E26E3D3A1AD346ADAD6B9B7F67708CB094E62DEE9FF4D5D6669AF988AF2255D1CE8ED317C6A7D8691DA354D12DB02818025F6E5944220286B4DFBBF4235C0EE5843D2198091895120D6CA7B200B826D3ECE738E2E00498FAC0A2A6CA969C7F0C3CA1AB0BC40297132BE7538D7BEDF4CB0EFC6B98EF7DBA54F56AA99AABCE534C49C27947D4678C51C63C78C7CE1687231B4C8EB587AE6EF0480CBAF4FC0173CFD587A7E67AF515FB9B9DE75111839722902818031995406D406207CADEAEA35B38D040C5F8A9A1AE0827E9ED06B153D83B6821935B4B36A82BE9D56C791B58C27271A5793D53A1D657C08997960B1433E5171987F452F144A7C72306D63E1D3FFC0B71B75AB08F2E45A482E988451CBE478E12EB228D07456C924B66F6CED048D853F533E31A68614F1C3CE6D8EC9983CE72AF7\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 72, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn serialize_ed25519_pem() {\n\n let pk = PrivateKeyInfo::from_der(ED25519_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_pem();\n\n assert_eq!(ED25519_PEM_EXAMPLE.trim_end(), &*pk_encoded);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 73, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(all(feature = \"pem\", feature = \"std\"))]\n\nfn read_pem_file() {\n\n let pkcs8_doc = PrivateKeyDocument::read_pem_file(\"tests/examples/p256-priv.pem\").unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n}\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 74, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn serialize_rsa_2048_pem() {\n\n let pk = PrivateKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_pem();\n\n assert_eq!(RSA_2048_PEM_EXAMPLE.trim_end(), &*pk_encoded);\n\n}\n\n\n", "file_path": 
"pkcs8/tests/private_key.rs", "rank": 75, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn serialize_rsa_2048_der() {\n\n let pk = SubjectPublicKeyInfo::from_der(RSA_2048_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_der();\n\n assert_eq!(RSA_2048_DER_EXAMPLE, pk_encoded.as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 76, "score": 54991.17263530972 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn parse_ec_p256_pem() {\n\n let doc: PublicKeyDocument = EC_P256_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n\n\n // Ensure `PublicKeyDocument` parses successfully\n\n let spki = SubjectPublicKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n assert_eq!(doc.spki(), spki);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 77, "score": 54014.78773899982 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn parse_ec_p256_pem() {\n\n let pkcs8_doc: PrivateKeyDocument = EC_P256_PEM_EXAMPLE.parse().unwrap();\n\n assert_eq!(pkcs8_doc.as_ref(), EC_P256_DER_EXAMPLE);\n\n\n\n // Ensure `PrivateKeyDocument` parses successfully\n\n let pk_info = PrivateKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n assert_eq!(pkcs8_doc.private_key_info().algorithm, pk_info.algorithm);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 78, "score": 54014.78773899982 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn serialize_ec_p256_der() {\n\n let pk = PrivateKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_der();\n\n assert_eq!(EC_P256_DER_EXAMPLE, pk_encoded.as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 79, "score": 54014.78773899982 }, { "content": "#[test]\n\nfn parse_ec_p256_der() {\n\n let spki = SubjectPublicKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(spki.algorithm.oid, \"1.2.840.10045.2.1\".parse().unwrap());\n\n\n\n assert_eq!(\n\n 
spki.algorithm.parameters.unwrap().oid().unwrap(),\n\n \"1.2.840.10045.3.1.7\".parse().unwrap()\n\n );\n\n\n\n assert_eq!(spki.subject_public_key, &hex!(\"041CACFFB55F2F2CEFD89D89EB374B2681152452802DEEA09916068137D839CF7FC481A44492304D7EF66AC117BEFE83A8D08F155F2B52F9F618DD447029048E0F\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 80, "score": 54014.78773899982 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn serialize_ec_p256_pem() {\n\n let pk = SubjectPublicKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_pem();\n\n assert_eq!(EC_P256_PEM_EXAMPLE.trim_end(), pk_encoded);\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 81, "score": 54014.78773899982 }, { "content": "#[test]\n\nfn parse_ec_p256_der() {\n\n let pk = PrivateKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n\n\n assert_eq!(pk.algorithm.oid, \"1.2.840.10045.2.1\".parse().unwrap());\n\n\n\n assert_eq!(\n\n pk.algorithm.parameters.unwrap().oid().unwrap(),\n\n \"1.2.840.10045.3.1.7\".parse().unwrap()\n\n );\n\n\n\n // Extracted with:\n\n // $ openssl asn1parse -in tests/examples/p256-priv.der -inform der\n\n assert_eq!(pk.private_key, &hex!(\"306B020101042069624171561A63340DE0E7D869F2A05492558E1A04868B6A9F854A866788188DA144034200041CACFFB55F2F2CEFD89D89EB374B2681152452802DEEA09916068137D839CF7FC481A44492304D7EF66AC117BEFE83A8D08F155F2B52F9F618DD447029048E0F\")[..]);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 82, "score": 54014.78773899982 }, { "content": "#[test]\n\n#[cfg(feature = \"pem\")]\n\nfn serialize_ec_p256_pem() {\n\n let pk = PrivateKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_pem();\n\n assert_eq!(EC_P256_PEM_EXAMPLE.trim_end(), &*pk_encoded);\n\n}\n\n\n", "file_path": "pkcs8/tests/private_key.rs", "rank": 83, "score": 54014.78773899982 }, { "content": "#[test]\n\n#[cfg(feature = \"alloc\")]\n\nfn serialize_ec_p256_der() {\n\n let pk = 
SubjectPublicKeyInfo::from_der(EC_P256_DER_EXAMPLE).unwrap();\n\n let pk_encoded = pk.to_der();\n\n assert_eq!(EC_P256_DER_EXAMPLE, pk_encoded.as_ref());\n\n}\n\n\n", "file_path": "pkcs8/tests/public_key.rs", "rank": 84, "score": 54014.78773899982 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"big-uint\")))]\n\npub trait BigUIntSize: Unsigned {}\n\n\n\nmacro_rules! impl_size {\n\n ($($int:ident),+) => {\n\n $(impl BigUIntSize for typenum::consts::$int {})+\n\n };\n\n}\n\n\n\n// Sizes supported by the current implementation (1 - 512 bytes)\n\nimpl_size!(\n\n U1, U2, U3, U4, U5, U6, U7, U8, U9, U10, U11, U12, U13, U14, U15, U16, U17, U18, U19, U20, U21,\n\n U22, U23, U24, U25, U26, U27, U28, U29, U30, U31, U32, U33, U34, U35, U36, U37, U38, U39, U40,\n\n U41, U42, U43, U44, U45, U46, U47, U48, U49, U50, U51, U52, U53, U54, U55, U56, U57, U58, U59,\n\n U60, U61, U62, U63, U64, U65, U66, U67, U68, U69, U70, U71, U72, U73, U74, U75, U76, U77, U78,\n\n U79, U80, U81, U82, U83, U84, U85, U86, U87, U88, U89, U90, U91, U92, U93, U94, U95, U96, U97,\n\n U98, U99, U100, U101, U102, U103, U104, U105, U106, U107, U108, U109, U110, U111, U112, U113,\n\n U114, U115, U116, U117, U118, U119, U120, U121, U122, U123, U124, U125, U126, U127, U128, U129,\n\n U130, U131, U132, U133, U134, U135, U136, U137, U138, U139, U140, U141, U142, U143, U144, U145,\n\n U146, U147, U148, U149, U150, U151, U152, U153, U154, U155, U156, U157, U158, U159, U160, U161,\n\n U162, U163, U164, U165, U166, U167, U168, U169, U170, U171, U172, U173, U174, U175, U176, U177,\n", "file_path": "der/src/asn1/big_uint.rs", "rank": 85, "score": 53564.05574128969 }, { "content": "#[inline(never)]\n\nfn get_b64_data() -> String {\n\n (0..B64_LEN)\n\n .map(|i| match (i % 64) as u8 {\n\n v @ 0..=25 => (v + 'A' as u8) as char,\n\n v @ 26..=51 => (v - 26 + 'a' as u8) as char,\n\n v @ 52..=61 => (v - 52 + '0' as u8) as char,\n\n 62 => '+',\n\n _ => '/',\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": 
"base64ct/benches/mod.rs", "rank": 86, "score": 53117.43268085839 }, { "content": "#[inline(always)]\n\nfn to_be(val: &mut [u64]) {\n\n for v in val.iter_mut() {\n\n *v = v.to_be();\n\n }\n\n}\n\n\n\nimpl Dbl for GenericArray<u8, U16> {\n\n fn dbl(self) -> Self {\n\n let mut val: [u64; 2] = unsafe { mem::transmute_copy(&self) };\n\n to_be(&mut val);\n\n\n\n let b = val[1] >> 63;\n\n let a = val[0] >> 63;\n\n\n\n val[0] <<= 1;\n\n val[0] ^= b;\n\n val[1] <<= 1;\n\n val[1] ^= a * C128;\n\n\n\n to_be(&mut val);\n", "file_path": "dbl/src/lib.rs", "rank": 87, "score": 51238.20590282798 }, { "content": "/// Parse the first lifetime of the \"self\" type of the custom derive\n\n///\n\n/// Returns `None` if there is no first lifetime.\n\nfn parse_lifetime(generics: &Generics) -> Option<&Lifetime> {\n\n generics\n\n .lifetimes()\n\n .next()\n\n .map(|ref lt_ref| &lt_ref.lifetime)\n\n}\n\n\n\n/// Attributes of a field\n", "file_path": "der/derive/src/lib.rs", "rank": 88, "score": 46281.03040630775 }, { "content": "/// Strips any outer `Delimiter::None` groups from the input,\n\n/// returning a `TokenStream` consisting of the innermost\n\n/// non-empty-group `TokenTree`.\n\n/// This is used to handle a proc macro being invoked\n\n/// by a `macro_rules!` expansion.\n\n/// See https://github.com/rust-lang/rust/issues/72545 for background\n\nfn ignore_groups(mut input: TokenStream) -> TokenStream {\n\n let mut tokens = input.clone().into_iter();\n\n loop {\n\n if let Some(TokenTree::Group(group)) = tokens.next() {\n\n if group.delimiter() == Delimiter::None {\n\n input = group.stream();\n\n continue;\n\n }\n\n }\n\n return input;\n\n }\n\n}\n\n\n", "file_path": "hex-literal/src/lib.rs", "rank": 89, "score": 44676.70003557 }, { "content": "//! 
Length calculations for encoded ASN.1 DER values\n\n\n\nuse crate::{Decodable, Decoder, Encodable, Encoder, Error, ErrorKind, Result};\n\nuse core::{convert::TryFrom, fmt, ops::Add};\n\n\n\n/// ASN.1-encoded length.\n\n///\n\n/// # Limits\n\n///\n\n/// Presently constrained to the range `0..=65535`\n\n#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)]\n\npub struct Length(u16);\n\n\n\nimpl Length {\n\n /// Return a length of `0`.\n\n pub const fn zero() -> Self {\n\n Length(0)\n\n }\n\n\n\n /// Get the maximum length supported by this crate\n", "file_path": "der/src/length.rs", "rank": 97, "score": 38829.74679199608 }, { "content": " 0x100..=0xFFFF => {\n\n encoder.byte(0x82)?;\n\n encoder.byte((self.0 >> 8) as u8)?;\n\n encoder.byte((self.0 & 0xFF) as u8)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Length {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Length;\n\n use crate::{Decodable, Encodable};\n\n\n", "file_path": "der/src/length.rs", "rank": 98, "score": 38827.19239239861 }, { "content": "//! ASN.1 `ANY` type.\n\n\n\nuse crate::{\n\n BitString, ByteSlice, Decodable, Decoder, Encodable, Encoder, Error, ErrorKind, Header, Length,\n\n Null, OctetString, Result, Sequence, Tag,\n\n};\n\nuse core::convert::{TryFrom, TryInto};\n\n\n\n#[cfg(feature = \"oid\")]\n\nuse crate::ObjectIdentifier;\n\n\n\n/// ASN.1 `ANY` type: represents any explicitly tagged ASN.1 value.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub struct Any<'a> {\n\n /// Tag representing the type of the encoded value\n\n pub(crate) tag: Tag,\n\n\n\n /// Inner value encoded as bytes\n\n pub(crate) value: ByteSlice<'a>,\n\n}\n", "file_path": "der/src/asn1/any.rs", "rank": 99, "score": 44.99709834581692 } ]
Rust
src/xpath/tokenizer/mod.rs
James-LG/Skyscraper
e6fcecc20762d5c4e5d6d22d77cb1da083dd5871
mod helpers; mod tokens; use crate::vecpointer::VecPointerRef; pub use tokens::Token; use thiserror::Error; #[derive(Error, Debug)] pub enum LexError {} pub fn lex(text: &str) -> Result<Vec<Token>, LexError> { let mut symbols: Vec<Token> = Vec::new(); let chars: Vec<char> = text.chars().collect(); let mut pointer = VecPointerRef::new(&chars); while pointer.has_next() { if let Some(s) = helpers::is_double_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_number(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_wildcard(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_assignment_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_at_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_add_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_minus_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_greater_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_less_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_colon(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_identifier(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_text(&mut pointer) { symbols.push(s); } else { if let Some(c) = pointer.current() { if !c.is_whitespace() { eprintln!("Unknown XPath 
symbol {}", c); } } pointer.next(); } } Ok(symbols) } #[cfg(test)] mod tests { use super::*; #[test] fn lex_works1() { let text = "//bookstore/book[1]/page[last()-1]"; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket, Token::Number(1.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("page")), Token::OpenSquareBracket, Token::Identifier(String::from("last")), Token::OpenBracket, Token::CloseBracket, Token::MinusSign, Token::Number(1.0), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works2() { let text = "/bookstore/book[price>35]/price"; let result = lex(text).unwrap(); let expected = vec![ Token::Slash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket, Token::Identifier(String::from("price")), Token::GreaterThanSign, Token::Number(35.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("price")), ]; assert_eq!(expected, result); } #[test] fn lex_works3() { let text = r###"//a[@hello="world"]"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("a")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works_alphanumeric_identifier() { let text = r###"//h1[@hello="world"]/h2"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("h2")), ]; assert_eq!(expected, result); } 
#[test] fn lex_works_double_colon() { let text = r###"//h1/parent::div"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::Slash, Token::Identifier(String::from("parent")), Token::DoubleColon, Token::Identifier(String::from("div")), ]; assert_eq!(expected, result); } }
mod helpers; mod tokens; use crate::vecpointer::VecPointerRef; pub use tokens::Token; use thiserror::Error; #[derive(Error, Debug)] pub enum LexError {} pub fn lex(text: &str) -> Result<Vec<Token>, LexError> { let mut symbols: Vec<Token> = Vec::new(); let chars: Vec<char> = text.chars().collect(); let mut pointer = VecPointerRef::new(&chars); while pointer.has_next() { if let Some(s) = helpers::is_double_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_number(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_wildcard(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_assignment_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_at_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_add_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_minus_sign(&mut pointer) { s
Token::Number(1.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("page")), Token::OpenSquareBracket, Token::Identifier(String::from("last")), Token::OpenBracket, Token::CloseBracket, Token::MinusSign, Token::Number(1.0), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works2() { let text = "/bookstore/book[price>35]/price"; let result = lex(text).unwrap(); let expected = vec![ Token::Slash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket, Token::Identifier(String::from("price")), Token::GreaterThanSign, Token::Number(35.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("price")), ]; assert_eq!(expected, result); } #[test] fn lex_works3() { let text = r###"//a[@hello="world"]"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("a")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works_alphanumeric_identifier() { let text = r###"//h1[@hello="world"]/h2"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("h2")), ]; assert_eq!(expected, result); } #[test] fn lex_works_double_colon() { let text = r###"//h1/parent::div"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::Slash, Token::Identifier(String::from("parent")), Token::DoubleColon, Token::Identifier(String::from("div")), ]; assert_eq!(expected, result); } }
ymbols.push(s); } else if let Some(s) = helpers::is_greater_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_less_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_colon(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_identifier(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_text(&mut pointer) { symbols.push(s); } else { if let Some(c) = pointer.current() { if !c.is_whitespace() { eprintln!("Unknown XPath symbol {}", c); } } pointer.next(); } } Ok(symbols) } #[cfg(test)] mod tests { use super::*; #[test] fn lex_works1() { let text = "//bookstore/book[1]/page[last()-1]"; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket,
random
[ { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Number [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_number(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some(c) = pointer.current() {\n\n if c.is_digit(10) {\n\n let mut num = c.to_string();\n\n while let Some(c) = pointer.next() {\n\n if c.is_digit(10) {\n\n num.push(*c);\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n // Check for decimal values\n\n if let Some('.') = pointer.current() {\n\n num.push('.');\n\n while let Some(c) = pointer.next() {\n\n if c.is_digit(10) {\n\n num.push(*c);\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n return Some(Token::Number(num.parse::<f32>().unwrap()));\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 0, "score": 234295.94418351247 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a AtSign [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_at_sign(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('@') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::AtSign);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 1, "score": 234295.94418351247 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Slash [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_slash(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('/') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::Slash);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 2, "score": 234295.94418351253 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently 
pointing to a Wildcard [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_wildcard(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('*') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::Wildcard);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 3, "score": 234295.94418351247 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Text [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_text(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some(c) = pointer.current() {\n\n if c == &'\"' || c == &'\\'' {\n\n let delimiter = *c;\n\n let mut text = String::from(\"\");\n\n\n\n while let Some(c) = pointer.next() {\n\n if c == &delimiter {\n\n // Move to next character before exiting.\n\n pointer.next();\n\n return Some(Token::Text(text));\n\n } else {\n\n text.push(*c);\n\n }\n\n }\n\n\n\n pointer.back_add(text.len() + 1);\n\n }\n\n }\n\n None\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 4, "score": 234295.94418351247 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to an Identifier [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_identifier(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some(c) = pointer.current() {\n\n // Identifier must start with a letter\n\n if c.is_alphabetic() {\n\n let mut id = c.to_string();\n\n\n\n while let Some(c) = pointer.next() {\n\n // Identifier can contain letters and numbers\n\n if c.is_alphanumeric() {\n\n id.push(*c);\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n return Some(Token::Identifier(id));\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 5, 
"score": 234295.94418351253 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a DoubleDot [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_dot(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('.') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::Dot);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 6, "score": 234295.8910804668 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Comment [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// Comment is defined as `<!--{{String}}-->`\n\npub fn is_comment(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let (Some('<'), Some('!'), Some('-'), Some('-')) = (\n\n pointer.current(),\n\n pointer.peek(),\n\n pointer.peek_add(2),\n\n pointer.peek_add(3),\n\n ) {\n\n pointer.next_add(3); // peeked before, move up now\n\n\n\n let mut text: Vec<char> = Vec::new();\n\n while let Some(c) = pointer.next() {\n\n let c = *c;\n\n if is_end_comment(pointer) {\n\n let name: String = text.into_iter().collect();\n\n return Some(Token::Comment(name));\n\n }\n\n text.push(c);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 7, "score": 234295.78691988002 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a LessThanSign [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_less_than_sign(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('<') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::LessThanSign);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 8, "score": 229904.6642889691 }, { 
"content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a DoubleSlash [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_double_slash(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let (Some('/'), Some('/')) = (pointer.current(), pointer.peek()) {\n\n // Peeked before, move up now.\n\n pointer.next_add(2);\n\n return Some(Token::DoubleSlash);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 9, "score": 229904.6642889691 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a AddSign [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_add_sign(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('+') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::AddSign);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 10, "score": 229904.6642889691 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a GreaterThanSign [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_greater_than_sign(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('>') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::GreaterThanSign);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 11, "score": 229904.66428896907 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a DoubleDot [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_double_dot(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let (Some('.'), Some('.')) = (pointer.current(), 
pointer.peek()) {\n\n // Peeked before, move up now.\n\n pointer.next_add(2);\n\n return Some(Token::DoubleDot);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 12, "score": 229904.66428896907 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a MinusSign [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_minus_sign(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('-') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::MinusSign);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 13, "score": 229904.6642889691 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a CloseBracket [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_close_bracket(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some(')') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::CloseBracket);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 14, "score": 229904.6642889691 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a DoubleColon [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_double_colon(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let (Some(':'), Some(':')) = (pointer.current(), pointer.peek()) {\n\n pointer.next_add(2);\n\n return Some(Token::DoubleColon);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 15, "score": 229904.6642889691 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a AssignmentSign [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next 
symbol, otherwise it will not change the pointer.\n\npub fn is_assignment_sign(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('=') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::AssignmentSign);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 16, "score": 229904.6642889691 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a OpenBracket [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_open_bracket(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('(') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::OpenBracket);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 17, "score": 229904.66428896907 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a AssignmentSign [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// AssignmentSign is defined as `=`\n\npub fn is_assignment_sign(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('=') = pointer.current() {\n\n pointer.next(); // move up for later\n\n return Some(Token::AssignmentSign);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 18, "score": 229904.50904443176 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a TagClose [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// TagClose is defined as `>`\n\npub fn is_tag_close(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('>') = pointer.current() {\n\n pointer.next(); // move up for later\n\n return Some(Token::TagClose);\n\n }\n\n None\n\n}\n\n\n", "file_path": 
"src/html/tokenizer/helpers.rs", "rank": 19, "score": 229904.5090444317 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to an EndTag [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// EndTag is defined as `</{{String}}`\n\npub fn is_end_tag(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let (Some('<'), Some('/')) = (pointer.current(), pointer.peek()) {\n\n pointer.next(); // peeked before, move up now\n\n\n\n let mut name: Vec<char> = Vec::new();\n\n loop {\n\n match pointer.next() {\n\n Some(' ') | Some('>') => break,\n\n Some(c) => {\n\n name.push(*c);\n\n }\n\n None => break,\n\n };\n\n }\n\n let name: String = name.into_iter().collect();\n\n\n\n return Some(Token::EndTag(name));\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 20, "score": 229904.4586076743 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a StartTag [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// StartTag is defined as `<{{String}}`\n\n///\n\n/// Has additional checks to make sure it is not an end tag.\n\npub fn is_start_tag(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let (Some('<'), Some(c2)) = (pointer.current(), pointer.peek()) {\n\n if *c2 != '/' {\n\n let mut name: Vec<char> = Vec::new();\n\n loop {\n\n match pointer.next() {\n\n Some(' ') | Some('>') | Some('/') => break,\n\n Some(c) => {\n\n name.push(*c);\n\n }\n\n None => break,\n\n };\n\n }\n\n let name: String = name.into_iter().collect();\n\n\n\n return Some(Token::StartTag(name));\n\n }\n\n\n\n return None;\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 21, "score": 229904.16890733602 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a 
OpenSquareBracket [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_open_square_bracket(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some('[') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::OpenSquareBracket);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 22, "score": 225762.71517637654 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a CloseSquareBracket [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_close_square_bracket(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some(']') = pointer.current() {\n\n pointer.next();\n\n return Some(Token::CloseSquareBracket);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 23, "score": 225762.71517637657 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a TagCloseAndEnd [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// TagCloseAndEnd is defined as `/>`\n\npub fn is_tag_close_and_end(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let (Some('/'), Some('>')) = (pointer.current(), pointer.peek()) {\n\n pointer.next_add(2); // move up for later\n\n return Some(Token::TagCloseAndEnd);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 24, "score": 225762.5121099035 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Identifier [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// Identifier is defined as any text inside a tag definition.\n\npub fn is_identifier(pointer: &mut VecPointerRef<char>, has_open_tag: 
bool) -> Option<Token> {\n\n if !has_open_tag {\n\n return None;\n\n }\n\n\n\n if let Some(c) = pointer.current() {\n\n if !INAVLID_ID_CHARS.contains(c) {\n\n let mut text: Vec<char> = vec![*c];\n\n loop {\n\n match pointer.next() {\n\n Some(c) if INAVLID_ID_CHARS.contains(c) => break,\n\n Some(c) => {\n\n text.push(*c);\n\n }\n\n None => break,\n\n };\n\n }\n\n let name: String = text.into_iter().collect();\n\n\n\n return Some(Token::Identifier(name));\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 25, "score": 215424.2571574201 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Text [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// Text is defined as any text outside a tag definition.\n\npub fn is_text(pointer: &mut VecPointerRef<char>, has_open_tag: bool) -> Option<Token> {\n\n if has_open_tag {\n\n return None;\n\n }\n\n\n\n if let Some(c) = pointer.current() {\n\n if !INAVLID_TEXT_CHARS.contains(c) {\n\n let start_index = pointer.index;\n\n let mut has_non_whitespace = !c.is_whitespace();\n\n\n\n let mut buffer: Vec<char> = vec![*c];\n\n loop {\n\n match pointer.next() {\n\n Some(c) if INAVLID_TEXT_CHARS.contains(c) => break,\n\n Some(c) => {\n\n if !c.is_whitespace() {\n\n has_non_whitespace = true;\n\n }\n\n\n\n buffer.push(*c);\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 26, "score": 215424.2571574201 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Literal [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// Literal is defined as `\"{{String}}\"` inside a tag definition.\n\npub fn is_literal(pointer: &mut VecPointerRef<char>, has_open_tag: bool) -> Option<Token> {\n\n if !has_open_tag {\n\n return None;\n\n }\n\n\n\n if let Some(c) = pointer.current() {\n\n let c = *c;\n\n if c == 
'\"' || c == '\\'' {\n\n let start_quote = c;\n\n let mut text: Vec<char> = Vec::new();\n\n let mut escape = false;\n\n loop {\n\n match pointer.next() {\n\n Some('\\\\') => escape = true,\n\n Some(c) => {\n\n // If this quote matches the starting quote, break the loop\n\n if !escape && (*c == '\"' || *c == '\\'') && start_quote == *c {\n\n break;\n\n }\n\n // Otherwise push the different quote to the text\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 27, "score": 215424.2571574201 }, { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to the end of a Comment [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// This is a helper method not used directly in the lexer.\n\n///\n\n/// The end of a comment is defined as `-->`\n\npub fn is_end_comment(pointer: &mut VecPointerRef<char>) -> bool {\n\n if let (Some('-'), Some('-'), Some('>')) =\n\n (pointer.current(), pointer.peek(), pointer.peek_add(2))\n\n {\n\n pointer.next_add(3); // peeked before, move up now; 2+1 to end after comment\n\n\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 28, "score": 213295.9986693925 }, { "content": "/// Tokenize a string of HTML into Symbols used for parsing later on.\n\npub fn lex(text: &str) -> Result<Vec<Token>, LexError> {\n\n let mut symbols: Vec<Token> = Vec::new();\n\n\n\n let chars: Vec<char> = text.chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n let mut has_open_tag = false;\n\n\n\n while pointer.has_next() {\n\n if let Some(s) = helpers::is_comment(&mut pointer) {\n\n symbols.push(s);\n\n } else if let Some(s) = helpers::is_start_tag(&mut pointer) {\n\n has_open_tag = true;\n\n symbols.push(s);\n\n } else if let Some(s) = helpers::is_end_tag(&mut pointer) {\n\n has_open_tag = true;\n\n symbols.push(s);\n\n } else if let Some(s) = helpers::is_tag_close_and_end(&mut pointer) 
{\n\n has_open_tag = false;\n\n symbols.push(s);\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 29, "score": 180081.053321831 }, { "content": "/// Parses an index selector.\n\n/// \n\n/// Example: `[1]`\n\nfn parse_index(symbols: &mut Peekable<std::vec::IntoIter<Token>>) -> Option<usize> {\n\n if let Some(Token::Number(num)) =\n\n symbols.next_if(|expected| matches!(expected, &Token::Number(_)))\n\n {\n\n if let Some(Token::CloseSquareBracket) = symbols.next_if_eq(&Token::CloseSquareBracket) {\n\n return Some(num as usize);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/xpath/parse.rs", "rank": 31, "score": 109995.23823074688 }, { "content": "pub fn benchmark_html_parse(c: &mut Criterion) {\n\n c.bench_function(\"html parse\", |b| b.iter(|| html::parse(HTML)));\n\n}\n", "file_path": "benches/html_benchmark.rs", "rank": 32, "score": 109203.53957174251 }, { "content": "pub fn benchmark_xpath_parse(c: &mut Criterion) {\n\n c.bench_function(\"xpath parse\", |b| {\n\n b.iter(|| {\n\n xpath::parse(\"//div[@class='BorderGrid-cell']/div[@class=' text-small']/a\").unwrap();\n\n })\n\n });\n\n}\n", "file_path": "benches/xpath_benchmark.rs", "rank": 33, "score": 109203.53957174251 }, { "content": "/// Search for an HTML tag matching the given search parameters in the given list of nodes.\n\npub fn search(\n\n search_params: &XpathSearchItem,\n\n document: &HtmlDocument,\n\n searchable_nodes: &DocumentNodeSet,\n\n) -> Result<DocumentNodeSet, ApplyError> {\n\n let mut matches = DocumentNodeSet::new();\n\n\n\n for node_id in searchable_nodes.iter() {\n\n if let Some(node) = document.get_html_node(node_id) {\n\n match node {\n\n HtmlNode::Tag(rtag) => match &search_params.search_node_type {\n\n XpathSearchNodeType::Element(tag_name) => {\n\n if &rtag.name == tag_name {\n\n if let Some(query) = &search_params.query {\n\n if query.check_node(rtag) {\n\n matches.insert(*node_id);\n\n }\n\n } else {\n\n matches.insert(*node_id);\n\n }\n", "file_path": 
"src/xpath/mod.rs", "rank": 34, "score": 107367.93715965028 }, { "content": "/// Parse an Xpath expression into an Xpath object.\n\n/// \n\n/// # Example: parse an XPath expression\n\n/// ```rust\n\n/// use skyscraper::xpath::{self, parse::ParseError};\n\n/// # fn main() -> Result<(), ParseError> {\n\n/// let expr = xpath::parse(\"//div[@class='hi']/parent::span//a\")?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn parse(text: &str) -> Result<Xpath, ParseError> {\n\n let elements = inner_parse(text)?;\n\n let mut xpath_items: Vec<XpathSearchItem> = Vec::new();\n\n\n\n let mut cur_search_item: XpathSearchItem = Default::default();\n\n let mut is_first_element = true;\n\n let mut was_last_element_search = false;\n\n for element in elements.into_iter() {\n\n was_last_element_search = false;\n\n match element {\n\n XpathElement::SearchRoot => {\n\n if !is_first_element {\n\n xpath_items.push(cur_search_item);\n\n cur_search_item = Default::default();\n\n }\n\n was_last_element_search = true;\n\n }\n\n XpathElement::SearchAll => {\n\n if !is_first_element {\n\n xpath_items.push(cur_search_item);\n", "file_path": "src/xpath/parse.rs", "rank": 35, "score": 105153.85941941137 }, { "content": "/// Parse the HTML text into a document object.\n\npub fn parse(text: &str) -> Result<HtmlDocument, ParseError> {\n\n let tokens = tokenizer::lex(text)?;\n\n\n\n let mut arena: Arena<HtmlNode> = Arena::new();\n\n let mut root_key_o: Option<NodeId> = None;\n\n let mut cur_key_o: Option<NodeId> = None;\n\n let mut has_tag_open = false;\n\n\n\n let mut tokens = tokens.into_iter().peekable();\n\n\n\n while let Some(token) = tokens.next() {\n\n match token {\n\n Token::StartTag(tag_name) => {\n\n // Skip the special doctype tag so a proper root is selected.\n\n if is_doctype(&tag_name, &mut tokens)? 
{\n\n continue;\n\n }\n\n\n\n if has_tag_open {\n\n return Err(ParseError::OpenTagBeforePreviousClosed);\n", "file_path": "src/html/parse.rs", "rank": 36, "score": 102861.02833842927 }, { "content": "fn get_mut_tree_node(key: Option<NodeId>, arena: &mut Arena<HtmlNode>) -> &mut Node<HtmlNode> {\n\n let key = key.expect(\"Attempted to get a node on a none value\");\n\n arena.get_mut(key).expect(\"Node not found in arena\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashMap;\n\n\n\n use super::*;\n\n\n\n fn assert_tag(\n\n document: &HtmlDocument,\n\n doc_node: DocumentNode,\n\n tag_name: &str,\n\n attributes: Option<HashMap<&str, &str>>,\n\n ) -> Vec<DocumentNode> {\n\n let html_node = document.get_html_node(&doc_node).unwrap();\n\n\n\n match html_node {\n", "file_path": "src/html/parse.rs", "rank": 37, "score": 69601.58907684212 }, { "content": "/// First stage of parsing, converts tokens into more structured [XpathElements](XpathElement).\n\nfn inner_parse(text: &str) -> Result<Vec<XpathElement>, ParseError> {\n\n let mut symbols = tokenizer::lex(text)?.into_iter().peekable();\n\n let mut elements: Vec<XpathElement> = Vec::new();\n\n\n\n while let Some(symbol) = symbols.next() {\n\n match symbol {\n\n Token::Slash => elements.push(XpathElement::SearchRoot),\n\n Token::DoubleSlash => elements.push(XpathElement::SearchAll),\n\n Token::OpenSquareBracket => {\n\n if let Some(num) = parse_index(&mut symbols) {\n\n elements.push(XpathElement::Index(num));\n\n } else {\n\n let query = parse_query(&mut symbols)?;\n\n elements.push(XpathElement::Query(query));\n\n }\n\n }\n\n Token::Identifier(identifier) => {\n\n elements.push(XpathElement::Tag(identifier));\n\n }\n\n Token::DoubleColon => {\n\n parse_axis_selector(&mut elements)?;\n\n }\n\n _ => continue,\n\n }\n\n }\n\n\n\n Ok(elements)\n\n}\n\n\n", "file_path": "src/xpath/parse.rs", "rank": 38, "score": 65516.84331463209 }, { "content": "fn apply_axis(\n\n is_first_search: bool,\n\n document: 
&HtmlDocument,\n\n axis: &XpathAxes,\n\n searchable_nodes: &mut DocumentNodeSet,\n\n) {\n\n match axis {\n\n XpathAxes::Child => {\n\n // Child axis is implied for first search of Xpath expression.\n\n // For example when given the root node we should not move down to a child\n\n // on the first search so that we can match on the root node itself rather than\n\n // only on its children. E.g. in `/root/something`, we shouldn't move down on the\n\n // leading slash or else we will have passed the root node already.\n\n if !is_first_search {\n\n *searchable_nodes = get_all_children(document, searchable_nodes);\n\n }\n\n }\n\n XpathAxes::DescendantOrSelf => {\n\n *searchable_nodes = get_all_descendants_or_self(document, searchable_nodes);\n\n }\n\n XpathAxes::Parent => {\n\n *searchable_nodes = get_all_parents(document, searchable_nodes);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/xpath/mod.rs", "rank": 39, "score": 60729.403013724324 }, { "content": "/// Parses tree selectors. Triggered when a DoubleColon (Symbol)[Symbol] is found and expects a tag to\n\n/// have preceded it which will now be converted to an axis.\n\n/// \n\n/// Example: `/div/parent::div`\n\nfn parse_axis_selector(elements: &mut Vec<XpathElement>) -> Result<(), ParseError> {\n\n let last_item = elements.pop().ok_or(ParseError::MissingAxis)?;\n\n let axis = match last_item {\n\n XpathElement::Tag(last_tag) => match last_tag.as_str() {\n\n \"parent\" => XpathAxes::Parent,\n\n _ => return Err(ParseError::UnknownAxisType(last_tag)),\n\n },\n\n _ => return Err(ParseError::MissingAxis),\n\n };\n\n elements.push(XpathElement::Axis(axis));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/xpath/parse.rs", "rank": 40, "score": 59687.102314460855 }, { "content": "/// Get all descendants and self for all the given matched nodes.\n\nfn get_all_descendants_or_self(\n\n document: &HtmlDocument,\n\n matched_nodes: &DocumentNodeSet,\n\n) -> DocumentNodeSet {\n\n let mut descendant_or_self_nodes: DocumentNodeSet = 
DocumentNodeSet::new();\n\n\n\n for node_id in matched_nodes {\n\n descendant_or_self_nodes.insert(*node_id);\n\n let mut children: DocumentNodeSet = node_id.children(document).collect();\n\n if !children.is_empty() {\n\n children.insert_all(get_all_descendants_or_self(document, &children).into_iter())\n\n }\n\n descendant_or_self_nodes.insert_all(children.into_iter());\n\n }\n\n\n\n descendant_or_self_nodes\n\n}\n\n\n", "file_path": "src/xpath/mod.rs", "rank": 41, "score": 58364.79709945514 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn is_number_should_be_some_with_trailing_text() {\n\n let chars: Vec<char> = \"1234abc\".chars().collect();\n\n let symbol = is_number(&mut VecPointerRef::new(&chars)).unwrap();\n\n\n\n if let Token::Number(f) = symbol {\n\n assert_eq!(1234f32, f);\n\n } else {\n\n panic!(\"Expected number symbol\")\n\n }\n\n }\n\n\n\n #[test]\n\n fn is_number_should_capture_decimal() {\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 42, "score": 56122.71332134117 }, { "content": " let chars: Vec<char> = r###\"\"world\"\"###.chars().collect();\n\n let pointer = &mut VecPointerRef::new(&chars);\n\n let symbol = is_text(pointer);\n\n\n\n if let Some(Token::Text(text)) = symbol {\n\n assert_eq!(\"world\", text);\n\n matches!(pointer.next(), None);\n\n } else {\n\n panic!(\"Expected text symbol\")\n\n }\n\n }\n\n\n\n #[test]\n\n fn is_text_should_capture_single_quoted_text() {\n\n let chars: Vec<char> = r###\"'world'\"###.chars().collect();\n\n let pointer = &mut VecPointerRef::new(&chars);\n\n let symbol = is_text(pointer);\n\n\n\n if let Some(Token::Text(text)) = symbol {\n\n assert_eq!(\"world\", text);\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 43, "score": 56121.33855748954 }, { "content": "mod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn is_start_tag_finds_and_moves_pointer() {\n\n // arrange\n\n let chars: Vec<char> = \"<a>\".chars().collect();\n\n let mut pointer = 
VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_start_tag(&mut pointer).unwrap();\n\n\n\n // assert\n\n assert_eq!(Token::StartTag(String::from(\"a\")), result);\n\n assert_eq!(2, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_start_tag_does_not_move_pointer_if_not_found() {\n\n // arrange\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 44, "score": 56120.80248073284 }, { "content": " let chars: Vec<char> = \"1234.5678\".chars().collect();\n\n let symbol = is_number(&mut VecPointerRef::new(&chars)).unwrap();\n\n\n\n if let Token::Number(f) = symbol {\n\n assert_eq!(1234.5678f32, f);\n\n } else {\n\n panic!(\"Expected number symbol\")\n\n }\n\n }\n\n\n\n #[test]\n\n fn is_number_should_be_none_with_leading_text() {\n\n let chars: Vec<char> = \"abc1234\".chars().collect();\n\n let symbol = is_number(&mut VecPointerRef::new(&chars));\n\n\n\n assert!(symbol.is_none());\n\n }\n\n\n\n #[test]\n\n fn is_text_should_capture_quoted_text() {\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 45, "score": 56119.95606489802 }, { "content": " // arrange\n\n let chars: Vec<char> = \"foo bar\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_identifier(&mut pointer, true).unwrap();\n\n\n\n // assert\n\n assert_eq!(Token::Identifier(String::from(\"foo\")), result);\n\n assert_eq!(3, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_identifier_not_move_pointer_if_not_found() {\n\n // arrange\n\n let chars: Vec<char> = \" \".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_identifier(&mut pointer, true);\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 46, "score": 56117.98441098129 }, { "content": " matches!(pointer.next(), None);\n\n } else {\n\n panic!(\"Expected text symbol\")\n\n }\n\n }\n\n\n\n #[test]\n\n fn is_text_should_not_capture_mismatched_quoted_text() {\n\n let chars: Vec<char> = 
r###\"\"world'\"###.chars().collect();\n\n let pointer = &mut VecPointerRef::new(&chars);\n\n let symbol = is_text(pointer);\n\n\n\n matches!(symbol, None);\n\n matches!(pointer.current(), Some('\"')); // Assert cursor was not moved.\n\n }\n\n}\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 47, "score": 56117.97120145062 }, { "content": "use crate::vecpointer::VecPointerRef;\n\n\n\nuse super::Token;\n\n\n\n/// Checks if the [TextPointer](TextPointer) is currently pointing to a DoubleSlash [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n", "file_path": "src/xpath/tokenizer/helpers.rs", "rank": 48, "score": 56117.88116275047 }, { "content": " fn is_comment_works() {\n\n // arrange\n\n let chars: Vec<char> = \"<!--bean is-nice -->\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_comment(&mut pointer).unwrap();\n\n\n\n // assert\n\n assert_eq!(Token::Comment(String::from(\"bean is-nice \")), result);\n\n assert_eq!(20, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_comment_does_not_move_pointer_if_not_found() {\n\n // arrange\n\n let chars: Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 49, "score": 56117.648233002925 }, { "content": " fn is_literal_works_double_quote() {\n\n // arrange\n\n let chars: Vec<char> = r###\"\"yo\"\"###.chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_literal(&mut pointer, true).unwrap();\n\n\n\n // assert\n\n assert_eq!(Token::Literal(String::from(\"yo\")), result);\n\n assert_eq!(4, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_literal_works_escaped_quote() {\n\n // arrange\n\n let chars: Vec<char> = r###\"\"the cow says \\\"moo\\\".\"\"###.chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // 
act\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 50, "score": 56117.32469138367 }, { "content": "\n\n // act\n\n let result = is_tag_close(&mut pointer).unwrap();\n\n\n\n // assert\n\n assert_eq!(Token::TagClose, result);\n\n assert_eq!(1, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_tag_close_does_not_move_pointer_if_not_found() {\n\n // arrange\n\n let chars: Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_tag_close(&mut pointer);\n\n\n\n // assert\n\n assert_eq!(None, result);\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 51, "score": 56117.31184906229 }, { "content": " let result = is_literal(&mut pointer, true).unwrap();\n\n\n\n // assert\n\n assert_eq!(\n\n Token::Literal(String::from(r#\"the cow says \"moo\".\"#)),\n\n result\n\n );\n\n assert_eq!(23, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_literal_works_single_quote() {\n\n // arrange\n\n let chars: Vec<char> = r###\"'yo'\"###.chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_literal(&mut pointer, true).unwrap();\n\n\n\n // assert\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 52, "score": 56117.295281611725 }, { "content": "use crate::vecpointer::VecPointerRef;\n\n\n\nuse super::Token;\n\n\n\n/// Checks if the [TextPointer](TextPointer) is currently pointing to a StartTag [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\n///\n\n/// StartTag is defined as `<{{String}}`\n\n///\n\n/// Has additional checks to make sure it is not an end tag.\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 53, "score": 56117.09662430029 }, { "content": " // assert\n\n assert_eq!(Token::AssignmentSign, result);\n\n assert_eq!(1, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_assignment_sign_does_not_move_pointer_if_not_found() {\n\n // arrange\n\n let chars: 
Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_assignment_sign(&mut pointer);\n\n\n\n // assert\n\n assert_eq!(None, result);\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 54, "score": 56116.95549873421 }, { "content": " assert_eq!(Token::Literal(String::from(\"yo\")), result);\n\n assert_eq!(4, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_literal_does_not_move_pointer_if_not_found() {\n\n // arrange\n\n let chars: Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_literal(&mut pointer, true);\n\n\n\n // assert\n\n assert!(matches!(result, None));\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_identifier_works() {\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 55, "score": 56116.8783578366 }, { "content": " // assert\n\n assert_eq!(Token::EndTag(String::from(\"c\")), result);\n\n assert_eq!(3, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_end_tag_does_not_move_pointer_if_not_found() {\n\n // arrange\n\n let chars: Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_end_tag(&mut pointer);\n\n\n\n // assert\n\n assert!(matches!(result, None));\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 56, "score": 56116.8783578366 }, { "content": "\n\n // assert\n\n assert!(matches!(result, None));\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_text_works() {\n\n // arrange\n\n let chars: Vec<char> = \"foo bar\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_text(&mut pointer, false).unwrap();\n\n\n\n // assert\n\n assert_eq!(Token::Text(String::from(\"foo bar\")), result);\n\n assert_eq!(7, pointer.index);\n\n 
}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 57, "score": 56116.789079412854 }, { "content": " assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_tag_close_and_end_works() {\n\n // arrange\n\n let chars: Vec<char> = \"/>\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_tag_close_and_end(&mut pointer).unwrap();\n\n\n\n // assert\n\n assert_eq!(Token::TagCloseAndEnd, result);\n\n assert_eq!(2, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_tag_close_and_end_does_not_move_pointer_if_not_found() {\n\n // arrange\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 58, "score": 56116.72728136603 }, { "content": " let chars: Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_start_tag(&mut pointer);\n\n\n\n // assert\n\n assert!(matches!(result, None));\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_end_tag_works() {\n\n // arrange\n\n let chars: Vec<char> = \"</c>\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_end_tag(&mut pointer).unwrap();\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 59, "score": 56116.36036522571 }, { "content": " let chars: Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_tag_close_and_end(&mut pointer);\n\n\n\n // assert\n\n assert_eq!(None, result);\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_assignment_sign_works() {\n\n // arrange\n\n let chars: Vec<char> = \"=\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_assignment_sign(&mut pointer).unwrap();\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 60, "score": 56116.316916358584 }, { "content": "\n\n #[test]\n\n fn is_end_comment_does_not_move_pointer_if_not_found() {\n\n // 
arrange\n\n let chars: Vec<char> = \"abcd\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_end_comment(&mut pointer);\n\n\n\n // assert\n\n assert_eq!(false, result);\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_tag_close_works() {\n\n // arrange\n\n let chars: Vec<char> = \">\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 61, "score": 56116.02487393316 }, { "content": " #[test]\n\n fn is_text_not_move_pointer_if_not_found() {\n\n // arrange\n\n let chars: Vec<char> = \"<\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_text(&mut pointer, false);\n\n\n\n // assert\n\n assert!(matches!(result, None));\n\n assert_eq!(0, pointer.index);\n\n }\n\n}\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 62, "score": 56115.69957054576 }, { "content": " let result = is_comment(&mut pointer);\n\n\n\n // assert\n\n assert_eq!(None, result);\n\n assert_eq!(0, pointer.index);\n\n }\n\n\n\n #[test]\n\n fn is_end_comment_works() {\n\n // arrange\n\n let chars: Vec<char> = \"-->\".chars().collect();\n\n let mut pointer = VecPointerRef::new(&chars);\n\n\n\n // act\n\n let result = is_end_comment(&mut pointer);\n\n\n\n // assert\n\n assert_eq!(true, result);\n\n assert_eq!(3, pointer.index);\n\n }\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 63, "score": 56115.59674370644 }, { "content": " else {\n\n text.push(*c);\n\n }\n\n escape = false;\n\n }\n\n None => break,\n\n };\n\n }\n\n\n\n let name: String = text.into_iter().collect();\n\n\n\n pointer.next(); // skip over closing `\"`\n\n\n\n return Some(Token::Literal(name));\n\n }\n\n }\n\n None\n\n}\n\n\n\nlazy_static! 
{\n\n /// List of characters that end an Identifier [Symbol](Symbol).\n\n static ref INAVLID_ID_CHARS: Vec<char> = vec![' ', '<', '>', '/', '=', '\"'];\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 64, "score": 56115.295507291296 }, { "content": " }\n\n return None;\n\n }\n\n None\n\n}\n\n\n\nlazy_static! {\n\n /// List of characters that end a Text [Symbol](Symbol).\n\n static ref INAVLID_TEXT_CHARS: Vec<char> = vec!['<', '>'];\n\n}\n\n\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 65, "score": 56111.820461335315 }, { "content": " }\n\n None => break,\n\n };\n\n }\n\n\n\n if has_non_whitespace {\n\n let text: String = buffer.into_iter().collect();\n\n return Some(Token::Text(text));\n\n } else {\n\n // roll back pointer\n\n pointer.index = start_index;\n\n return None;\n\n }\n\n }\n\n return None;\n\n }\n\n None\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/html/tokenizer/helpers.rs", "rank": 66, "score": 56110.303577732426 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum XpathElement {\n\n SearchRoot,\n\n SearchAll,\n\n Tag(String),\n\n Query(XpathQuery),\n\n Index(usize),\n\n Axis(XpathAxes),\n\n}\n\n\n\n/// An error occurring during XPath expression parsing.\n\n#[derive(Error, Debug)]\n\npub enum ParseError {\n\n /// Closing bracket appeared before a corresponding open bracket.\n\n /// \n\n /// ```text\n\n /// //div @class=\"node\"]\n\n /// ^\n\n /// └ Missing: Open bracket \"[\"\n\n #[error(\"close square bracket has no matching opening square bracket\")]\n\n LeadingCloseBracket,\n", "file_path": "src/xpath/parse.rs", "rank": 67, "score": 55729.829862874736 }, { "content": "mod tokens;\n\nmod helpers;\n\n\n\nuse crate::vecpointer::VecPointerRef;\n\npub use tokens::Token;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum LexError {}\n\n\n\n/// Tokenize a string of HTML into Symbols used for parsing later on.\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 68, "score": 54877.98407896538 }, { 
"content": " } else if let Some(s) = helpers::is_tag_close(&mut pointer) {\n\n has_open_tag = false;\n\n symbols.push(s);\n\n } else if let Some(s) = helpers::is_assignment_sign(&mut pointer) {\n\n symbols.push(s);\n\n } else if let Some(s) = helpers::is_literal(&mut pointer, has_open_tag) {\n\n symbols.push(s);\n\n } else if let Some(s) = helpers::is_identifier(&mut pointer, has_open_tag) {\n\n symbols.push(s);\n\n } else if let Some(s) = helpers::is_text(&mut pointer, has_open_tag) {\n\n symbols.push(s);\n\n } else {\n\n if let Some(c) = pointer.current() {\n\n if !c.is_whitespace() {\n\n // Unknown symbol, move on ¯\\_(ツ)_/¯\n\n eprintln!(\"Unknown HTML symbol {}\", c);\n\n }\n\n }\n\n pointer.next();\n\n }\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 72, "score": 54869.26236156178 }, { "content": " }\n\n Ok(symbols)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn lex_should_work_with_single_char_text() {\n\n // arrange\n\n let text = \"<node>1</node>\";\n\n\n\n // act\n\n let result = lex(text).unwrap();\n\n\n\n // assert\n\n let expected = vec![\n\n Token::StartTag(String::from(\"node\")),\n\n Token::TagClose,\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 73, "score": 54865.738304913444 }, { "content": " Token::TagClose,\n\n Token::EndTag(String::from(\"html\")),\n\n Token::TagClose,\n\n ];\n\n\n\n // looping makes debugging much easier than just asserting the entire vectors are equal\n\n for (e, r) in expected.into_iter().zip(result) {\n\n assert_eq!(e, r);\n\n }\n\n }\n\n}\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 74, "score": 54859.61124050843 }, { "content": " Token::TagClose,\n\n Token::StartTag(String::from(\"section\")),\n\n Token::Identifier(String::from(\"id\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"language-values\")),\n\n Token::Identifier(String::from(\"class\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"green\")),\n\n Token::TagClose,\n\n 
Token::StartTag(String::from(\"div\")),\n\n Token::Identifier(String::from(\"class\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"w-100 mw-none ph3 mw8-m mw9-l center f3\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"header\")),\n\n Token::Identifier(String::from(\"class\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"pb0\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"h2\")),\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 76, "score": 54857.7067120558 }, { "content": " Token::TagClose,\n\n Token::Text(String::from(\n\n r#\"\n\n Why Rust?\n\n \"#,\n\n )),\n\n Token::EndTag(String::from(\"h2\")),\n\n Token::TagClose,\n\n Token::EndTag(String::from(\"header\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"div\")),\n\n Token::Identifier(String::from(\"class\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"flex-none flex-l\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"section\")),\n\n Token::Identifier(String::from(\"class\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"w-100 pv2 pv0-l mt4\")),\n\n Token::TagClose,\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 77, "score": 54857.698603168814 }, { "content": " Token::AssignmentSign,\n\n Token::Literal(String::from(\"viewport\")),\n\n Token::Identifier(String::from(\"content\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"width=device-width,initial-scale=1.0\")),\n\n Token::TagClose,\n\n Token::Comment(String::from(\" Twitter card \")),\n\n Token::StartTag(String::from(\"meta\")),\n\n Token::Identifier(String::from(\"name\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"twitter:card\")),\n\n Token::Identifier(String::from(\"content\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"summary\")),\n\n Token::TagClose,\n\n Token::EndTag(String::from(\"head\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"body\")),\n\n 
Token::TagClose,\n\n Token::StartTag(String::from(\"main\")),\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 78, "score": 54857.69530090595 }, { "content": " Token::EndTag(String::from(\"p\")),\n\n Token::TagClose,\n\n Token::EndTag(String::from(\"section\")),\n\n Token::TagClose,\n\n Token::EndTag(String::from(\"div\")),\n\n Token::TagClose,\n\n Token::EndTag(String::from(\"div\")),\n\n Token::TagClose,\n\n Token::EndTag(String::from(\"section\")),\n\n Token::TagClose,\n\n Token::EndTag(String::from(\"main\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"script\")),\n\n Token::Identifier(String::from(\"src\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\n\n \"./Rust Programming Language_files/languages.js.download\",\n\n )),\n\n Token::TagCloseAndEnd,\n\n Token::EndTag(String::from(\"body\")),\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 80, "score": 54857.66222734279 }, { "content": " Token::StartTag(String::from(\"!DOCTYPE\")),\n\n Token::Identifier(String::from(\"html\")),\n\n Token::TagClose,\n\n Token::Comment(String::from(\n\n \" saved from url=(0026)https://www.rust-lang.org/ \",\n\n )),\n\n Token::StartTag(String::from(\"html\")),\n\n Token::Identifier(String::from(\"lang\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"en-US\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"head\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"title\")),\n\n Token::TagClose,\n\n Token::Text(String::from(\"Rust Programming Language\")),\n\n Token::EndTag(String::from(\"title\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"meta\")),\n\n Token::Identifier(String::from(\"name\")),\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 81, "score": 54857.64360560623 }, { "content": " Token::StartTag(String::from(\"h3\")),\n\n Token::Identifier(String::from(\"class\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"f2 f1-l\")),\n\n Token::TagClose,\n\n 
Token::Text(String::from(\"Performance\")),\n\n Token::EndTag(String::from(\"h3\")),\n\n Token::TagClose,\n\n Token::StartTag(String::from(\"p\")),\n\n Token::Identifier(String::from(\"class\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"f3 lh-copy\")),\n\n Token::TagClose,\n\n Token::Text(String::from(\n\n r#\"\n\n Rust is blazingly fast and memory-efficient: with no runtime or\n\n garbage collector, it can power performance-critical services, run on\n\n embedded devices, and easily integrate with other languages.\n\n \"#,\n\n )),\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 82, "score": 54857.55497751194 }, { "content": " Token::StartTag(String::from(\"start-tag\")),\n\n Token::Identifier(String::from(\"id\")),\n\n Token::AssignmentSign,\n\n Token::Literal(String::from(\"bean\")),\n\n Token::TagClose,\n\n Token::Comment(String::from(\"comment\")),\n\n Token::StartTag(String::from(\"inner\")),\n\n Token::TagCloseAndEnd,\n\n Token::Text(String::from(\"hello\")),\n\n Token::EndTag(String::from(\"end-tag\")),\n\n Token::TagClose,\n\n ];\n\n\n\n assert_eq!(expected, result);\n\n }\n\n\n\n #[test]\n\n fn lex_should_work_with_html() {\n\n // arrange\n\n let html = r###\"<!DOCTYPE html>\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 83, "score": 54857.5287117081 }, { "content": " Token::Text(String::from(\"1\")),\n\n Token::EndTag(String::from(\"node\")),\n\n Token::TagClose,\n\n ];\n\n\n\n assert_eq!(expected, result);\n\n }\n\n\n\n #[test]\n\n fn lex_should_handle_attribute_without_value() {\n\n // arrange\n\n let text = \"<script defer></script>\";\n\n\n\n // act\n\n let result = lex(text).unwrap();\n\n\n\n // assert\n\n let expected = vec![\n\n Token::StartTag(String::from(\"script\")),\n\n Token::Identifier(String::from(\"defer\")),\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 88, "score": 54857.09217674579 }, { "content": " Token::AssignmentSign,\n\n Token::Literal(String::from(r#\"{\"hello\":\"world\"}\"#)),\n\n 
Token::TagClose,\n\n Token::EndTag(String::from(\"script\")),\n\n Token::TagClose,\n\n ];\n\n\n\n assert_eq!(expected, result);\n\n }\n\n\n\n #[test]\n\n fn lex_works() {\n\n // arrange\n\n let text = \"<start-tag id=\\\"bean\\\"><!--comment--><inner/>hello</end-tag>\";\n\n\n\n // act\n\n let result = lex(text).unwrap();\n\n\n\n // assert\n\n let expected = vec![\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 89, "score": 54857.07508592031 }, { "content": " Token::TagClose,\n\n Token::EndTag(String::from(\"script\")),\n\n Token::TagClose,\n\n ];\n\n\n\n assert_eq!(expected, result);\n\n }\n\n\n\n #[test]\n\n fn lex_should_handle_encoded_json() {\n\n // arrange\n\n let text = r###\"<script json='{\"hello\":\"world\"}'></script>\"###;\n\n\n\n // act\n\n let result = lex(text).unwrap();\n\n\n\n // assert\n\n let expected = vec![\n\n Token::StartTag(String::from(\"script\")),\n\n Token::Identifier(String::from(\"json\")),\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 90, "score": 54857.07508592031 }, { "content": " <!-- saved from url=(0026)https://www.rust-lang.org/ -->\n\n <html lang=\"en-US\">\n\n <head>\n\n <title>Rust Programming Language</title>\n\n <meta name=\"viewport\" content=\"width=device-width,initial-scale=1.0\">\n\n \n\n <!-- Twitter card -->\n\n <meta name=\"twitter:card\" content=\"summary\">\n\n </head>\n\n <body>\n\n <main>\n\n <section id=\"language-values\" class=\"green\">\n\n <div class=\"w-100 mw-none ph3 mw8-m mw9-l center f3\">\n\n <header class=\"pb0\">\n\n <h2>\n\n Why Rust?\n\n </h2>\n\n </header>\n\n <div class=\"flex-none flex-l\">\n\n <section class=\"w-100 pv2 pv0-l mt4\">\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 91, "score": 54853.11524465293 }, { "content": " <h3 class=\"f2 f1-l\">Performance</h3>\n\n <p class=\"f3 lh-copy\">\n\n Rust is blazingly fast and memory-efficient: with no runtime or\n\n garbage collector, it can power performance-critical services, run on\n\n embedded devices, and easily 
integrate with other languages.\n\n </p>\n\n </section>\n\n </div>\n\n </div>\n\n </section>\n\n </main>\n\n <script src=\"./Rust Programming Language_files/languages.js.download\"/>\n\n </body>\n\n </html>\"###;\n\n\n\n // act\n\n let result = lex(html).unwrap();\n\n\n\n // assert\n\n let expected = vec![\n", "file_path": "src/html/tokenizer/mod.rs", "rank": 92, "score": 54853.11524465293 }, { "content": "/// Get all the parents for all the given matched nodes.\n\nfn get_all_parents(document: &HtmlDocument, matched_nodes: &DocumentNodeSet) -> DocumentNodeSet {\n\n let mut parent_nodes = DocumentNodeSet::new();\n\n for node_id in matched_nodes {\n\n if let Some(parent) = node_id.parent(document) {\n\n parent_nodes.insert(parent);\n\n }\n\n }\n\n\n\n parent_nodes\n\n}\n\n\n", "file_path": "src/xpath/mod.rs", "rank": 93, "score": 40964.67014437927 }, { "content": "/// Get all the children for all the given matched nodes.\n\nfn get_all_children(document: &HtmlDocument, matched_nodes: &DocumentNodeSet) -> DocumentNodeSet {\n\n let mut child_nodes = DocumentNodeSet::new();\n\n for node_id in matched_nodes {\n\n let children = node_id.children(document);\n\n child_nodes.insert_all(children);\n\n }\n\n\n\n child_nodes\n\n}\n\n\n", "file_path": "src/xpath/mod.rs", "rank": 94, "score": 40964.67014437927 }, { "content": "fn is_doctype(\n\n tag_name: &String,\n\n tokens: &mut Peekable<std::vec::IntoIter<Token>>,\n\n) -> Result<bool, ParseError> {\n\n if tag_name == \"!DOCTYPE\" {\n\n let token = tokens.next().ok_or(ParseError::UnexpectedEndOfTokens)?;\n\n\n\n if let Token::Identifier(iden) = token {\n\n if iden != \"html\" {\n\n return Err(ParseError::MissingHtmlAfterDoctype);\n\n }\n\n let token = tokens.next().ok_or(ParseError::UnexpectedEndOfTokens)?;\n\n\n\n if !matches!(token, Token::TagClose) {\n\n return Err(ParseError::MissingTagCloseAfterDocstring);\n\n }\n\n } else {\n\n return Err(ParseError::MissingHtmlAfterDoctype);\n\n }\n\n\n\n return Ok(true);\n\n }\n\n\n\n 
Ok(false)\n\n}\n\n\n", "file_path": "src/html/parse.rs", "rank": 95, "score": 36003.792453019196 }, { "content": "/// Enum representing all possible symbols output by the lexer.\n\n#[derive(Debug, PartialEq)]\n\npub enum Token {\n\n /// `/`\n\n Slash,\n\n\n\n /// `//`\n\n DoubleSlash,\n\n\n\n /// `[`\n\n OpenSquareBracket,\n\n\n\n /// `]`\n\n CloseSquareBracket,\n\n\n\n /// `(`\n\n OpenBracket,\n\n\n\n /// `)`\n\n CloseBracket,\n", "file_path": "src/xpath/tokenizer/tokens.rs", "rank": 96, "score": 35665.217739475134 }, { "content": "/// Enum representing all possible symbols output by the lexer.\n\n#[derive(Debug, PartialEq)]\n\npub enum Token {\n\n /// The start of a new tag. Example: `<{{string}}`.\n\n StartTag(String),\n\n\n\n /// The start of an end tag. Example: `</{{string}}`.\n\n EndTag(String),\n\n\n\n /// End *and* close a tag. Example: `/>`.\n\n TagCloseAndEnd,\n\n\n\n /// End a tag. Example: `>`.\n\n TagClose,\n\n\n\n /// Assignment sign. Example: `=`.\n\n AssignmentSign,\n\n\n\n /// A quoted string literal. Contained string does not include quotes. Example: `\"{{string}}\"`.\n\n Literal(String),\n", "file_path": "src/html/tokenizer/tokens.rs", "rank": 97, "score": 35661.2007630262 }, { "content": "\n\n /// `*`\n\n Wildcard,\n\n\n\n /// `.`\n\n Dot,\n\n\n\n /// `..`\n\n DoubleDot,\n\n\n\n /// `=`\n\n AssignmentSign,\n\n\n\n /// `@`\n\n AtSign,\n\n\n\n /// `-`\n\n MinusSign,\n\n\n\n /// `+`\n", "file_path": "src/xpath/tokenizer/tokens.rs", "rank": 98, "score": 35651.96386465693 }, { "content": "\n\n /// Text contained in tags.\n\n Text(String),\n\n\n\n /// An identifier written in a tag declaration.\n\n Identifier(String),\n\n\n\n /// Xml comments. Example: `<!--{{string}}-->`.\n\n Comment(String),\n\n}\n", "file_path": "src/html/tokenizer/tokens.rs", "rank": 99, "score": 35651.96386465693 } ]
Rust
src/graphics.rs
catid/rust_webgl_demo
b09490c0baba251009b8a9b9bfbf6040b4d43785
use stdweb::unstable::TryInto; use webgl_rendering_context::{ WebGLRenderingContext as WebGL, WebGLUniformLocation, WebGLBuffer, WebGLShader, WebGLProgram, }; use stdweb::web::{ IHtmlElement, IParentNode, document, TypedArray, }; use stdweb::web::html_element::CanvasElement; use glm::{Vec2, Vec3, Quat, Mat4}; use tools::js_log; /* WebGL Context with Right-Handed Projection Matrix */ pub struct Context { canvas: CanvasElement, webgl: WebGL, width: i32, height: i32, projection_matrix: Mat4, } impl Context { pub fn new(element_id: &str) -> Self { let canvas : CanvasElement = document().query_selector(&element_id).unwrap().unwrap().try_into().unwrap(); let webgl : WebGL = canvas.get_context().unwrap(); webgl.enable(WebGL::CULL_FACE); webgl.front_face(WebGL::CCW); webgl.cull_face(WebGL::BACK); webgl.enable(WebGL::DEPTH_TEST); webgl.depth_func(WebGL::LESS); Self { canvas: canvas, webgl: webgl, width: 0, height: 0, projection_matrix: Mat4::identity(), } } pub fn UpdateViewport(&mut self) { let width = self.canvas.offset_width(); let height = self.canvas.offset_height(); if width != self.width || height != self.height { self.canvas.set_width(width as u32); self.canvas.set_height(height as u32); self.webgl.viewport(0, 0, width, height); const fov : f32 = 60.; const near : f32 = 2.; const far : f32 = 20.; self.projection_matrix = glm::perspective_fov_rh_zo( fov.to_radians(), width as f32, height as f32, near, far, ); self.width = width; self.height = height; } } pub fn Clear(&self) { self.webgl.clear_color(0.0, 0.0, 0.0, 1.0); self.webgl.clear_depth(1.0); self.webgl.clear(WebGL::COLOR_BUFFER_BIT | WebGL::DEPTH_BUFFER_BIT); } } /* WebGL Shader Program */ pub struct ShaderProgram { fs: WebGLShader, vs: WebGLShader, webGlProgram: WebGLProgram, } impl ShaderProgram { pub fn new(context: &Context, vsCode: &str, fsCode: &str) -> Self { let webgl = &context.webgl; let vs = webgl.create_shader(WebGL::VERTEX_SHADER).unwrap(); webgl.shader_source(&vs, &vsCode); 
webgl.compile_shader(&vs); let vs_success : bool = webgl.get_shader_parameter(&vs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !vs_success { let info = webgl.get_shader_info_log(&vs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let fs = webgl.create_shader(WebGL::FRAGMENT_SHADER).unwrap(); webgl.shader_source(&fs, &fsCode); webgl.compile_shader(&fs); let fs_success : bool = webgl.get_shader_parameter(&fs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !fs_success { let info = webgl.get_shader_info_log(&fs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let program = webgl.create_program().unwrap(); webgl.attach_shader(&program, &vs); webgl.attach_shader(&program, &fs); webgl.link_program(&program); let success : bool = webgl.get_program_parameter(&program, WebGL::LINK_STATUS).try_into().unwrap(); if !success { let info = webgl.get_program_info_log(&program); js_log(format!("LinkProgram failed: {}", info.unwrap())); } Self { fs: fs, vs: vs, webGlProgram: program, } } fn GetUniform(&self, context: &Context, name: &str) -> WebGLUniformLocation { context.webgl.get_uniform_location(&self.webGlProgram, name).unwrap() } fn GetAttrib(&self, context: &Context, name: &str) -> u32 { context.webgl.get_attrib_location(&self.webGlProgram, name) as u32 } } /* Cube Renderer */ pub struct Cube { program: ShaderProgram, unifMvpMatrix: WebGLUniformLocation, attrVertexPosition: u32, attrVertexColor: u32, attrVertexNormal: u32, positionVbo: WebGLBuffer, colorVbo: WebGLBuffer, normalVbo: WebGLBuffer, tri_count: i32, } impl Cube { pub fn new(context: &Context) -> Self { let webgl = &context.webgl; let vsCode = include_str!("shaders/flat_vs.glsl"); let fsCode = include_str!("shaders/flat_fs.glsl"); let program = ShaderProgram::new(context, vsCode, fsCode); /* Corner vertices of a cube, oriented x+right, y+top, z+up, centered at 0,0,0, scaled to span from -1 to +1 on each axis. Vertex and side names are based on a perspective looking down. 
*/ let corners = vec![ /* Down-z side of cube */ -1.,-1.,-1., /* LL */ 1.,-1.,-1., /* LR */ 1., 1.,-1., /* UR */ -1., 1.,-1., /* UL */ /* Up+z side of cube */ -1.,-1., 1., /* LL */ 1.,-1., 1., /* LR */ 1., 1., 1., /* UR */ -1., 1., 1., /* UL */ ]; let triIndices : Vec<u8> = vec![ /* Down-z */ 2, 1, 0, 0, 3, 2, /* Up+z */ 4, 5, 6, 6, 7, 4, /* Bottom-y */ 0, 5, 4, 0, 1, 5, /* Top+y */ 3, 7, 6, 3, 6, 2, /* Left-x */ 0, 4, 3, 4, 7, 3, /* Right+x */ 2, 6, 5, 2, 5, 1, ]; let triColors : Vec<u8> = vec![ /* Down-z */ 255,0,200, 255,0,255, /* Up+z */ 200,200,200, 200,200,255, /* Bottom-y */ 100,200,100, 100,255,100, /* Top+y */ 200,200,100, 200,255,100, /* Left-x */ 200,0,0, 255,0,0, /* Right+x */ 0,200,0, 0,255,0, ]; let tri_count = triIndices.len() / 3; let mut vertices = Vec::with_capacity(tri_count * 3); let mut colors = Vec::with_capacity(tri_count * 3); let mut normals = Vec::with_capacity(tri_count * 3); for i in 0..tri_count { let triIndicesOffset = i * 3; let mut triVertices : [Vec3; 3] = unsafe { std::mem::uninitialized() }; for j in 0..3 { let vertexIndex = triIndices[triIndicesOffset + j]; let cornersOffset = vertexIndex as usize * 3; let scale = 1.0f32; let x = corners[cornersOffset] * scale; let y = corners[cornersOffset + 1] * scale; let z = corners[cornersOffset + 2] * scale; triVertices[j] = glm::vec3(x, y, z); vertices.push(x); vertices.push(y); vertices.push(z); } let normal = glm::triangle_normal( &triVertices[0], &triVertices[1], &triVertices[2] ); for _j in 0..3 { normals.push(normal.x); normals.push(normal.y); normals.push(normal.z); } let colorOffset = i as usize * 3; let r = triColors[colorOffset]; let g = triColors[colorOffset + 1]; let b = triColors[colorOffset + 2]; for _j in 0..3 { colors.push(r); colors.push(g); colors.push(b); } } let webVertices = TypedArray::<f32>::from(vertices.as_slice()).buffer(); let webColors = TypedArray::<u8>::from(colors.as_slice()).buffer(); let webNormals = TypedArray::<f32>::from(normals.as_slice()).buffer(); let 
positionVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&positionVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webVertices), WebGL::STATIC_DRAW); let colorVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&colorVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webColors), WebGL::STATIC_DRAW); let normalVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&normalVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webNormals), WebGL::STATIC_DRAW); let unifMvpMatrix = program.GetUniform(&context, "MVPMatrix"); let attrVertexPosition = program.GetAttrib(&context, "VertexPosition"); let attrVertexColor = program.GetAttrib(&context, "VertexColor"); let attrVertexNormal = program.GetAttrib(&context, "VertexNormal"); Self { program: program, unifMvpMatrix: unifMvpMatrix, attrVertexPosition: attrVertexPosition, attrVertexColor: attrVertexColor, attrVertexNormal: attrVertexNormal, positionVbo: positionVbo, colorVbo: colorVbo, normalVbo: normalVbo, tri_count: tri_count as i32, } } pub fn DrawMultiple(&mut self, context: &Context, mvp_matrices: &Vec<Mat4>) { let webgl = &context.webgl; webgl.use_program(Some(&self.program.webGlProgram)); if self.attrVertexPosition != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.positionVbo)); webgl.vertex_attrib_pointer(self.attrVertexPosition, 3, WebGL::FLOAT, false, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexPosition); } if self.attrVertexColor != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.colorVbo)); webgl.vertex_attrib_pointer(self.attrVertexColor, 3, WebGL::UNSIGNED_BYTE, true, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexColor); } if self.attrVertexNormal != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.normalVbo)); webgl.vertex_attrib_pointer(self.attrVertexNormal, 3, WebGL::FLOAT, false, 0, 0) ; 
webgl.enable_vertex_attrib_array(self.attrVertexNormal); } for mat in mvp_matrices { webgl.uniform_matrix4fv(Some(&self.unifMvpMatrix), false, mat.as_slice()); webgl.draw_arrays(WebGL::TRIANGLES, 0, self.tri_count * 3); } } pub fn Draw(&mut self, context: &Context, mvp_matrix: Mat4) { let matrices = vec![ mvp_matrix ]; self.DrawMultiple(context, &matrices); } } /* Graphics Subsystem State */ pub struct GraphicsState { context: Context, cube: Cube, positions: Vec<Vec2>, } impl GraphicsState { pub fn new() -> Self { let context = Context::new("#canvas"); let cube = Cube::new(&context); let positions = vec![ glm::vec2(-2.0f32, 0.0f32), glm::vec2(0.0f32, 0.0f32), glm::vec2(2.0f32, 0.0f32), glm::vec2(0.0f32, -2.0f32), glm::vec2(0.0f32, 2.0f32), ]; Self { context: context, cube: cube, positions: positions, } } pub fn RenderScene(&mut self, nowSeconds: f64) { self.context.UpdateViewport(); self.context.Clear(); let eye = glm::vec3(0.0, 0.0, -10.0); let center = glm::vec3(0.0, 0.0, 0.0); let up = glm::vec3(0.0, 1.0, 0.0); let view_matrix = glm::look_at_rh( &eye, &center, &up, ); let proj_view_matrix = self.context.projection_matrix * view_matrix; let angle = glm::modf(nowSeconds / 1000.0f64, glm::two_pi()) as f32; let mut mvp_matrices = vec![]; for position in &self.positions { /* This will scale the whole object, rotate the whole object. Translation is applied to each render within the object frame. mvp = view * translate(rm * sm) Translating the scale matrix will scale the whole thing down. Scaling the translate matrix will scale each object down. 
*/ let translate_matrix = glm::translate(&glm::identity(), &glm::vec3(position.x, position.y, 0.0f32) ); let scale_matrix = glm::scale( &translate_matrix, &glm::vec3(0.5, 0.5, 0.5) ); let quat_angle = &nalgebra_glm::quat_angle_axis( angle, &glm::vec3(1.0, 1.0, 1.0) ); let rotate_matrix = scale_matrix * glm::quat_to_mat4(quat_angle); let mvp = proj_view_matrix * rotate_matrix; mvp_matrices.push(mvp); } self.cube.DrawMultiple(&self.context, &mvp_matrices); } }
use stdweb::unstable::TryInto; use webgl_rendering_context::{ WebGLRenderingContext as WebGL, WebGLUniformLocation, WebGLBuffer, WebGLShader, WebGLProgram, }; use stdweb::web::{ IHtmlElement, IParentNode, document, TypedArray, }; use stdweb::web::html_element::CanvasElement; use glm::{Vec2, Vec3, Quat, Mat4}; use tools::js_log; /* WebGL Context with Right-Handed Projection Matrix */ pub struct Context { canvas: CanvasElement, webgl: WebGL, width: i32, height: i32, projection_matrix: Mat4, } impl Context { pub fn new(element_id: &str) -> Self { let canvas : CanvasElement = document().query_selector(&element_id).unwrap().unwrap().try_into().unwrap(); let webgl : WebGL = canvas.get_context().unwrap(); webgl.enable(WebGL::CULL_FACE); webgl.front_face(WebGL::CCW); webgl.cull_face(WebGL::BACK); webgl.enable(WebGL::DEPTH_TEST); webgl.depth_func(WebGL::LESS); Self { canvas: canvas, webgl: webgl, width: 0, height: 0, projection_matrix: Mat4::identity(), } } pub fn UpdateViewport(&mut self) { let width = self.canvas.offset_width(); let height = self.canvas.offset_height(); if width != self.width || height != self.height { self.canvas.set_width(width as u32); self.canvas.set_height(height as u32); self.webgl.viewport(0, 0, width, height); const fov : f32 = 60.; const near : f32 = 2.; const far : f32 = 20.; self.projection_matrix = glm::perspective_fov_rh_zo( fov.to_radians(), width as f32, height as f32, near, far, ); self.width = width; self.height = height; } } pub fn Clear(&self) { self.webgl.clear_color(0.0, 0.0, 0.0, 1.0); self.webgl.clear_depth(1.0); self.webgl.clear(WebGL::COLOR_BUFFER_BIT | WebGL::DEPTH_BUFFER_BIT); } } /* WebGL Shader Program */ pub struct ShaderProgram { fs: WebGLShader, vs: WebGLShader, webGlProgram: WebGLProgram, } impl ShaderProgram { pub fn new(context: &Context, vsCode: &str, fsCode: &str) -> Self { let webgl = &context.webgl; let vs = webgl.create_shader(WebGL::VERTEX_SHADER).unwrap(); webgl.shader_source(&vs, &vsCode); 
webgl.compile_shader(&vs); let vs_success : bool = webgl.get_shader_parameter(&vs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !vs_success { let info = webgl.get_shader_info_log(&vs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let fs = webgl.create_shader(WebGL::FRAGMENT_SHADER).unwrap(); webgl.shader_source(&fs, &fsCode); webgl.compile_shader(&fs); let fs_success : bool = webgl.get_shader_parameter(&fs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !fs_success { let info = webgl.get_shader_info_log(&fs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let program = webgl.create_program().unwrap(); webgl.attach_shader(&program, &vs); webgl.attach_shader(&program, &fs); webgl.link_program(&program); let success : bool = webgl.get_program_parameter(&program, WebGL::LINK_STATUS).try_into().unwrap(); if !success { let info = webgl.get_program_info_log(&program); js_log(format!("LinkProgram failed: {}", info.unwrap())); } Self { fs: fs, vs: vs, webGlProgram: program, } } fn GetUniform(&self, context: &Context, name: &str) -> WebGLUniformLocation { context.webgl.get_uniform_location(&self.webGlProgram, name).unwrap() } fn GetAttrib(&self, context: &Context, name: &str) -> u32 { context.webgl.get_attrib_location(&self.webGlProgram, name) as u32 } } /* Cube Renderer */ pub struct Cube { program: ShaderProgram, unifMvpMatrix: WebGLUniformLocation, attrVertexPosition: u32, attrVertexColor: u32, attrVertexNormal: u32, positionVbo: WebGLBuffer, colorVbo: WebGLBuffer, normalVbo: WebGLBuffer, tri_count: i32, } impl Cube { pub fn new(context: &Context) -> Self { let webgl = &context.webgl; let vsCode = include_str!("shaders/flat_vs.glsl"); let fsCode = include_str!("shaders/flat_fs.glsl"); let program = ShaderProgram::new(context, vsCode, fsCode); /* Corner vertices of a cube, oriented x+right, y+top, z+up, centered at 0,0,0, scaled to span from -1 to +1 on each axis. Vertex and side names are based on a perspective looking down. 
*/ let corners = vec![ /* Down-z side of cube */ -1.,-1.,-1., /* LL */ 1.,-1.,-1., /* LR */ 1., 1.,-1., /* UR */ -1., 1.,-1., /* UL */ /* Up+z side of cube */ -1.,-1., 1., /* LL */ 1.,-1., 1., /* LR */ 1., 1., 1., /* UR */ -1., 1., 1., /* UL */ ]; let triIndices : Vec<u8> = vec![ /* Down-z */ 2, 1, 0, 0, 3, 2, /* Up+z */ 4, 5, 6, 6, 7, 4, /* Bottom-y */ 0, 5, 4, 0, 1, 5, /* Top+y */ 3, 7, 6, 3, 6, 2, /* Left-x */ 0, 4, 3, 4, 7, 3, /* Right+x */ 2, 6, 5, 2, 5, 1, ]; let triColors : Vec<u8> = vec![ /* Down-z */ 255,0,200, 255,0,255, /* Up+z */ 200,200,200, 200,200,255, /* Bottom-y */ 100,200,100, 100,255,100, /* Top+y */ 200,200,100, 200,255,100, /* Left-x */ 200,0,0, 255,0,0, /* Right+x */ 0,200,0, 0,255,0, ]; let tri_count = triIndices.len() / 3; let mut vertices = Vec::with_capacity(tri_count * 3); let mut colors = Vec::with_capacity(tri_count * 3); let mut normals = Vec::with_capacity(tri_count * 3); for i in 0..tri_count { let triIndicesOffset = i * 3; let mut triVertices : [Vec3; 3] = unsafe { std::mem::uninitialized() }; for j in 0..3 { let vertexIndex = triIndices[triIndicesOffset + j]; let cornersOffset = vertexIndex as usize * 3; let scale = 1.0f32; let x = corners[cornersOffset] * scale; let y = corners[cornersOffset + 1] * scale; let z = corners[cornersOffset + 2] * scale; triVertices[j] = glm::vec3(x, y, z); vertices.push(x); vertices.push(y); vertices.push(z); } let normal = glm::triangle_normal( &triVertices[0], &triVertices[1], &triVertices[2] ); for _j in 0..3 { normals.push(normal.x); normals.push(normal.y); normals.push(normal.z); } let colorOffset = i as usize * 3; let r = triColors[colorOffset]; let g = triColors[colorOffset + 1]; let b = triColors[colorOffset + 2]; for _j in 0..3 { colors.push(r); colors.push(g); colors.push(b); } } let webVertices = TypedArray::<f32>::from(vertices.as_slice()).buffer(); let webColors = TypedArray::<u8>::from(colors.as_slice()).buffer(); let webNormals = TypedArray::<f32>::from(normals.as_slice()).buffer(); let 
positionVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&positionVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webVertices), WebGL::STATIC_DRAW); let colorVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&colorVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webColors), WebGL::STATIC_DRAW); let normalVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&normalVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webNormals), WebGL::STATIC_DRAW); let unifMvpMatrix = program.GetUniform(&context, "MVPMatrix"); let attrVertexPosition = program.GetAttrib(&context, "VertexPosition"); let attrVertexColor = program.GetAttrib(&context, "VertexColor"); let attrVertexNormal = program.GetAttrib(&context, "VertexNormal"); Self { program: program, unifMvpMatrix: unifMvpMatrix, attrVertexPosition: attrVertexPosition, attrVertexColor: attrVertexColor, attrVertexNormal: attrVertexNormal, positionVbo: positionVbo, colorVbo: colorVbo, normalVbo: normalVbo, tri_count: tri_count as i32, } } pub fn DrawMultiple(&mut self, context: &Context, mvp_matrices: &Vec<Mat4>) { let webgl = &context.webgl; webgl.use_program(Some(&self.program.webGlProgram)); if self.attrVertexPosition != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.positionVbo)); webgl.vertex_attrib_pointer(self.attrVertexPosition, 3, WebGL::FLOAT, false, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexPosition); } if self.attrVertexColor != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.colorVbo)); webgl.vertex_attrib_pointer(self.attrVertexColor, 3, WebGL::UNSIGNED_BYTE, true, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexColor); } if self.attrVertexNormal != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.normalVbo)); webgl.vertex_attrib_pointer(self.attrVertexNormal, 3, WebGL::FLOAT, false, 0, 0) ; 
webgl.enable_vertex_attrib_array(self.attrVertexNormal); } for mat in mvp_matrices { webgl.uniform_matrix4fv(Some(&self.unifMvpMatrix), false, mat.as_slice()); webgl.draw_arrays(WebGL::TRIANGLES, 0, self.tri_count * 3); } } pub fn Draw(&mut self, context: &Context, mvp_matrix: Mat4) { let matrices = vec![ mvp_matrix ]; self.DrawMultiple(context, &matrices); } } /* Graphics Subsystem State */ pub struct GraphicsState { context: Context, cube: Cube, positions: Vec<Vec2>, } impl GraphicsState { pub fn new() -> Self { let context = Context::new("#canvas"); let cube = Cube::new(&context); let positions = vec![ glm::vec2(-2.0f32, 0.0f32), glm::vec2(0.0f32, 0.0f32), glm::vec2(2.0f32, 0.0f32), glm::vec2(0.0f32, -2.0f32), glm::vec2(0.0f32, 2.0f32), ]; Self { context: context, cube: cube, positions: positions, } }
}
pub fn RenderScene(&mut self, nowSeconds: f64) { self.context.UpdateViewport(); self.context.Clear(); let eye = glm::vec3(0.0, 0.0, -10.0); let center = glm::vec3(0.0, 0.0, 0.0); let up = glm::vec3(0.0, 1.0, 0.0); let view_matrix = glm::look_at_rh( &eye, &center, &up, ); let proj_view_matrix = self.context.projection_matrix * view_matrix; let angle = glm::modf(nowSeconds / 1000.0f64, glm::two_pi()) as f32; let mut mvp_matrices = vec![]; for position in &self.positions { /* This will scale the whole object, rotate the whole object. Translation is applied to each render within the object frame. mvp = view * translate(rm * sm) Translating the scale matrix will scale the whole thing down. Scaling the translate matrix will scale each object down. */ let translate_matrix = glm::translate(&glm::identity(), &glm::vec3(position.x, position.y, 0.0f32) ); let scale_matrix = glm::scale( &translate_matrix, &glm::vec3(0.5, 0.5, 0.5) ); let quat_angle = &nalgebra_glm::quat_angle_axis( angle, &glm::vec3(1.0, 1.0, 1.0) ); let rotate_matrix = scale_matrix * glm::quat_to_mat4(quat_angle); let mvp = proj_view_matrix * rotate_matrix; mvp_matrices.push(mvp); } self.cube.DrawMultiple(&self.context, &mvp_matrices); }
function_block-full_function
[ { "content": "#[js_export]\n\nfn js_ontouch(x: i32, y: i32, w: i32, h: i32) {\n\n let norm_x = x as f32 / w as f32;\n\n let norm_y = y as f32 / h as f32;\n\n // FIXME: Convert x, y to normalized coordinates between -1..1\n\n js_log(format!(\"Tap at {}, {}\", norm_x, norm_y));\n\n}\n\n\n\nimpl InputState {\n\n pub fn new() -> InputState {\n\n InputState {\n\n }\n\n }\n\n\n\n pub fn CheckFingerTips(&mut self, _nowSeconds: f64) {\n\n\n\n }\n\n}\n\n//#[cfg(feature=\"jsexports\")]\n", "file_path": "src/input.rs", "rank": 0, "score": 70893.46920016888 }, { "content": "// Buffer an audio buffer sample to the given channel\n\nfn js_play_buffer(js_ctx: &stdweb::Value, sample_buffer: &Vec<f32>) {\n\n js! {\n\n var h = @{js_ctx};\n\n var samples = @{unsafe { stdweb::UnsafeTypedArray::new(sample_buffer) }};\n\n\n\n var sample_count = samples.length;\n\n var sample_rate = 48000;\n\n\n\n var audio_buffer = h.audio.createBuffer(1, sample_count, sample_rate);\n\n\n\n audio_buffer.getChannelData(0).set(samples);\n\n\n\n var node = h.audio.createBufferSource();\n\n node.connect(h.audio.destination);\n\n node.buffer = audio_buffer;\n\n\n\n var latency = 0.1;\n\n var play_timestamp = h.audio.currentTime + latency;\n\n node.start(play_timestamp);\n\n }\n", "file_path": "src/audio.rs", "rank": 1, "score": 59522.20167458012 }, { "content": "#![allow(dead_code, unused_parens, unused_imports)]\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/webgl_rendering_context.rs\"));\n", "file_path": "src/webgl_rendering_context.rs", "rank": 2, "score": 55356.47249434387 }, { "content": "pub fn js_log(message: String) {\n\n js! 
{\n\n console.log(@{message});\n\n };\n\n}\n", "file_path": "src/tools.rs", "rank": 3, "score": 50491.17021016144 }, { "content": "fn render_loop(looper: Rc<RefCell<GameLoop>>) {\n\n stdweb::web::window().request_animation_frame(move |nowSeconds: f64| {\n\n {\n\n let mut mlooper = looper.borrow_mut();\n\n mlooper.inst_graphics.RenderScene(nowSeconds);\n\n mlooper.inst_audio.PlayBleepsAndBloops(nowSeconds);\n\n mlooper.inst_input.CheckFingerTips(nowSeconds);\n\n }\n\n render_loop(looper);\n\n });\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 35061.85523204844 }, { "content": "struct GameLoop {\n\n inst_graphics: graphics::GraphicsState,\n\n inst_audio: audio::AudioState,\n\n inst_input: input::InputState,\n\n}\n\n\n\nimpl GameLoop {\n\n fn new() -> GameLoop {\n\n GameLoop {\n\n inst_audio: audio::AudioState::new(),\n\n inst_graphics: graphics::GraphicsState::new(),\n\n inst_input: input::InputState::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 33522.201292530626 }, { "content": "fn main() {\n\n let dest = env::var(\"OUT_DIR\").unwrap();\n\n let mut file = File::create(&Path::new(&dest).join(\"webgl_rendering_context.rs\")).unwrap();\n\n\n\n Registry::new(Api::WebGl2, Exts::NONE)\n\n .write_bindings(StdwebGenerator, &mut file)\n\n .unwrap();\n\n}\n", "file_path": "build.rs", "rank": 6, "score": 32491.767499142974 }, { "content": "fn main() {\n\n stdweb::initialize();\n\n\n\n let looper = Rc::new(RefCell::new(GameLoop::new()));\n\n\n\n render_loop(looper);\n\n\n\n stdweb::event_loop();\n\n}\n", "file_path": "src/main.rs", "rank": 7, "score": 31199.307608447038 }, { "content": "var canvas = document.getElementById('canvas');\n", "file_path": "src/runtime.js", "rank": 8, "score": 29938.790247937384 }, { "content": "BSD 3-Clause License\n\n\n\nCopyright (c) 2019, Christopher A. 
Taylor\n\nAll rights reserved.\n\n\n\nRedistribution and use in source and binary forms, with or without\n\nmodification, are permitted provided that the following conditions are met:\n\n\n\n* Redistributions of source code must retain the above copyright notice, this\n\n list of conditions and the following disclaimer.\n\n\n\n* Redistributions in binary form must reproduce the above copyright notice,\n\n this list of conditions and the following disclaimer in the documentation\n\n and/or other materials provided with the distribution.\n\n\n\n* Neither the name of the copyright holder nor the names of its\n\n contributors may be used to endorse or promote products derived from\n\n this software without specific prior written permission.\n\n\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n", "file_path": "LICENSE.md", "rank": 9, "score": 23051.92285217715 }, { "content": "## Rust Web Game\n\n\n\nThis is a hello world example of implementing a Web Assembly Triangle demo in Rust.\n\n\n\nIt uses Rust crates: cargo-web, stdweb\n\n\n\n## Setup\n\n\n\nThis project depends on Python and Rust. 
Basically: `setup.py install`\n\n\n\nSteps to set up and run the example:\n\n\n\n* Install Python 2.7: https://www.python.org/downloads/\n\n* Add Python executable to the PATH.\n\n* Install the Rust toolkit: https://www.rust-lang.org/learn/get-started\n\n* [Windows]: I recommend Cmder ( http://cmder.net/ ) instead of the Windows Console.\n\n* Clone this repo: `git clone [email protected]:catid/rustwebgame.git`\n\n* Run `setup.py install`. This will install some Python dependencies and my scripts.\n\n* Run `cargo web start --auto-reload`\n\n\n\nPoint your web browser at http://127.0.0.1:8080/\n", "file_path": "README.md", "rank": 10, "score": 23050.205085903333 }, { "content": "// This file is prepended to the Javascript produced by Cargo-Web\n\n\n\nvar canvas = document.getElementById('canvas');\n\n\n\nfunction invoke_js_on_touch(canvas_x, canvas_y) {\n\n var width = canvas.offsetWidth, height = canvas.offsetHeight;\n\n if (width <= 0 || height <= 0) {\n\n return;\n\n }\n\n var gfx_x = canvas_x - canvas.offsetLeft;\n\n var gfx_y = canvas_y - canvas.offsetTop;\n\n Module.exports.js_ontouch(gfx_x, gfx_y, width, height);\n\n}\n\n\n\n// Handle taps:\n\n\n\ncanvas.addEventListener(\"touchstart\", onTouch, false);\n\ncanvas.addEventListener(\"touchmove\", onTouch, false);\n\n\n\nfunction onTouch(ev) {\n\n var len = ev.changedTouches.length;\n\n for (var i = 0; i < len; i++) {\n\n var touch = ev.changedTouches.item(i);\n\n invoke_js_on_touch(touch.clientX, touch.clientY);\n\n }\n\n}\n\n\n\ncanvas.addEventListener(\"pointerdown\", onPointer, false);\n\ncanvas.addEventListener(\"pointermove\", onPointer, false);\n\n\n\nfunction onPointer(ev) {\n\n invoke_js_on_touch(ev.clientX, ev.clientY);\n\n}\n\n\n\ncanvas.addEventListener(\"mousedown\", onMouse, false);\n\ncanvas.addEventListener(\"mousemove\", onMouse, false);\n\n\n\nfunction onMouse(ev) {\n\n invoke_js_on_touch(ev.clientX, ev.clientY);\n\n}\n", "file_path": "src/runtime.js", "rank": 11, "score": 22225.415724242 }, { 
"content": "#!/usr/bin/env python2\n\n\n\nimport setuptools, os, subprocess\n\n\n\nsetup_file = os.path.realpath(__file__)\n\nbase_dir = os.path.dirname(setup_file)\n\nrequirements_file = \"{}/requirements.txt\".format(base_dir)\n\n\n\n# Parse requirements from requirements.txt\n\ninstall_requires = []\n\nwith open(requirements_file) as file:\n\n install_requires = file.read().splitlines()\n\n\n\nprint 'Loaded requirements: {}'.format(install_requires)\n\n\n\nsetuptools.setup(\n\n name = 'RustWebGame',\n\n version = '0.1.0',\n\n description = 'Hello World for Rust Web Assembly',\n\n\n\n author = 'Christopher A. Taylor',\n\n author_email = '[email protected]',\n\n url = 'https://github.com/catid/rustwebgame',\n\n classifiers = [\n\n 'Programming Language :: Rust',\n\n 'License :: BSD3',\n\n 'Operating System :: OS Independent',\n\n 'Intended Audience :: Developers',\n\n ],\n\n\n\n install_requires = install_requires,\n\n\n\n packages = [], #setuptools.find_packages(),\n\n)\n\n\n\n# Install Rust dependencies\n\ndef setup_rust():\n\n print \"Configuring Rust...\"\n\n rustup_cmd = \"rustup target add wasm32-unknown-unknown\"\n\n print \" * Running: {}\".format(rustup_cmd)\n\n retval = subprocess.call(rustup_cmd, shell=True, cwd=base_dir)\n\n print \" * Rustup returned: {}\".format(retval)\n\n\n\n cargo_cmd = \"cargo install cargo-web\"\n\n print \" * Running: {}\".format(cargo_cmd)\n\n retval = subprocess.call(cargo_cmd, shell=True, cwd=base_dir)\n\n if retval != 0:\n\n print \" * Cargo install failed - This may be a warning if the package is already installed\"\n\n print \" * Cargo install returned: {}\".format(retval)\n\n\n\nsetup_rust()\n\n\n\nprint \"Success! 
To rebuild and host a web server:\"\n\nprint \"\"\n\nprint \" cargo web start --auto-reload\"\n\nprint \"\"\n", "file_path": "setup.py", "rank": 34, "score": 16206.015789784968 }, { "content": "def setup_rust():\n\n print \"Configuring Rust...\"\n\n rustup_cmd = \"rustup target add wasm32-unknown-unknown\"\n\n print \" * Running: {}\".format(rustup_cmd)\n\n retval = subprocess.call(rustup_cmd, shell=True, cwd=base_dir)\n\n print \" * Rustup returned: {}\".format(retval)\n\n\n\n cargo_cmd = \"cargo install cargo-web\"\n\n print \" * Running: {}\".format(cargo_cmd)\n\n retval = subprocess.call(cargo_cmd, shell=True, cwd=base_dir)\n\n if retval != 0:\n\n print \" * Cargo install failed - This may be a warning if the package is already installed\"\n", "file_path": "setup.py", "rank": 35, "score": 14662.400094594383 }, { "content": "}\n\n\n\n\n\npub struct AudioState {\n\n js_ctx: stdweb::Value,\n\n old_timestamp: f64,\n\n beep_a: Vec<f32>,\n\n beep_b: Vec<f32>,\n\n beep_c: Vec<f32>,\n\n}\n\n\n\nimpl AudioState {\n\n pub fn new() -> AudioState {\n\n let element_audio = js! 
{\n\n return {\n\n audio: new AudioContext()\n\n };\n\n };\n\n\n\n let signal0 = sample::signal::rate(SAMPLE_HZ).const_hz(300.0).square().scale_amp(0.05);\n", "file_path": "src/audio.rs", "rank": 36, "score": 13.52943346576379 }, { "content": " AudioState {\n\n js_ctx: element_audio,\n\n old_timestamp: 0.0,\n\n beep_a: buffer_a,\n\n beep_b: buffer_b,\n\n beep_c: buffer_c,\n\n }\n\n }\n\n\n\n fn play(&mut self, note: u32) {\n\n let beep: &Vec<f32>;\n\n\n\n match note {\n\n 0 => beep = &self.beep_a,\n\n 1 => beep = &self.beep_b,\n\n _ => beep = &self.beep_c,\n\n }\n\n\n\n js_play_buffer(\n\n &self.js_ctx,\n\n beep);\n\n }\n\n\n\n pub fn PlayBleepsAndBloops(&mut self, _nowSeconds: f64) {\n\n // FIXME\n\n }\n\n}\n", "file_path": "src/audio.rs", "rank": 37, "score": 7.795225555591871 }, { "content": "extern crate webgl_generator;\n\n\n\nuse webgl_generator::*;\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::path::*;\n\n\n", "file_path": "build.rs", "rank": 38, "score": 6.7760252835085915 }, { "content": "#![recursion_limit=\"256\"]\n\n\n\n#[macro_use]\n\nextern crate stdweb;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n#[macro_use]\n\nextern crate stdweb_derive;\n\n\n\nextern crate nalgebra_glm as glm;\n\nextern crate sample;\n\n\n\nextern crate specs;\n\n\n\nmod webgl_rendering_context;\n\nmod input;\n\nmod graphics;\n\nmod audio;\n\nmod tools;\n\n\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\n\n", "file_path": "src/main.rs", "rank": 39, "score": 6.711334046853638 }, { "content": "use tools::js_log;\n\n\n\npub struct InputState {\n\n // Nothing here yet.\n\n}\n\n\n\n#[js_export]\n", "file_path": "src/input.rs", "rank": 40, "score": 6.061275292318207 }, { "content": " let signal1 = sample::signal::rate(SAMPLE_HZ).const_hz(400.0).square().scale_amp(0.05);\n\n let signal2 = sample::signal::rate(SAMPLE_HZ).const_hz(500.0).sine().scale_amp(0.05);\n\n let buffer_a: Vec<_> = signal0.add_amp(signal1).add_amp(signal2)\n\n .take(BEEP_SAMPLES)\n\n .map(|x|{x[0] as 
f32}).collect();\n\n\n\n let signal3 = sample::signal::rate(SAMPLE_HZ).const_hz(400.0).square().scale_amp(0.05);\n\n let signal4 = sample::signal::rate(SAMPLE_HZ).const_hz(500.0).square().scale_amp(0.05);\n\n let signal5 = sample::signal::rate(SAMPLE_HZ).const_hz(600.0).sine().scale_amp(0.05);\n\n let buffer_b: Vec<_> = signal3.add_amp(signal4).add_amp(signal5)\n\n .take(BEEP_SAMPLES)\n\n .map(|x|{x[0] as f32}).collect();\n\n\n\n let signal6 = sample::signal::rate(SAMPLE_HZ).const_hz(500.0).square().scale_amp(0.05);\n\n let signal7 = sample::signal::rate(SAMPLE_HZ).const_hz(600.0).square().scale_amp(0.05);\n\n let signal8 = sample::signal::rate(SAMPLE_HZ).const_hz(700.0).sine().scale_amp(0.05);\n\n let buffer_c: Vec<_> = signal6.add_amp(signal7).add_amp(signal8)\n\n .take(BEEP_SAMPLES)\n\n .map(|x|{x[0] as f32}).collect();\n\n\n", "file_path": "src/audio.rs", "rank": 41, "score": 4.773738729744022 }, { "content": "use sample::Signal;\n\n\n\n\n\nconst SAMPLE_RATE: usize = 48_000;\n\nconst SAMPLE_HZ: f64 = SAMPLE_RATE as f64;\n\nconst BEEP_SAMPLES: usize = SAMPLE_RATE/10;\n\n\n\n\n\n// Buffer an audio buffer sample to the given channel\n", "file_path": "src/audio.rs", "rank": 42, "score": 3.920262910387078 } ]
Rust
crate/divvunspell/src/ffi/fbs/tokenizer.rs
divvun/divvunspell-swift
9231ffb655752c3c3ad646216abd9aff3ec02ef0
#![allow(dead_code, unused_imports)] use std::cmp::Ordering; use std::mem; extern crate flatbuffers; use self::flatbuffers::EndianScalar; pub enum IndexedWordOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct IndexedWord<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for IndexedWord<'a> { type Inner = IndexedWord<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> IndexedWord<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { IndexedWord { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args IndexedWordArgs<'args>, ) -> flatbuffers::WIPOffset<IndexedWord<'bldr>> { let mut builder = IndexedWordBuilder::new(_fbb); builder.add_index(args.index); if let Some(x) = args.value { builder.add_value(x); } builder.finish() } pub const VT_INDEX: flatbuffers::VOffsetT = 4; pub const VT_VALUE: flatbuffers::VOffsetT = 6; #[inline] pub fn index(&self) -> u64 { self._tab .get::<u64>(IndexedWord::VT_INDEX, Some(0)) .unwrap() } #[inline] pub fn value(&self) -> Option<&'a str> { self._tab .get::<flatbuffers::ForwardsUOffset<&str>>(IndexedWord::VT_VALUE, None) } } pub struct IndexedWordArgs<'a> { pub index: u64, pub value: Option<flatbuffers::WIPOffset<&'a str>>, } impl<'a> Default for IndexedWordArgs<'a> { #[inline] fn default() -> Self { IndexedWordArgs { index: 0, value: None, } } } pub struct IndexedWordBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> IndexedWordBuilder<'a, 'b> { #[inline] pub fn add_index(&mut self, index: u64) { self.fbb_.push_slot::<u64>(IndexedWord::VT_INDEX, index, 0); } #[inline] pub fn add_value(&mut self, value: flatbuffers::WIPOffset<&'b str>) { self.fbb_ 
.push_slot_always::<flatbuffers::WIPOffset<_>>(IndexedWord::VT_VALUE, value); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> IndexedWordBuilder<'a, 'b> { let start = _fbb.start_table(); IndexedWordBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<IndexedWord<'a>> { let o = self.fbb_.end_table(self.start_); flatbuffers::WIPOffset::new(o.value()) } } pub enum WordContextOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct WordContext<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for WordContext<'a> { type Inner = WordContext<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> WordContext<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { WordContext { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args WordContextArgs<'args>, ) -> flatbuffers::WIPOffset<WordContext<'bldr>> { let mut builder = WordContextBuilder::new(_fbb); if let Some(x) = args.second_after { builder.add_second_after(x); } if let Some(x) = args.first_after { builder.add_first_after(x); } if let Some(x) = args.second_before { builder.add_second_before(x); } if let Some(x) = args.first_before { builder.add_first_before(x); } if let Some(x) = args.current { builder.add_current(x); } builder.finish() } pub const VT_CURRENT: flatbuffers::VOffsetT = 4; pub const VT_FIRST_BEFORE: flatbuffers::VOffsetT = 6; pub const VT_SECOND_BEFORE: flatbuffers::VOffsetT = 8; pub const VT_FIRST_AFTER: flatbuffers::VOffsetT = 10; pub const VT_SECOND_AFTER: flatbuffers::VOffsetT = 12; #[inline] pub fn current(&self) -> IndexedWord<'a> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_CURRENT, None) .unwrap() } #[inline] pub fn first_before(&self) -> 
Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_FIRST_BEFORE, None, ) } #[inline] pub fn second_before(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_BEFORE, None, ) } #[inline] pub fn first_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_FIRST_AFTER, None) } #[inline] pub fn second_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_AFTER, None, ) } } pub struct WordContextArgs<'a> { pub current: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, } impl<'a> Default for WordContextArgs<'a> { #[inline] fn default() -> Self { WordContextArgs { current: None, first_before: None, second_before: None, first_after: None, second_after: None, } } } pub struct WordContextBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> WordContextBuilder<'a, 'b> { #[inline] pub fn add_current(&mut self, current: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_CURRENT, current, ); } #[inline] pub fn add_first_before(&mut self, first_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_BEFORE, first_before, ); } #[inline] pub fn add_second_before(&mut self, second_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ 
.push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_BEFORE, second_before, ); } #[inline] pub fn add_first_after(&mut self, first_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_AFTER, first_after, ); } #[inline] pub fn add_second_after(&mut self, second_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_AFTER, second_after, ); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WordContextBuilder<'a, 'b> { let start = _fbb.start_table(); WordContextBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<WordContext<'a>> { let o = self.fbb_.end_table(self.start_); self.fbb_.required(o, WordContext::VT_CURRENT, "current"); flatbuffers::WIPOffset::new(o.value()) } } #[inline] pub fn get_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_root::<WordContext<'a>>(buf) } #[inline] pub fn get_size_prefixed_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_size_prefixed_root::<WordContext<'a>>(buf) } #[inline] pub fn finish_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish(root, None); } #[inline] pub fn finish_size_prefixed_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish_size_prefixed(root, None); }
#![allow(dead_code, unused_imports)] use std::cmp::Ordering; use std::mem; extern crate flatbuffers; use self::flatbuffers::EndianScalar; pub enum IndexedWordOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct IndexedWord<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for IndexedWord<'a> { type Inner = IndexedWord<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> IndexedWord<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { IndexedWord { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args IndexedWordArgs<'args>, ) -> flatbuffers::WIPOffset<IndexedWord<'bldr>> { let mut builder = IndexedWordBuilder::new(_fbb); builder.add_index(args.index); if let Some(x) = args.value { builder.add_value(x); } builder.finish() } pub const VT_INDEX: flatbuffers::VOffsetT = 4; pub const VT_VALUE: flatbuffers::VOffsetT = 6; #[inline] pub fn index(&self) -> u64 { self._tab .get::<u64>(IndexedWord::VT_INDEX, Some(0)) .unwrap() } #[inline] pub fn value(&self) -> Option<&'a str> { self._tab .get::<flatbuffers::ForwardsUOffset<&str>>(IndexedWord::VT_VALUE, None) } } pub struct IndexedWordArgs<'a> { pub index: u64, pub value: Option<flatbuffers::WIPOffset<&'a str>>, } impl<'a> Default for IndexedWordArgs<'a> { #[inline] fn default() -> Self { IndexedWordArgs { index: 0, value: None, } } } pub struct IndexedWordBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> IndexedWordBuilder<'a, 'b> { #[inline] pub fn add_index(&mut self, index: u64) { self.fbb_.push_slot::<u64>(IndexedWord::VT_INDEX, index, 0); } #[inline] pub fn add_value(&mut self, value: flatbuffers::WIPOffset<&'b str>) { self.fbb_ 
.push_slot_always::<flatbuffers::WIPOffset<_>>(IndexedWord::VT_VALUE, value); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> IndexedWordBuilder<'a, 'b> { let start = _fbb.start_table(); IndexedWordBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<IndexedWord<'a>> { let o = self.fbb_.end_table(self.start_); flatbuffers::WIPOffset::new(o.value()) } } pub enum WordContextOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct WordContext<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for WordContext<'a> { type Inner = WordContext<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> WordContext<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { WordContext { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args WordContextArgs<'args>, ) -> flatbuffers::WIPOffset<WordContext<'bldr>> { let mut builder = WordContextBuilder::new(_fbb); if let Some(x) = args.second_after { builder.add_second_after(x); } if let Some(x) = args.first_after { builder.add_first_after(x); } if let Some(x) = args.second_before { builder.add_second_before(x); } if let Some(x) = args.first_before { builder.add_first_before(x); } if let Some(x) = args.current { builder.add_current(x); } builder.finish() } pub const VT_CURRENT: flatbuffers::VOffsetT = 4; pub const VT_FIRST_BEFORE: flatbuffers::VOffsetT = 6; pub const VT_SECOND_BEFORE: flatbuffers::VOffsetT = 8; pub const VT_FIRST_AFTER: flatbuffers::VOffsetT = 10; pub const VT_SECOND_AFTER: flatbuffers::VOffsetT = 12; #[inline] pub fn current(&self) -> IndexedWord<'a> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_CURRENT, None) .unwrap() } #[inline] pub fn first_before(&self) -> 
Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_FIRST_BEFORE, None, ) } #[inline] pub fn second_before(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_BEFORE, None, ) } #[inline] pub fn first_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_FIRST_AFTER, None) } #[inline] pub fn second_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_AFTER, None, ) } } pub struct WordContextArgs<'a> { pub current: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, } impl<'a> Default for WordContextArgs<'a> { #[inline] fn default() -> Self { WordContextArgs { current: None, first_before: None, second_before: None, first_after: None, second_after: None, } } } pub struct WordContextBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> WordContextBuilder<'a, 'b> { #[inline] pub fn add_current(&mut self, current: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_CURRENT, current, ); } #[inline] pub fn add_first_before(&mut self, first_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_BEFORE, first_before, ); } #[inline] pub fn add_second_before(&mut self, second_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .
#[inline] pub fn add_first_after(&mut self, first_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_AFTER, first_after, ); } #[inline] pub fn add_second_after(&mut self, second_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_AFTER, second_after, ); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WordContextBuilder<'a, 'b> { let start = _fbb.start_table(); WordContextBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<WordContext<'a>> { let o = self.fbb_.end_table(self.start_); self.fbb_.required(o, WordContext::VT_CURRENT, "current"); flatbuffers::WIPOffset::new(o.value()) } } #[inline] pub fn get_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_root::<WordContext<'a>>(buf) } #[inline] pub fn get_size_prefixed_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_size_prefixed_root::<WordContext<'a>>(buf) } #[inline] pub fn finish_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish(root, None); } #[inline] pub fn finish_size_prefixed_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish_size_prefixed(root, None); }
push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_BEFORE, second_before, ); }
function_block-function_prefix_line
[ { "content": "pub fn cursor_context(first_half: &str, second_half: &str) -> WordContext {\n\n // Find the point in the first half where the first \"word\" happens\n\n let mut first_half_iter = first_half.word_bound_indices().rev();\n\n let mut second_half_iter = second_half.word_bound_indices();\n\n\n\n let current = {\n\n let first_half_last_item = match first_half_iter.next() {\n\n Some(v) if v.1.chars().any(is_alphanumeric) => v,\n\n _ => (0, \"\"),\n\n };\n\n\n\n let second_half_first_item = match second_half_iter.next() {\n\n Some(v) if v.1.chars().any(is_alphanumeric) => v,\n\n _ => (0, \"\"),\n\n };\n\n\n\n let first_word = format!(\"{}{}\", first_half_last_item.1, second_half_first_item.1);\n\n let first_index = if first_half_last_item.1 == \"\" {\n\n first_half.len() + second_half_first_item.0\n\n } else {\n", "file_path": "crate/divvunspell/src/tokenizer/mod.rs", "rank": 2, "score": 197568.1167837366 }, { "content": "#[inline(always)]\n\npub fn upper_first(s: &str) -> SmolStr {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => SmolStr::new(\"\"),\n\n Some(f) => SmolStr::from(f.to_uppercase().collect::<String>() + c.as_str()),\n\n }\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 3, "score": 192205.34890069521 }, { "content": "#[inline(always)]\n\npub fn lower_case(s: &str) -> SmolStr {\n\n s.chars()\n\n .map(|c| c.to_lowercase().collect::<String>())\n\n .collect::<SmolStr>()\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 4, "score": 192205.34890069521 }, { "content": "#[inline(always)]\n\npub fn upper_case(s: &str) -> SmolStr {\n\n s.chars()\n\n .map(|c| c.to_uppercase().collect::<String>())\n\n .collect::<SmolStr>()\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 5, "score": 192205.34890069521 }, { "content": "#[inline(always)]\n\npub fn lower_first(s: &str) -> SmolStr {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => 
SmolStr::new(\"\"),\n\n Some(f) => SmolStr::from(f.to_lowercase().collect::<String>() + c.as_str()),\n\n }\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 6, "score": 192205.34890069521 }, { "content": "pub fn is_all_caps(word: &str) -> bool {\n\n upper_case(word) == word\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 7, "score": 184769.13999004418 }, { "content": "pub fn is_first_caps(word: &str) -> bool {\n\n upper_first(word) == word\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum CaseMutation {\n\n FirstCaps,\n\n AllCaps,\n\n None,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum CaseMode {\n\n FirstResults,\n\n MergeAll,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct CaseHandler {\n\n pub original_input: SmolStr,\n\n pub mutation: CaseMutation,\n\n pub mode: CaseMode,\n\n pub words: Vec<SmolStr>,\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 8, "score": 181925.14177951595 }, { "content": "pub fn is_mixed_case(word: &str) -> bool {\n\n let mut chars = word.chars();\n\n let mut last_case = match chars.next() {\n\n Some(ch) => Case::new(ch),\n\n None => return false,\n\n };\n\n\n\n if last_case == Case::Neither {\n\n return false;\n\n }\n\n\n\n let mut case_changes = 0;\n\n\n\n for ch in chars {\n\n let next_case = Case::new(ch);\n\n\n\n match (last_case, next_case) {\n\n (_, Case::Neither) => return false,\n\n (_, Case::Upper) => case_changes += 2,\n\n (Case::Upper, Case::Lower) => case_changes += 1,\n\n _ => {}\n\n }\n\n\n\n last_case = next_case;\n\n }\n\n\n\n case_changes > 1\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 9, "score": 181925.14177951595 }, { "content": "pub fn word_variants(word: &str) -> CaseHandler {\n\n if is_mixed_case(word) {\n\n return mixed_case_word_variants(word);\n\n }\n\n\n\n let word = SmolStr::new(word);\n\n let mut base: Vec<SmolStr> 
= vec![];\n\n\n\n base.append(\n\n &mut std::iter::once(&word)\n\n .chain(base.iter())\n\n .filter(|x| is_all_caps(x))\n\n .map(|x| upper_first(&lower_case(x)))\n\n .collect(),\n\n );\n\n\n\n base.append(\n\n &mut std::iter::once(&word)\n\n .chain(base.iter())\n\n .map(|x| lower_case(x))\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 10, "score": 179199.60432089728 }, { "content": "pub trait IndexTable<F: vfs::File>: Sized {\n\n fn from_path<P, FS>(fs: &FS, path: P) -> Result<Self, TransducerError>\n\n where\n\n P: AsRef<std::path::Path>,\n\n FS: Filesystem<File = F>;\n\n fn input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber>;\n\n fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex>;\n\n fn final_weight(&self, i: TransitionTableIndex) -> Option<Weight>;\n\n\n\n #[inline(always)]\n\n fn is_final(&self, i: TransitionTableIndex) -> bool {\n\n self.input_symbol(i) == None && self.target(i) != None\n\n }\n\n}\n\n\n\n#[cfg(feature = \"internal_convert\")]\n\npub mod convert;\n", "file_path": "crate/divvunspell/src/transducer/mod.rs", "rank": 13, "score": 146163.8319742108 }, { "content": "pub trait File: Read + Debug {\n\n fn len(&self) -> Result<u64>;\n\n fn is_empty(&self) -> Result<bool>;\n\n #[cfg(unix)]\n\n fn read_at(&self, buf: &mut [u8], offset: u64) -> Result<usize>;\n\n #[cfg(unix)]\n\n fn read_exact_at(&self, buf: &mut [u8], offset: u64) -> Result<()>;\n\n unsafe fn memory_map(&self) -> Result<Mmap>;\n\n unsafe fn partial_memory_map(&self, offset: u64, len: usize) -> Result<Mmap>;\n\n}\n\n\n\nimpl File for std::fs::File {\n\n fn len(&self) -> Result<u64> {\n\n self.metadata().map(|m| m.len())\n\n }\n\n\n\n fn is_empty(&self) -> Result<bool> {\n\n self.len().map(|x| x == 0)\n\n }\n\n\n", "file_path": "crate/divvunspell/src/vfs.rs", "rank": 14, "score": 143810.076682182 }, { "content": "#[derive(Debug, Options)]\n\nstruct Args {\n\n #[options(help = \"print help message\")]\n\n help: 
bool,\n\n\n\n #[options(command)]\n\n command: Option<Command>,\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 15, "score": 137781.48511968183 }, { "content": "#[derive(Debug, Options)]\n\nstruct TokenizeArgs {\n\n #[options(help = \"print help message\")]\n\n help: bool,\n\n\n\n #[options(short = \"w\", long = \"words\", help = \"show words only\")]\n\n is_words_only: bool,\n\n\n\n #[options(free, help = \"text to be tokenized\")]\n\n inputs: Vec<String>,\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 16, "score": 135140.07979746794 }, { "content": "#[derive(Debug, Options)]\n\nstruct SuggestArgs {\n\n #[options(help = \"print help message\")]\n\n help: bool,\n\n\n\n #[options(help = \"BHFST or ZHFST archive to be used\", required)]\n\n archive: PathBuf,\n\n\n\n #[options(short = \"S\", help = \"always show suggestions even if word is correct\")]\n\n always_suggest: bool,\n\n\n\n #[options(help = \"maximum weight limit for suggestions\")]\n\n weight: Option<f32>,\n\n\n\n #[options(help = \"maximum number of results\")]\n\n nbest: Option<usize>,\n\n\n\n #[options(\n\n no_short,\n\n long = \"no-case-handling\",\n\n help = \"disables case-handling algorithm (makes results more like hfst-ospell)\"\n\n )]\n\n disable_case_handling: bool,\n\n\n\n #[options(no_short, long = \"json\", help = \"output in JSON format\")]\n\n use_json: bool,\n\n\n\n #[options(free, help = \"words to be processed\")]\n\n inputs: Vec<String>,\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 17, "score": 135140.07979746794 }, { "content": "#[derive(Debug, Options)]\n\nstruct PredictArgs {\n\n #[options(help = \"print help message\")]\n\n help: bool,\n\n\n\n #[options(help = \"BHFST archive to be used\", required)]\n\n archive: PathBuf,\n\n\n\n #[options(\n\n short = \"n\",\n\n long = \"name\",\n\n help = \"Predictor name to use (default: gpt2_predictor)\"\n\n )]\n\n predictor_name: Option<String>,\n\n\n\n #[options(help = 
\"whether suggestions should not be validated against a speller\")]\n\n disable_spelling_validation: bool,\n\n\n\n #[options(no_short, long = \"json\", help = \"output in JSON format\")]\n\n use_json: bool,\n\n\n\n #[options(free, help = \"text to be tokenized\")]\n\n inputs: Vec<String>,\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 18, "score": 135140.07979746794 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\nenum FormatExtendType {\n\n AcceptAny,\n\n AcceptNone,\n\n RequireLetter,\n\n RequireHLetter,\n\n AcceptQLetter,\n\n RequireNumeric,\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/word.rs", "rank": 19, "score": 133052.46068955932 }, { "content": "fn mixed_case_word_variants(word: &str) -> CaseHandler {\n\n // The input string should be accepted IFF it is accepted exactly as given,\n\n // or with the initial letter downcased, or all upper.\n\n //\n\n // Crucially, it should not be accepted if it is only accepted when all lowercased.\n\n\n\n let mut words = vec![];\n\n if is_first_caps(word) {\n\n words.push(lower_first(word));\n\n } else {\n\n let upper = upper_first(word);\n\n // Edge case of \"sOMETHING\"\n\n if !is_all_caps(&upper) {\n\n words.push(upper);\n\n }\n\n }\n\n\n\n CaseHandler {\n\n original_input: word.into(),\n\n mutation: if is_first_caps(word) {\n\n CaseMutation::FirstCaps\n\n } else {\n\n CaseMutation::None\n\n },\n\n mode: CaseMode::FirstResults,\n\n words,\n\n }\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 20, "score": 130050.15242677435 }, { "content": "#[doc(hidden)]\n\npub trait IntoFlatbuffer {\n\n fn into_flatbuffer(self) -> Vec<u8>;\n\n}\n", "file_path": "crate/divvunspell/src/ffi/fbs/mod.rs", "rank": 21, "score": 128722.62537099353 }, { "content": "#[inline(always)]\n\nfn speller_start_node(pool: &Pool<TreeNode>, size: usize) -> Vec<Recycled<TreeNode>> {\n\n let start_node = TreeNode::empty(pool, vec![0; size]);\n\n let mut nodes = 
Vec::with_capacity(256);\n\n nodes.push(start_node);\n\n nodes\n\n}\n\n\n\npub struct SpellerWorker<F: crate::vfs::File, T: Transducer<F>, U: Transducer<F>> {\n\n speller: Arc<HfstSpeller<F, T, U>>,\n\n input: Vec<SymbolNumber>,\n\n config: SpellerConfig,\n\n}\n\n\n\n#[allow(clippy::too_many_arguments)]\n\nimpl<'t, F, T: Transducer<F> + 't, U: Transducer<F> + 't> SpellerWorker<F, T, U>\n\nwhere\n\n F: crate::vfs::File,\n\n T: Transducer<F>,\n\n U: Transducer<F>,\n\n{\n", "file_path": "crate/divvunspell/src/speller/worker.rs", "rank": 22, "score": 128673.34174427493 }, { "content": "#[inline(always)]\n\nfn boxpath(path: &Path, filename: &str) -> BoxPath {\n\n let path = Path::new(path.file_name().unwrap()).join(filename);\n\n BoxPath::new(path).unwrap()\n\n}\n\n\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 23, "score": 127970.87677796112 }, { "content": "fn tokenize(args: TokenizeArgs) -> anyhow::Result<()> {\n\n let inputs: String = if args.inputs.is_empty() {\n\n eprintln!(\"Reading from stdin...\");\n\n let mut buffer = String::new();\n\n io::stdin()\n\n .read_to_string(&mut buffer)\n\n .expect(\"reading stdin\");\n\n buffer\n\n } else {\n\n args.inputs.into_iter().collect::<Vec<_>>().join(\" \")\n\n };\n\n\n\n if args.is_words_only {\n\n for (index, token) in inputs.word_indices() {\n\n println!(\"{:>4}: \\\"{}\\\"\", index, token);\n\n }\n\n } else {\n\n for (index, token) in inputs.word_bound_indices() {\n\n println!(\"{:>4}: \\\"{}\\\"\", index, token);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 24, "score": 125539.99357289138 }, { "content": "#[cfg(not(feature = \"gpt2\"))]\n\nfn predict(_args: PredictArgs) -> anyhow::Result<()> {\n\n eprintln!(\"ERROR: DivvunSpell was built without GPT2 support.\");\n\n eprintln!(\"If you built this using cargo, re-run the build with the following:\");\n\n eprintln!(\"\");\n\n eprintln!(\" cargo build --features gpt2\");\n\n eprintln!(\"\");\n\n\n\n 
std::process::exit(1);\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 25, "score": 125539.99357289138 }, { "content": "fn suggest(args: SuggestArgs) -> anyhow::Result<()> {\n\n let mut suggest_cfg = SpellerConfig::default();\n\n\n\n if args.disable_case_handling {\n\n suggest_cfg.case_handling = None;\n\n }\n\n\n\n if let Some(v) = args.nbest {\n\n if v == 0 {\n\n suggest_cfg.n_best = None;\n\n } else {\n\n suggest_cfg.n_best = Some(v);\n\n }\n\n }\n\n\n\n if let Some(v) = args.weight.filter(|x| x >= &0.0) {\n\n if v == 0.0 {\n\n suggest_cfg.max_weight = None;\n\n } else {\n\n suggest_cfg.max_weight = Some(v);\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 26, "score": 125539.99357289138 }, { "content": "#[cfg(feature = \"gpt2\")]\n\nfn predict(args: PredictArgs) -> anyhow::Result<()> {\n\n let raw_input = if args.inputs.is_empty() {\n\n eprintln!(\"Reading from stdin...\");\n\n let mut buffer = String::new();\n\n io::stdin()\n\n .read_to_string(&mut buffer)\n\n .expect(\"reading stdin\");\n\n buffer\n\n } else {\n\n args.inputs.join(\" \")\n\n };\n\n\n\n let predictor_name = args.predictor_name.as_deref();\n\n let archive = load_predictor_archive(&args.archive, predictor_name)?;\n\n let predictor = archive.predictor();\n\n\n\n let mut writer: Box<dyn OutputWriter> = if args.use_json {\n\n Box::new(JsonWriter::new())\n\n } else {\n\n Box::new(StdoutWriter)\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 27, "score": 125539.99357289138 }, { "content": "pub trait TransitionTable<F: vfs::File>: Sized {\n\n fn from_path<P, FS>(fs: &FS, path: P) -> Result<Self, TransducerError>\n\n where\n\n P: AsRef<std::path::Path>,\n\n FS: Filesystem<File = F>;\n\n fn input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber>;\n\n fn output_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber>;\n\n fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex>;\n\n fn weight(&self, i: 
TransitionTableIndex) -> Option<Weight>;\n\n\n\n #[inline(always)]\n\n fn is_final(&self, i: TransitionTableIndex) -> bool {\n\n self.input_symbol(i) == None && self.output_symbol(i) == None && self.target(i) == Some(1)\n\n }\n\n\n\n #[inline(always)]\n\n fn symbol_transition(&self, i: TransitionTableIndex) -> SymbolTransition {\n\n SymbolTransition::new(self.target(i), self.output_symbol(i), self.weight(i))\n\n }\n\n}\n\n\n", "file_path": "crate/divvunspell/src/transducer/mod.rs", "rank": 28, "score": 111801.36433062484 }, { "content": "#[inline(always)]\n\nfn insert_thfst_files(boxfile: &mut BoxFileWriter, path: &Path) -> Result<(), std::io::Error> {\n\n let boxpath = BoxPath::new(path.file_name().unwrap()).unwrap();\n\n println!(\"Inserting \\\"{}\\\"...\", &boxpath);\n\n\n\n boxfile.mkdir(boxpath, std::collections::HashMap::new())?;\n\n insert(boxfile, Compression::Stored, path, \"alphabet\")?;\n\n insert(boxfile, Compression::Stored, path, \"index\")?;\n\n insert(boxfile, Compression::Stored, path, \"transition\")\n\n}\n\n\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 29, "score": 108097.83932122 }, { "content": " }\n\n\n\n // Final weight reads from the same position as target, but for a different tuple\n\n // This can probably be abstracted out more nicely\n\n #[inline(always)]\n\n pub fn final_weight(&self, i: TransitionTableIndex) -> Option<Weight> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = self.offset + INDEX_TABLE_SIZE * i as usize;\n\n let weight: Weight = {\n\n let mut cursor = self.make_cursor();\n\n cursor.set_position((index + mem::size_of::<SymbolNumber>()) as u64);\n\n cursor.read_f32::<LittleEndian>().unwrap()\n\n };\n\n\n\n Some(weight)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn is_final(&self, i: TransitionTableIndex) -> bool {\n\n self.input_symbol(i) == None && self.target(i) != None\n\n }\n\n}\n", "file_path": "crate/divvunspell/src/transducer/hfst/index_table.rs", "rank": 30, "score": 107812.5714475299 
}, { "content": "use std::ptr;\n\n\n\nuse memmap2::Mmap;\n\n\n\nuse crate::transducer::TransducerError;\n\nuse crate::types::{SymbolNumber, TransitionTableIndex, Weight};\n\nuse crate::vfs::{self, Filesystem};\n\n\n\n#[derive(Debug)]\n\npub struct MemmapIndexTable<F> {\n\n buf: Mmap,\n\n pub(crate) size: u32,\n\n _file: std::marker::PhantomData<F>,\n\n}\n\n\n\nconst INDEX_TABLE_SIZE: usize = 8;\n\n\n\nimpl<F: vfs::File> MemmapIndexTable<F> {\n\n pub fn from_path_partial<P, FS>(\n\n fs: &FS,\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 31, "score": 107812.24897291517 }, { "content": " fn read_u16_at(&self, index: u64) -> u16 {\n\n let mut buf = [0u8; 2];\n\n self.file\n\n .read_exact_at(&mut buf, index)\n\n .expect(\"failed to read u16\");\n\n u16::from_le_bytes(buf)\n\n }\n\n\n\n #[inline(always)]\n\n fn read_u32_at(&self, index: u64) -> u32 {\n\n let mut buf = [0u8; 4];\n\n self.file\n\n .read_exact_at(&mut buf, index)\n\n .expect(\"failed to read u32\");\n\n u32::from_le_bytes(buf)\n\n }\n\n }\n\n\n\n impl<F: vfs::File> IndexTable<F> for FileIndexTable<F> {\n\n fn from_path<P, FS>(fs: &FS, path: P) -> Result<Self, TransducerError>\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 32, "score": 107811.80337693644 }, { "content": " pub fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = self.offset + INDEX_TABLE_SIZE * i as usize;\n\n let target: TransitionTableIndex =\n\n if cfg!(all(target_arch = \"arm\", target_pointer_width = \"32\")) {\n\n let mut cursor = self.make_cursor();\n\n cursor.set_position((index + mem::size_of::<SymbolNumber>()) as u64);\n\n cursor.read_u32::<LittleEndian>().unwrap()\n\n } else {\n\n unsafe { ptr::read(self.mmap.as_ptr().add(index + 2) as *const _) }\n\n };\n\n\n\n if target == u32::MAX {\n\n None\n\n } else {\n\n Some(target)\n\n }\n", "file_path": 
"crate/divvunspell/src/transducer/hfst/index_table.rs", "rank": 33, "score": 107808.58763805451 }, { "content": "}\n\n\n\nimpl fmt::Debug for MappedIndexTable {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Index table index: {}\", self.size)?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[allow(clippy::len_without_is_empty)]\n\nimpl MappedIndexTable {\n\n pub fn new(\n\n buf: Arc<Mmap>,\n\n offset: usize,\n\n len: usize,\n\n size: TransitionTableIndex,\n\n ) -> MappedIndexTable {\n\n MappedIndexTable {\n\n size,\n\n mmap: buf,\n", "file_path": "crate/divvunspell/src/transducer/hfst/index_table.rs", "rank": 34, "score": 107808.28271861614 }, { "content": " }\n\n\n\n fn final_weight(&self, i: TransitionTableIndex) -> Option<Weight> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = (INDEX_TABLE_SIZE * i as usize) + 4;\n\n let x = self.read_u32_at(index as u64);\n\n let weight: Weight = f32::from_bits(x);\n\n\n\n Some(weight)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(unix)]\n\npub use self::unix::FileIndexTable;\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 35, "score": 107808.10675052858 }, { "content": "// We manually ensure alignment of reads in this file.\n\n#![allow(clippy::cast_ptr_alignment)]\n\n\n\nuse byteorder::{LittleEndian, ReadBytesExt};\n\nuse std::fmt;\n\nuse std::io::Cursor;\n\nuse std::mem;\n\nuse std::ptr;\n\nuse std::{u16, u32};\n\n\n\nuse crate::constants::INDEX_TABLE_SIZE;\n\nuse crate::types::{SymbolNumber, TransitionTableIndex, Weight};\n\nuse memmap2::Mmap;\n\nuse std::sync::Arc;\n\n\n\npub struct MappedIndexTable {\n\n pub(crate) size: TransitionTableIndex,\n\n pub(crate) mmap: Arc<Mmap>,\n\n pub(crate) offset: usize,\n\n pub(crate) len: usize,\n", "file_path": "crate/divvunspell/src/transducer/hfst/index_table.rs", "rank": 36, "score": 107808.00764237274 }, { "content": " offset,\n\n len,\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn len(&self) -> usize {\n\n self.len - 
self.offset\n\n }\n\n\n\n #[inline(always)]\n\n fn make_cursor<'a>(&'a self) -> Cursor<&'a [u8]> {\n\n Cursor::new(&self.mmap)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber> {\n\n if i >= self.size {\n\n return None;\n\n }\n", "file_path": "crate/divvunspell/src/transducer/hfst/index_table.rs", "rank": 37, "score": 107807.46379196634 }, { "content": "\n\n let index = self.offset + INDEX_TABLE_SIZE * i as usize;\n\n\n\n let input_symbol: SymbolNumber =\n\n if cfg!(all(target_arch = \"arm\", target_pointer_width = \"32\")) {\n\n let mut cursor = self.make_cursor();\n\n cursor.set_position(index as u64);\n\n cursor.read_u16::<LittleEndian>().unwrap()\n\n } else {\n\n unsafe { ptr::read(self.mmap.as_ptr().add(index) as *const _) }\n\n };\n\n\n\n if input_symbol == u16::MAX {\n\n None\n\n } else {\n\n Some(input_symbol)\n\n }\n\n }\n\n\n\n #[inline(always)]\n", "file_path": "crate/divvunspell/src/transducer/hfst/index_table.rs", "rank": 38, "score": 107806.96506957128 }, { "content": " Some(weight)\n\n }\n\n}\n\n\n\n#[cfg(unix)]\n\nmod unix {\n\n use super::*;\n\n\n\n use crate::transducer::IndexTable;\n\n use crate::transducer::TransducerError;\n\n use crate::types::{SymbolNumber, TransitionTableIndex, Weight};\n\n use crate::vfs::{self, Filesystem};\n\n\n\n pub struct FileIndexTable<F: vfs::File> {\n\n file: F,\n\n size: u32,\n\n }\n\n\n\n impl<F: vfs::File> FileIndexTable<F> {\n\n #[inline(always)]\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 39, "score": 107806.08585059448 }, { "content": "\n\n let index = (INDEX_TABLE_SIZE * i as usize) + 4;\n\n let target: TransitionTableIndex =\n\n unsafe { ptr::read(self.buf.as_ptr().add(index) as *const _) };\n\n\n\n if target == std::u32::MAX {\n\n None\n\n } else {\n\n Some(target)\n\n }\n\n }\n\n\n\n fn final_weight(&self, i: TransitionTableIndex) -> Option<Weight> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let 
index = (INDEX_TABLE_SIZE * i as usize) + 4;\n\n let weight: Weight = unsafe { ptr::read(self.buf.as_ptr().add(index) as *const _) };\n\n\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 40, "score": 107803.41955537263 }, { "content": " if input_symbol == std::u16::MAX {\n\n None\n\n } else {\n\n Some(input_symbol)\n\n }\n\n }\n\n\n\n fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = (INDEX_TABLE_SIZE * i as usize) + 4;\n\n let target: TransitionTableIndex = self.read_u32_at(index as u64);\n\n\n\n if target == std::u32::MAX {\n\n None\n\n } else {\n\n Some(target)\n\n }\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 41, "score": 107802.46409348614 }, { "content": " if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = INDEX_TABLE_SIZE * i as usize;\n\n\n\n let input_symbol: SymbolNumber =\n\n unsafe { ptr::read(self.buf.as_ptr().add(index) as *const _) };\n\n\n\n if input_symbol == std::u16::MAX {\n\n None\n\n } else {\n\n Some(input_symbol)\n\n }\n\n }\n\n\n\n fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex> {\n\n if i >= self.size {\n\n return None;\n\n }\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 42, "score": 107802.15891846955 }, { "content": " path: P,\n\n chunk: u64,\n\n total: u64,\n\n ) -> Result<Self, TransducerError>\n\n where\n\n P: AsRef<std::path::Path>,\n\n FS: Filesystem<File = F>,\n\n {\n\n let file = fs.open_file(path).map_err(TransducerError::Io)?;\n\n let len = file.len().map_err(TransducerError::Io)? 
/ total;\n\n let buf = unsafe {\n\n file.partial_memory_map(chunk * len, len as usize)\n\n .map_err(TransducerError::Memmap)?\n\n };\n\n let size = (buf.len() / INDEX_TABLE_SIZE) as u32;\n\n Ok(MemmapIndexTable {\n\n buf,\n\n size,\n\n _file: std::marker::PhantomData::<F>,\n\n })\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 43, "score": 107801.06578273705 }, { "content": " }\n\n}\n\n\n\nimpl<F: vfs::File> crate::transducer::IndexTable<F> for MemmapIndexTable<F> {\n\n fn from_path<P, FS>(fs: &FS, path: P) -> Result<Self, TransducerError>\n\n where\n\n P: AsRef<std::path::Path>,\n\n FS: Filesystem<File = F>,\n\n {\n\n let file = fs.open_file(path).map_err(TransducerError::Io)?;\n\n let buf = unsafe { file.memory_map().map_err(TransducerError::Memmap)? };\n\n let size = (buf.len() / INDEX_TABLE_SIZE) as u32;\n\n Ok(MemmapIndexTable {\n\n buf,\n\n size,\n\n _file: std::marker::PhantomData::<F>,\n\n })\n\n }\n\n\n\n fn input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber> {\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 44, "score": 107799.16302360228 }, { "content": " where\n\n P: AsRef<std::path::Path>,\n\n FS: Filesystem<File = F>,\n\n {\n\n let file = fs.open_file(path).map_err(TransducerError::Io)?;\n\n Ok(FileIndexTable {\n\n size: file.len().map_err(TransducerError::Io)? 
as u32,\n\n file,\n\n })\n\n }\n\n\n\n fn input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = INDEX_TABLE_SIZE * i as usize;\n\n\n\n let input_symbol: SymbolNumber = self.read_u16_at(index as u64);\n\n\n", "file_path": "crate/divvunspell/src/transducer/thfst/index_table.rs", "rank": 45, "score": 107798.72072219891 }, { "content": "pub fn open<P>(path: P) -> Result<Arc<dyn SpellerArchive + Send + Sync>, SpellerArchiveError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n match path.as_ref().extension() {\n\n Some(x) if x == \"bhfst\" => {\n\n ThfstChunkedBoxSpellerArchive::open(path.as_ref()).map(|x| Arc::new(x) as _)\n\n }\n\n Some(x) if x == \"zhfst\" => ZipSpellerArchive::open(path.as_ref()).map(|x| Arc::new(x) as _),\n\n unknown => Err(SpellerArchiveError::UnsupportedExt(\n\n unknown\n\n .map(|x| x.to_owned())\n\n .unwrap_or_else(|| OsString::new()),\n\n )),\n\n }\n\n}\n\n\n\n#[cfg(feature = \"internal_ffi\")]\n\npub(crate) mod ffi {\n\n use super::*;\n", "file_path": "crate/divvunspell/src/archive/mod.rs", "rank": 46, "score": 103245.50417277741 }, { "content": "struct Suggestion {\n\n std::string value;\n\n float weight;\n\n\n\n Suggestion(std::string value, float weight) : value(value), weight(weight) {}\n\n};\n\n\n", "file_path": "crate/support/divvunspell.hpp", "rank": 47, "score": 101956.86463912955 }, { "content": "#[derive(Serialize, Default, Debug, Clone)]\n\nstruct Summary {\n\n total_words: u32,\n\n first_position: u32,\n\n top_five: u32,\n\n any_position: u32,\n\n no_suggestions: u32,\n\n only_wrong: u32,\n\n slowest_lookup: Time,\n\n fastest_lookup: Time,\n\n average_time: Time,\n\n average_time_95pc: Time,\n\n}\n\n\n\nimpl std::fmt::Display for Summary {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n let percent =\n\n |v: u32| -> String { format!(\"{:.2}%\", v as f32 / self.total_words as f32 * 100f32) };\n\n\n\n write!(\n\n f,\n", 
"file_path": "crate/accuracy/src/main.rs", "rank": 48, "score": 100092.11075263484 }, { "content": "#[derive(Debug, Default, Serialize, PartialOrd, Ord, PartialEq, Eq, Clone, Copy)]\n\nstruct Time {\n\n secs: u64,\n\n subsec_nanos: u32,\n\n}\n\n\n\nimpl std::fmt::Display for Time {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n let ms = self.secs * 1000 + (self.subsec_nanos as u64 / 1_000_000);\n\n write!(f, \"{}ms\", ms)\n\n }\n\n}\n\n\n", "file_path": "crate/accuracy/src/main.rs", "rank": 49, "score": 100091.14838549629 }, { "content": "struct SpellerConfig {\n\n std::size_t nBest;\n\n float maxWeight;\n\n float beam;\n\n\n\n SpellerConfig(std::size_t nBest, float maxWeight, float beam) : nBest(nBest), maxWeight(maxWeight), beam(beam) {}\n\n SpellerConfig() : nBest(5), maxWeight(20000.0), beam(0.0) {} \n\n};\n\n\n", "file_path": "crate/support/divvunspell.hpp", "rank": 50, "score": 100078.5869541795 }, { "content": "#[derive(Debug, Options)]\n\nenum Command {\n\n #[options(help = \"get suggestions for provided input\")]\n\n Suggest(SuggestArgs),\n\n\n\n #[options(help = \"print input in word-separated tokenized form\")]\n\n Tokenize(TokenizeArgs),\n\n\n\n #[options(help = \"predict next words using GPT2 model\")]\n\n Predict(PredictArgs),\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 51, "score": 98685.67826893208 }, { "content": "enum Opts {\n\n #[structopt(about = \"Convert an HFST file to THFST\")]\n\n HfstToThfst {\n\n #[structopt(parse(from_os_str))]\n\n from: PathBuf,\n\n },\n\n\n\n #[structopt(about = \"Convert a ZHFST file to BHFST\")]\n\n ZhfstToBhfst {\n\n #[structopt(parse(from_os_str))]\n\n from: PathBuf,\n\n },\n\n\n\n #[structopt(about = \"Convert a THFST acceptor/errmodel pair to BHFST\")]\n\n ThfstsToBhfst {\n\n #[structopt(parse(from_os_str))]\n\n acceptor: PathBuf,\n\n\n\n #[structopt(parse(from_os_str))]\n\n errmodel: PathBuf,\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 
52, "score": 98681.08706046078 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nenum Case {\n\n Upper,\n\n Lower,\n\n Neither,\n\n}\n\n\n\nimpl Case {\n\n #[inline(always)]\n\n fn new(ch: char) -> Case {\n\n if ch.is_lowercase() {\n\n Case::Lower\n\n } else if ch.is_uppercase() {\n\n Case::Upper\n\n } else {\n\n Case::Neither\n\n }\n\n }\n\n}\n\n\n", "file_path": "crate/divvunspell/src/tokenizer/case_handling.rs", "rank": 53, "score": 96986.72572458799 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\nenum RegionalState {\n\n Half,\n\n Full,\n\n Unknown,\n\n}\n\n\n\nimpl<'a> Iterator for WordBounds<'a> {\n\n type Item = &'a str;\n\n\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n let slen = self.string.len();\n\n (cmp::min(slen, 1), Some(slen))\n\n }\n\n\n\n #[inline]\n\n #[allow(clippy::cognitive_complexity)]\n\n fn next(&mut self) -> Option<&'a str> {\n\n use self::FormatExtendType::*;\n\n use self::WordBoundsState::*;\n", "file_path": "crate/divvunspell/src/tokenizer/word.rs", "rank": 54, "score": 96986.72572458799 }, { "content": "#[inline(always)]\n\nfn insert(\n\n boxfile: &mut BoxFileWriter,\n\n compression: Compression,\n\n path: &Path,\n\n name: &str,\n\n) -> Result<(), std::io::Error> {\n\n use std::collections::HashMap;\n\n use std::io::BufReader;\n\n let file = std::fs::File::open(path.join(name))?;\n\n boxfile\n\n .insert(\n\n compression,\n\n boxpath(path, name),\n\n &mut BufReader::new(file),\n\n HashMap::new(),\n\n )\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 55, "score": 96919.50832202882 }, { "content": "fn load_words(\n\n path: &str,\n\n max_words: Option<usize>,\n\n) -> Result<Vec<(String, String)>, Box<dyn Error>> {\n\n let mut rdr = csv::ReaderBuilder::new()\n\n .comment(Some(b'#'))\n\n .delimiter(b'\\t')\n\n .has_headers(false)\n\n .flexible(true)\n\n .from_path(path)?;\n\n\n\n Ok(rdr\n\n .records()\n\n .filter_map(Result::ok)\n\n .filter_map(|r| 
{\n\n r.get(0)\n\n .and_then(|x| r.get(1).map(|y| (x.to_string(), y.to_string())))\n\n })\n\n .take(max_words.unwrap_or(std::usize::MAX))\n\n .collect())\n\n}\n\n\n", "file_path": "crate/accuracy/src/main.rs", "rank": 56, "score": 96914.99446529517 }, { "content": "fn run(\n\n speller: Arc<dyn Speller + Send>,\n\n words: Vec<String>,\n\n writer: &mut dyn OutputWriter,\n\n is_suggesting: bool,\n\n is_always_suggesting: bool,\n\n suggest_cfg: &SpellerConfig,\n\n) {\n\n for word in words {\n\n let is_correct = speller.clone().is_correct_with_config(&word, &suggest_cfg);\n\n writer.write_correction(&word, is_correct);\n\n\n\n if is_suggesting && (is_always_suggesting || !is_correct) {\n\n let suggestions = speller.clone().suggest_with_config(&word, &suggest_cfg);\n\n writer.write_suggestions(&word, &suggestions);\n\n }\n\n }\n\n}\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 57, "score": 96914.99446529517 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct Report<'a> {\n\n metadata: Option<&'a divvunspell::archive::meta::SpellerMetadata>,\n\n config: &'a SpellerConfig,\n\n summary: Summary,\n\n results: Vec<AccuracyResult<'a>>,\n\n start_timestamp: Time,\n\n total_time: Time,\n\n}\n\n\n", "file_path": "crate/accuracy/src/main.rs", "rank": 58, "score": 96897.39890523233 }, { "content": "struct StdoutWriter;\n\n\n\nimpl OutputWriter for StdoutWriter {\n\n fn write_correction(&mut self, word: &str, is_correct: bool) {\n\n println!(\n\n \"Input: {}\\t\\t[{}]\",\n\n &word,\n\n if is_correct { \"CORRECT\" } else { \"INCORRECT\" }\n\n );\n\n }\n\n\n\n fn write_suggestions(&mut self, _word: &str, suggestions: &[Suggestion]) {\n\n for sugg in suggestions {\n\n println!(\"{}\\t\\t{}\", sugg.value, sugg.weight);\n\n }\n\n println!();\n\n }\n\n\n\n fn write_predictions(&mut self, predictions: &[String]) {\n\n println!(\"Predictions: \");\n\n println!(\"{}\", predictions.join(\" \"));\n\n }\n\n\n\n fn finish(&mut self) {}\n\n}\n\n\n", "file_path": 
"crate/divvunspell-bin/src/main.rs", "rank": 59, "score": 96590.25559822387 }, { "content": "#[derive(Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct JsonWriter {\n\n suggest: Vec<SuggestionRequest>,\n\n predict: Option<Vec<String>>,\n\n}\n\n\n\nimpl JsonWriter {\n\n pub fn new() -> JsonWriter {\n\n JsonWriter {\n\n suggest: vec![],\n\n predict: None,\n\n }\n\n }\n\n}\n\n\n\nimpl OutputWriter for JsonWriter {\n\n fn write_correction(&mut self, word: &str, is_correct: bool) {\n\n self.suggest.push(SuggestionRequest {\n\n word: word.to_owned(),\n\n is_correct,\n\n suggestions: vec![],\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 60, "score": 96590.25559822387 }, { "content": "#[derive(Serialize)]\n\nstruct SuggestionRequest {\n\n word: String,\n\n is_correct: bool,\n\n suggestions: Vec<Suggestion>,\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 61, "score": 96590.25559822387 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\nenum WordBoundsState {\n\n Start,\n\n Letter,\n\n HLetter,\n\n Numeric,\n\n Katakana,\n\n ExtendNumLet,\n\n Regional(RegionalState),\n\n FormatExtend(FormatExtendType),\n\n Zwj,\n\n Emoji,\n\n}\n\n\n\n// subtypes for FormatExtend state in WordBoundsState\n", "file_path": "crate/divvunspell/src/tokenizer/word.rs", "rank": 62, "score": 95362.75261599141 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct AccuracyResult<'a> {\n\n input: &'a str,\n\n expected: &'a str,\n\n distance: usize,\n\n suggestions: Vec<Suggestion>,\n\n position: Option<usize>,\n\n time: Time,\n\n}\n\n\n", "file_path": "crate/accuracy/src/main.rs", "rank": 63, "score": 95110.70906405305 }, { "content": "pub trait Filesystem {\n\n type File: File;\n\n\n\n fn open_file<P: AsRef<Path>>(&self, path: P) -> Result<Self::File>;\n\n fn copy_to_temp_dir<P: AsRef<Path>>(&self, path: P) -> Result<TempDir>;\n\n}\n\n\n", "file_path": "crate/divvunspell/src/vfs.rs", "rank": 64, "score": 94351.0682436719 }, { "content": 
"fn convert_thfsts_to_bhfst(\n\n acceptor_path: &Path,\n\n errmodel_path: &Path,\n\n output_path: &Path,\n\n) -> Result<(), std::io::Error> {\n\n let fs = divvunspell::vfs::Fs;\n\n let _acceptor_transducer =\n\n MemmapThfstTransducer::from_path(&fs, acceptor_path).map_err(|e| e.into_io_error())?;\n\n let _errmodel_transducer =\n\n MemmapThfstTransducer::from_path(&fs, errmodel_path).map_err(|e| e.into_io_error())?;\n\n\n\n let mut boxfile: BoxFileWriter = BoxFileWriter::create_with_alignment(output_path, ALIGNMENT)?;\n\n\n\n insert_thfst_files(&mut boxfile, acceptor_path)?;\n\n insert_thfst_files(&mut boxfile, errmodel_path)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 65, "score": 93600.92189729825 }, { "content": "#[test]\n\nfn test_xml_parse() {\n\n use std::str::FromStr;\n\n\n\n let xml_data = r##\"\n\n <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n <hfstspeller dtdversion=\"1.0\" hfstversion=\"3\">\n\n <info>\n\n <locale>se</locale>\n\n <title>Giellatekno/Divvun/UiT fst-based speller for Northern Sami</title>\n\n <description>This is an fst-based speller for Northern Sami. 
It is based\n\n on the normative subset of the morphological analyser for Northern Sami.\n\n The source code can be found at:\n\n https://victorio.uit.no/langtech/trunk/langs/sme/\n\n License: GPL3+.</description>\n\n <version vcsrev=\"GT_REVISION\">GT_VERSION</version>\n\n <date>DATE</date>\n\n <producer>Giellatekno/Divvun/UiT contributors</producer>\n\n <contact email=\"[email protected]\" website=\"http://divvun.no\"/>\n\n </info>\n\n <acceptor type=\"general\" id=\"acceptor.default.hfst\">\n", "file_path": "crate/divvunspell/src/archive/meta.rs", "rank": 66, "score": 93600.92189729825 }, { "content": "#[cfg(feature = \"gpt2\")]\n\nfn load_predictor_archive(\n\n path: &Path,\n\n name: Option<&str>,\n\n) -> Result<Box<dyn PredictorArchive>, PredictorArchiveError> {\n\n let archive = BoxGpt2PredictorArchive::open(path, name)?;\n\n let archive = Box::new(archive);\n\n Ok(archive)\n\n}\n\n\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 67, "score": 93600.92189729825 }, { "content": "pub trait Tokenize {\n\n fn word_bound_indices(&self) -> WordBoundIndices<'_>;\n\n fn word_indices(&self) -> WordIndices<'_>;\n\n fn word_bound_indices_with_alphabet(&self, alphabet: Vec<char>) -> WordBoundIndices;\n\n fn words_with_alphabet(&self, alphabet: Vec<char>) -> Words;\n\n}\n\n\n\nimpl Tokenize for str {\n\n fn word_bound_indices(&self) -> WordBoundIndices<'_> {\n\n WordBoundIndices::new(self)\n\n }\n\n\n\n fn word_indices(&self) -> WordIndices<'_> {\n\n WordIndices {\n\n iter: WordBoundIndices::new(self),\n\n }\n\n }\n\n\n\n fn word_bound_indices_with_alphabet(&self, alphabet: Vec<char>) -> WordBoundIndices {\n\n WordBoundIndices::new_with_alphabet(self, alphabet)\n", "file_path": "crate/divvunspell/src/tokenizer/mod.rs", "rank": 68, "score": 92652.50589680234 }, { "content": "pub trait Predictor {\n\n fn predict(self: Arc<Self>, raw_input: &str) -> Vec<String>;\n\n}\n", "file_path": "crate/divvunspell/src/predictor/mod.rs", "rank": 69, "score": 
92652.50589680234 }, { "content": "pub trait Speller {\n\n fn is_correct(self: Arc<Self>, word: &str) -> bool;\n\n fn is_correct_with_config(self: Arc<Self>, word: &str, config: &SpellerConfig) -> bool;\n\n fn suggest(self: Arc<Self>, word: &str) -> Vec<Suggestion>;\n\n fn suggest_with_config(self: Arc<Self>, word: &str, config: &SpellerConfig) -> Vec<Suggestion>;\n\n}\n\n\n\nimpl<F, T, U> Speller for HfstSpeller<F, T, U>\n\nwhere\n\n F: crate::vfs::File + Send,\n\n T: Transducer<F> + Send,\n\n U: Transducer<F> + Send,\n\n{\n\n #[allow(clippy::wrong_self_convention)]\n\n fn is_correct_with_config(self: Arc<Self>, word: &str, config: &SpellerConfig) -> bool {\n\n use crate::tokenizer::case_handling::*;\n\n\n\n if word.len() == 0 {\n\n return true;\n\n }\n", "file_path": "crate/divvunspell/src/speller/mod.rs", "rank": 70, "score": 92652.50589680234 }, { "content": "pub trait SpellerArchive {\n\n fn open(path: &Path) -> Result<Self, SpellerArchiveError>\n\n where\n\n Self: Sized;\n\n\n\n fn speller(&self) -> Arc<dyn Speller + Send + Sync>;\n\n fn metadata(&self) -> Option<&SpellerMetadata>;\n\n}\n\n\n", "file_path": "crate/divvunspell/src/archive/mod.rs", "rank": 71, "score": 91032.91729742564 }, { "content": "pub trait PredictorArchive {\n\n fn open(path: &Path, predictor_name: Option<&str>) -> Result<Self, PredictorArchiveError>\n\n where\n\n Self: Sized;\n\n\n\n fn predictor(&self) -> Arc<dyn Predictor + Send + Sync>;\n\n fn metadata(&self) -> Option<&PredictorMetadata>;\n\n}\n\n\n", "file_path": "crate/divvunspell/src/archive/mod.rs", "rank": 72, "score": 91032.91729742564 }, { "content": "pub trait ConvertFrom<T> {\n\n fn convert_from<W: Write>(from: &T, writer: &mut W) -> Result<(), std::io::Error>;\n\n}\n\n\n\nimpl ConvertFile<hfst::HfstTransducer<std::fs::File>>\n\n for thfst::MemmapThfstTransducer<std::fs::File>\n\n{\n\n fn convert_file(\n\n transducer: &hfst::HfstTransducer<std::fs::File>,\n\n path: &Path,\n\n ) -> Result<(), std::io::Error> {\n\n let 
thfst_path = path.with_extension(\"thfst\");\n\n std::fs::create_dir_all(&thfst_path)?;\n\n\n\n let transition_path = thfst_path.join(\"transition\");\n\n let index_path = thfst_path.join(\"index\");\n\n let alphabet_path = thfst_path.join(\"alphabet\");\n\n\n\n let mut writer = BufWriter::new(File::create(transition_path)?);\n\n thfst::MemmapTransitionTable::convert_from(&transducer.transition_table, &mut writer)?;\n", "file_path": "crate/divvunspell/src/transducer/convert.rs", "rank": 73, "score": 88590.64580613261 }, { "content": "fn main() -> anyhow::Result<()> {\n\n pretty_env_logger::init();\n\n\n\n let args = Args::parse_args_default_or_exit();\n\n\n\n match args.command {\n\n None => Ok(()),\n\n Some(Command::Suggest(args)) => suggest(args),\n\n Some(Command::Tokenize(args)) => tokenize(args),\n\n Some(Command::Predict(args)) => predict(args),\n\n }\n\n}\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 74, "score": 88152.86549089669 }, { "content": "pub trait ConvertFile<T> {\n\n fn convert_file(transducer: &T, path: &Path) -> Result<(), std::io::Error>;\n\n}\n\n\n", "file_path": "crate/divvunspell/src/transducer/convert.rs", "rank": 75, "score": 87044.64832234241 }, { "content": "fn mmap_by_name<R: Read + Seek>(\n\n zipfile: &mut File,\n\n archive: &mut ZipArchive<R>,\n\n name: &str,\n\n) -> Result<MmapRef, std::io::Error> {\n\n let mut index = archive.by_name(name)?;\n\n\n\n if index.compression() != CompressionMethod::Stored {\n\n let tempdir = tempfile::tempdir()?;\n\n let outpath = tempdir.path().join(index.mangled_name());\n\n\n\n let mut outfile = File::create(&outpath)?;\n\n std::io::copy(&mut index, &mut outfile)?;\n\n\n\n let outfile = File::open(&outpath)?;\n\n\n\n let mmap = unsafe { MmapOptions::new().map(&outfile) };\n\n\n\n return match mmap {\n\n Ok(v) => Ok(MmapRef::Temp(TempMmap {\n", "file_path": "crate/divvunspell/src/archive/zip.rs", "rank": 76, "score": 83194.1838313495 }, { "content": "fn main() -> Result<(), Box<dyn 
Error>> {\n\n pretty_env_logger::init();\n\n\n\n let matches = App::new(\"divvunspell-accuracy\")\n\n .setting(AppSettings::ArgRequiredElseHelp)\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .about(\"Accuracy testing for DivvunSpell.\")\n\n .arg(\n\n Arg::with_name(\"config\")\n\n .short(\"c\")\n\n .takes_value(true)\n\n .help(\"Provide JSON config file to override test defaults\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"words\")\n\n .value_name(\"WORDS\")\n\n .help(\"The 'input -> expected' list in tab-delimited value file (TSV)\"),\n\n )\n\n // .arg(\n\n // Arg::with_name(\"bhfst\")\n", "file_path": "crate/accuracy/src/main.rs", "rank": 77, "score": 83070.44920094297 }, { "content": "fn main() -> Result<(), std::io::Error> {\n\n let opts = Opts::from_args();\n\n\n\n match opts {\n\n Opts::HfstToThfst { from } => convert_hfst_to_thfst(&from),\n\n Opts::ThfstsToBhfst {\n\n acceptor,\n\n errmodel,\n\n output,\n\n } => convert_thfsts_to_bhfst(&acceptor, &errmodel, &output),\n\n Opts::ZhfstToBhfst { from } => convert_zhfst_to_bhfst(&from),\n\n Opts::BhfstInfo { path } => {\n\n let ar: ThfstBoxSpellerArchive = BoxSpellerArchive::open(&path).unwrap();\n\n println!(\"{:#?}\", ar.metadata());\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 78, "score": 81594.95426821987 }, { "content": "pub trait Transducer<F: vfs::File>: Sized {\n\n const FILE_EXT: &'static str;\n\n\n\n fn from_path<P, FS>(fs: &FS, path: P) -> Result<Self, TransducerError>\n\n where\n\n P: AsRef<std::path::Path>,\n\n FS: Filesystem<File = F>;\n\n\n\n fn alphabet(&self) -> &TransducerAlphabet;\n\n fn mut_alphabet(&mut self) -> &mut TransducerAlphabet;\n\n\n\n fn transition_input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber>;\n\n fn has_transitions(&self, i: TransitionTableIndex, s: Option<SymbolNumber>) -> bool;\n\n fn next(&self, i: TransitionTableIndex, symbol: SymbolNumber) -> Option<TransitionTableIndex>;\n\n fn has_epsilons_or_flags(&self, i: 
TransitionTableIndex) -> bool;\n\n fn take_epsilons_and_flags(&self, i: TransitionTableIndex) -> Option<SymbolTransition>;\n\n fn take_epsilons(&self, i: TransitionTableIndex) -> Option<SymbolTransition>;\n\n fn take_non_epsilons(\n\n &self,\n\n i: TransitionTableIndex,\n\n symbol: SymbolNumber,\n\n ) -> Option<SymbolTransition>;\n\n fn is_final(&self, i: TransitionTableIndex) -> bool;\n\n fn final_weight(&self, i: TransitionTableIndex) -> Option<Weight>;\n\n}\n\n\n", "file_path": "crate/divvunspell/src/transducer/mod.rs", "rank": 79, "score": 79188.79378216865 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub enum FlagDiacriticOperator {\n\n PositiveSet,\n\n NegativeSet,\n\n Require,\n\n Disallow,\n\n Clear,\n\n Unification,\n\n}\n\n\n\nimpl std::str::FromStr for FlagDiacriticOperator {\n\n type Err = ();\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"P\" => Ok(FlagDiacriticOperator::PositiveSet),\n\n \"N\" => Ok(FlagDiacriticOperator::NegativeSet),\n\n \"R\" => Ok(FlagDiacriticOperator::Require),\n", "file_path": "crate/divvunspell/src/types.rs", "rank": 80, "score": 75303.75483087686 }, { "content": "\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct FlagDiacriticOperation {\n\n pub operation: FlagDiacriticOperator,\n\n pub feature: SymbolNumber,\n\n pub value: ValueNumber,\n\n}\n\n\n\npub type SymbolNumber = u16;\n\npub type ValueNumber = i16;\n\npub type TransitionTableIndex = u32;\n\npub type Weight = f32;\n\npub type FlagDiacriticState = Vec<i16>;\n\npub type OperationsMap = hashbrown::HashMap<SymbolNumber, FlagDiacriticOperation>;\n", "file_path": "crate/divvunspell/src/types.rs", "rank": 81, "score": 75302.22940801702 }, { "content": " \"D\" => Ok(FlagDiacriticOperator::Disallow),\n\n \"C\" => Ok(FlagDiacriticOperator::Clear),\n\n \"U\" => Ok(FlagDiacriticOperator::Unification),\n\n _ => Err(()),\n\n }\n\n 
}\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum HeaderFlag {\n\n Weighted,\n\n Deterministic,\n\n InputDeterministic,\n\n Minimized,\n\n Cyclic,\n\n HasEpsilonEpsilonTransitions,\n\n HasInputEpsilonTransitions,\n\n HasInputEpsilonCycles,\n\n HasUnweightedInputEpsilonCycles,\n\n}\n", "file_path": "crate/divvunspell/src/types.rs", "rank": 82, "score": 75290.12392027208 }, { "content": "fn convert_zhfst_to_bhfst(zhfst_path: &Path) -> Result<(), std::io::Error> {\n\n let zhfst_path = std::fs::canonicalize(zhfst_path)?;\n\n let zhfst = ZipSpellerArchive::open(&zhfst_path).unwrap();\n\n\n\n let dir = tempfile::tempdir()?;\n\n println!(\n\n \"Unzipping {:?} to temporary directory...\",\n\n zhfst_path.file_name().unwrap()\n\n );\n\n std::process::Command::new(\"unzip\")\n\n .current_dir(&dir)\n\n .args(&[&zhfst_path])\n\n .output()?;\n\n\n\n let bhfst_path = zhfst_path.with_extension(\"bhfst\");\n\n let mut boxfile: BoxFileWriter = BoxFileWriter::create_with_alignment(&bhfst_path, ALIGNMENT)?;\n\n\n\n let meta_json = match zhfst.metadata() {\n\n Some(metadata) => {\n\n println!(\"Converting \\\"index.xml\\\" to \\\"meta.json\\\"...\");\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 83, "score": 72619.51615250642 }, { "content": "fn convert_hfst_to_thfst(hfst_path: &Path) -> Result<(), std::io::Error> {\n\n let fs = divvunspell::vfs::Fs;\n\n let transducer = HfstTransducer::from_path(&fs, hfst_path).map_err(|e| e.into_io_error())?;\n\n println!(\n\n \"Converting {:?} to {:?}...\",\n\n &hfst_path.file_name().unwrap(),\n\n &hfst_path.with_extension(\"thfst\").file_name().unwrap()\n\n );\n\n\n\n thfst::ThfstTransducer::convert_file(&transducer, hfst_path)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crate/thfst-tools/src/main.rs", "rank": 84, "score": 72619.51615250642 }, { "content": "fn load_archive(path: &Path) -> Result<Box<dyn SpellerArchive>, SpellerArchiveError> {\n\n let ext = match path.extension() {\n\n Some(v) => v,\n\n None => {\n\n return 
Err(SpellerArchiveError::Io(\n\n path.to_string_lossy().to_string(),\n\n std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n \"Unsupported archive (missing .zhfst or .bhfst)\",\n\n )\n\n .into(),\n\n ))\n\n }\n\n };\n\n\n\n if ext == \"bhfst\" {\n\n let archive: ThfstBoxSpellerArchive = match BoxSpellerArchive::open(path) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n eprintln!(\"{:?}\", e);\n", "file_path": "crate/divvunspell-bin/src/main.rs", "rank": 85, "score": 69388.97448390236 }, { "content": " use super::*;\n\n\n\n use crate::transducer::TransducerError;\n\n use crate::transducer::TransitionTable;\n\n use crate::types::{SymbolNumber, TransitionTableIndex, Weight};\n\n use crate::vfs::{self, Filesystem};\n\n\n\n pub struct FileTransitionTable<F: vfs::File> {\n\n file: F,\n\n size: u32,\n\n }\n\n\n\n impl<F: vfs::File> FileTransitionTable<F> {\n\n #[inline(always)]\n\n fn read_u16_at(&self, index: u64) -> u16 {\n\n let mut buf = [0u8; 2];\n\n self.file\n\n .read_exact_at(&mut buf, index)\n\n .expect(\"failed to read u16\");\n\n u16::from_le_bytes(buf)\n", "file_path": "crate/divvunspell/src/transducer/thfst/transition_table.rs", "rank": 86, "score": 69365.90002567502 }, { "content": "use std::{mem, ptr};\n\n\n\nuse crate::transducer::TransducerError;\n\nuse crate::transducer::TransitionTable;\n\nuse crate::types::{SymbolNumber, TransitionTableIndex, Weight};\n\nuse crate::vfs::{self, Filesystem};\n\nuse memmap2::Mmap;\n\n\n\n#[derive(Debug)]\n\npub struct MemmapTransitionTable<F> {\n\n buf: Mmap,\n\n pub(crate) size: u32,\n\n _file: std::marker::PhantomData<F>,\n\n}\n\n\n\nconst TRANS_TABLE_SIZE: usize = 12;\n\n\n\nimpl<F: vfs::File> MemmapTransitionTable<F> {\n\n pub fn from_path_partial<P, FS>(\n\n fs: &FS,\n", "file_path": "crate/divvunspell/src/transducer/thfst/transition_table.rs", "rank": 87, "score": 69358.57858382529 }, { "content": " #[inline(always)]\n\n pub fn weight(&self, i: TransitionTableIndex) -> Option<Weight> {\n\n if i >= self.size {\n\n return 
None;\n\n }\n\n\n\n let index = self.offset\n\n + ((TRANS_TABLE_SIZE * i as usize)\n\n + (2 * mem::size_of::<SymbolNumber>())\n\n + mem::size_of::<TransitionTableIndex>());\n\n\n\n let x: Weight = if cfg!(all(target_arch = \"arm\", target_pointer_width = \"32\")) {\n\n let mut cursor = self.make_cursor();\n\n cursor.set_position(index as u64);\n\n cursor.read_f32::<LittleEndian>().unwrap()\n\n } else {\n\n unsafe { ptr::read(self.mmap.as_ptr().add(index) as *const _) }\n\n };\n\n Some(x)\n\n }\n", "file_path": "crate/divvunspell/src/transducer/hfst/transition_table.rs", "rank": 88, "score": 69358.51736031467 }, { "content": " fn read_symbol_from_cursor(&self, index: usize) -> Option<SymbolNumber> {\n\n let index = self.offset + index;\n\n let x: SymbolNumber = if cfg!(all(target_arch = \"arm\", target_pointer_width = \"32\")) {\n\n let mut cursor = self.make_cursor();\n\n cursor.set_position(index as u64);\n\n cursor.read_u16::<LittleEndian>().unwrap()\n\n } else {\n\n unsafe { ptr::read(self.mmap.as_ptr().add(index) as *const _) }\n\n };\n\n if x == u16::MAX {\n\n None\n\n } else {\n\n Some(x)\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber> {\n\n if i >= self.size {\n\n return None;\n", "file_path": "crate/divvunspell/src/transducer/hfst/transition_table.rs", "rank": 89, "score": 69357.84698105654 }, { "content": "\n\nimpl fmt::Debug for MappedTransitionTable {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Transition table index: {}\", self.size)?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl MappedTransitionTable {\n\n #[inline(always)]\n\n pub fn new(mmap: Arc<Mmap>, offset: usize, size: u32) -> MappedTransitionTable {\n\n MappedTransitionTable { size, mmap, offset }\n\n }\n\n\n\n #[inline(always)]\n\n fn make_cursor(&self) -> Cursor<&[u8]> {\n\n Cursor::new(&self.mmap)\n\n }\n\n\n\n #[inline(always)]\n", "file_path": 
"crate/divvunspell/src/transducer/hfst/transition_table.rs", "rank": 90, "score": 69356.13082629698 }, { "content": " }\n\n\n\n let index = TRANS_TABLE_SIZE as usize * i as usize;\n\n self.read_symbol_from_cursor(index)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn output_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = ((TRANS_TABLE_SIZE * i as usize) + mem::size_of::<SymbolNumber>()) as usize;\n\n self.read_symbol_from_cursor(index)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex> {\n\n if i >= self.size {\n\n return None;\n", "file_path": "crate/divvunspell/src/transducer/hfst/transition_table.rs", "rank": 91, "score": 69356.06954013479 }, { "content": " }\n\n\n\n #[inline(always)]\n\n fn read_u32_at(&self, index: u64) -> u32 {\n\n let mut buf = [0u8; 4];\n\n self.file\n\n .read_exact_at(&mut buf, index)\n\n .expect(\"failed to read u32\");\n\n u32::from_le_bytes(buf)\n\n }\n\n }\n\n\n\n impl<F: vfs::File> TransitionTable<F> for FileTransitionTable<F> {\n\n fn from_path<P, FS>(fs: &FS, path: P) -> Result<Self, TransducerError>\n\n where\n\n P: AsRef<std::path::Path>,\n\n FS: Filesystem<File = F>,\n\n {\n\n let file = fs.open_file(path).map_err(TransducerError::Io)?;\n\n Ok(FileTransitionTable {\n", "file_path": "crate/divvunspell/src/transducer/thfst/transition_table.rs", "rank": 92, "score": 69355.41613008117 }, { "content": " #[inline(always)]\n\n fn output_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = ((TRANS_TABLE_SIZE * i as usize) + mem::size_of::<SymbolNumber>()) as usize;\n\n let x = self.read_u16_at(index as u64);\n\n if x == std::u16::MAX {\n\n None\n\n } else {\n\n Some(x)\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex> {\n\n if i >= self.size {\n\n return 
None;\n\n }\n", "file_path": "crate/divvunspell/src/transducer/thfst/transition_table.rs", "rank": 93, "score": 69355.25716066033 }, { "content": "// We manually ensure alignment of reads in this file.\n\n#![allow(clippy::cast_ptr_alignment)]\n\n\n\nuse byteorder::{LittleEndian, ReadBytesExt};\n\nuse memmap2::Mmap;\n\nuse std::fmt;\n\nuse std::io::Cursor;\n\nuse std::ptr;\n\nuse std::sync::Arc;\n\nuse std::{mem, u16, u32};\n\n\n\nuse crate::constants::TRANS_TABLE_SIZE;\n\nuse crate::transducer::symbol_transition::SymbolTransition;\n\nuse crate::types::{SymbolNumber, TransitionTableIndex, Weight};\n\n\n\npub struct MappedTransitionTable {\n\n pub(crate) size: TransitionTableIndex,\n\n pub(crate) mmap: Arc<Mmap>,\n\n pub(crate) offset: usize,\n\n}\n", "file_path": "crate/divvunspell/src/transducer/hfst/transition_table.rs", "rank": 94, "score": 69354.02723990688 }, { "content": "\n\n let index = (TRANS_TABLE_SIZE * i as usize) + (2 * mem::size_of::<SymbolNumber>());\n\n\n\n let x = self.read_u32_at(index as u64);\n\n if x == std::u32::MAX {\n\n None\n\n } else {\n\n Some(x)\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn weight(&self, i: TransitionTableIndex) -> Option<Weight> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = (TRANS_TABLE_SIZE * i as usize)\n\n + (2 * mem::size_of::<SymbolNumber>())\n\n + mem::size_of::<TransitionTableIndex>();\n", "file_path": "crate/divvunspell/src/transducer/thfst/transition_table.rs", "rank": 95, "score": 69353.8944500896 }, { "content": " }\n\n\n\n let index =\n\n self.offset + ((TRANS_TABLE_SIZE * i as usize) + (2 * mem::size_of::<SymbolNumber>()));\n\n\n\n let x: TransitionTableIndex = if cfg!(all(target_arch = \"arm\", target_pointer_width = \"32\"))\n\n {\n\n let mut cursor = self.make_cursor();\n\n cursor.set_position(index as u64);\n\n cursor.read_u32::<LittleEndian>().unwrap()\n\n } else {\n\n unsafe { ptr::read(self.mmap.as_ptr().add(index) as *const _) }\n\n };\n\n if x == u32::MAX {\n\n None\n\n } else 
{\n\n Some(x)\n\n }\n\n }\n\n\n", "file_path": "crate/divvunspell/src/transducer/hfst/transition_table.rs", "rank": 96, "score": 69353.72762404663 }, { "content": " size: file.len().map_err(TransducerError::Io)? as u32,\n\n file,\n\n })\n\n }\n\n\n\n #[inline(always)]\n\n fn input_symbol(&self, i: TransitionTableIndex) -> Option<SymbolNumber> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = TRANS_TABLE_SIZE as usize * i as usize;\n\n let x = self.read_u16_at(index as u64);\n\n if x == std::u16::MAX {\n\n None\n\n } else {\n\n Some(x)\n\n }\n\n }\n\n\n", "file_path": "crate/divvunspell/src/transducer/thfst/transition_table.rs", "rank": 97, "score": 69353.34488636981 }, { "content": " return None;\n\n }\n\n\n\n let index = ((TRANS_TABLE_SIZE * i as usize) + mem::size_of::<SymbolNumber>()) as usize;\n\n self.read_symbol_from_cursor(index)\n\n }\n\n\n\n fn target(&self, i: TransitionTableIndex) -> Option<TransitionTableIndex> {\n\n if i >= self.size {\n\n return None;\n\n }\n\n\n\n let index = (TRANS_TABLE_SIZE * i as usize) + (2 * mem::size_of::<SymbolNumber>());\n\n\n\n let x: TransitionTableIndex =\n\n unsafe { ptr::read(self.buf.as_ptr().add(index) as *const _) };\n\n if x == std::u32::MAX {\n\n None\n\n } else {\n\n Some(x)\n", "file_path": "crate/divvunspell/src/transducer/thfst/transition_table.rs", "rank": 98, "score": 69351.58316074846 }, { "content": "\n\n #[inline(always)]\n\n pub fn is_final(&self, i: TransitionTableIndex) -> bool {\n\n self.input_symbol(i) == None && self.output_symbol(i) == None && self.target(i) == Some(1)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn symbol_transition(&self, i: TransitionTableIndex) -> SymbolTransition {\n\n SymbolTransition::new(self.target(i), self.output_symbol(i), self.weight(i))\n\n }\n\n}\n", "file_path": "crate/divvunspell/src/transducer/hfst/transition_table.rs", "rank": 99, "score": 69350.86185571605 } ]
Rust
native/src/lib.rs
zhangxh1023/node-image-search
2a66956f1fbe295208c6bc1a49b008d02860d4c6
extern crate neon; use neon::prelude::*; mod image; mod utils; struct ImageSearchTask { parent_image_path: String, child_image_path: String, out: String, result_level: u32, } impl Task for ImageSearchTask { type Output = Vec<Vec<image::ResultPoint>>; type Error = String; type JsEvent = JsArray; fn perform(&self) -> Result<Self::Output, Self::Error> { let parent_image = image::Image::new(self.parent_image_path.clone()); let child_image = image::Image::new(self.child_image_path.clone()); let result = parent_image.search_child_image_point_from_parent_image(&child_image, self.result_level); if &self.out != "" { parent_image.mark_child_image_border_with_new_image(&child_image, &self.out, &result); } Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); let result_array = JsArray::new(&mut cx, result.len() as u32); for (i, v) in result.iter().enumerate() { let temp_array = JsArray::new(&mut cx, v.len() as u32); for (index, object) in v.iter().enumerate() { let result_object = JsObject::new(&mut cx); let x = object.x; let y = object.y; let x = cx.number(x); let y = cx.number(y); let hash_string = cx.string(object.hash_string.clone()); let hamming_distance = cx.number(object.hamming_distance); result_object.set(&mut cx, "x", x).unwrap(); result_object.set(&mut cx, "y", y).unwrap(); result_object .set(&mut cx, "hash_string", hash_string) .unwrap(); result_object .set(&mut cx, "hamming_distance", hamming_distance) .unwrap(); temp_array .set(&mut cx, index as u32, result_object) .unwrap(); } result_array.set(&mut cx, i as u32, temp_array).unwrap(); } Ok(result_array) } } fn image_search(mut cx: FunctionContext) -> JsResult<JsUndefined> { let parent_image_path = cx.argument::<JsString>(0)?.value(); let child_image_path = cx.argument::<JsString>(1)?.value(); let options = cx.argument::<JsObject>(2)?; let out = options .get(&mut cx, "out")? 
.downcast::<JsString>() .or_throw(&mut cx)? .value(); let result_level = options .get(&mut cx, "result_level")? .downcast::<JsNumber>() .or_throw(&mut cx)? .value(); let f = cx.argument::<JsFunction>(3)?; let image_search_task = ImageSearchTask { parent_image_path, child_image_path, out, result_level: result_level as u32, }; image_search_task.schedule(f); Ok(cx.undefined()) } struct GetDHashTask { image_path: String, } impl Task for GetDHashTask { type Output = String; type Error = String; type JsEvent = JsString; fn perform(&self) -> Result<Self::Output, Self::Error> { let image = image::Image::new(self.image_path.clone()); let result = image.get_d_hash(); Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); Ok(cx.string(result)) } } fn get_d_hash(mut cx: FunctionContext) -> JsResult<JsUndefined> { let image_path = cx.argument::<JsString>(0)?.value(); let f = cx.argument::<JsFunction>(1)?; let get_d_hash_task = GetDHashTask { image_path }; get_d_hash_task.schedule(f); Ok(cx.undefined()) } fn get_hamming_distance_by_hex_hash(mut cx: FunctionContext) -> JsResult<JsNumber> { let hash_1 = cx.argument::<JsString>(0)?.value(); let hash_2 = cx.argument::<JsString>(1)?.value(); let result = utils::get_hamming_distance_by_hex_hash(&hash_1, &hash_2); Ok(cx.number(result)) } register_module!(mut m, { m.export_function("image_search", image_search)?; m.export_function("get_d_hash", get_d_hash)?; m.export_function( "get_hamming_distance_by_hex_hash", get_hamming_distance_by_hex_hash, )?; Ok(()) });
extern crate neon; use neon::prelude::*; mod image; mod utils; struct ImageSearchTask { parent_image_path: String, child_image_path: String, out: String, result_level: u32, } impl Task for ImageSearchTask { type Output = Vec<Vec<image::ResultPoint>>; type Error = String; type JsEvent = JsArray; fn perform(&self) -> Result<Self::Output, Self::Error> { let parent_image = image::Image::new(self.parent_image_path.clone()); let child_image = image::Image::new(self.child_image_path.clone()); let result = parent_image.search_child_image_point_from_parent_image(&child_image, self.result_level); if &self.out != "" { parent_image.mark_child_image_border_with_new_image(&child_image, &self.out, &result); } Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); let result_array = JsArray::new(&mut cx, result.len() as u32); for (i, v) in result.iter().enumerate() { let temp_array = JsArray::new(&mut cx, v.len() as u32); for (index, object) in v.iter().enumerate() { let result_object = JsObject::new(&mut cx); let x = object.x; let y = object.y; let x = cx.number(x); let y = cx.number(y); let hash_string = cx.string(object.hash_string.clone()); let hamming_distance = cx.number(object.hamming_distance); result_object.set(&mut cx, "x", x).unwrap(); result_object.set(&mut cx, "y", y).unwrap(); result_object .set(&mut cx, "hash_string", hash_string) .unwrap(); result_object .set(&mut cx, "hamming_distance", hamming_distance) .unwrap(); temp_array .set(&mut cx, index as u32, result_object) .unwrap(); } result_array.set(&mut cx, i as u32, temp_array).unwrap(); } Ok(result_array) } }
struct GetDHashTask { image_path: String, } impl Task for GetDHashTask { type Output = String; type Error = String; type JsEvent = JsString; fn perform(&self) -> Result<Self::Output, Self::Error> { let image = image::Image::new(self.image_path.clone()); let result = image.get_d_hash(); Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); Ok(cx.string(result)) } } fn get_d_hash(mut cx: FunctionContext) -> JsResult<JsUndefined> { let image_path = cx.argument::<JsString>(0)?.value(); let f = cx.argument::<JsFunction>(1)?; let get_d_hash_task = GetDHashTask { image_path }; get_d_hash_task.schedule(f); Ok(cx.undefined()) } fn get_hamming_distance_by_hex_hash(mut cx: FunctionContext) -> JsResult<JsNumber> { let hash_1 = cx.argument::<JsString>(0)?.value(); let hash_2 = cx.argument::<JsString>(1)?.value(); let result = utils::get_hamming_distance_by_hex_hash(&hash_1, &hash_2); Ok(cx.number(result)) } register_module!(mut m, { m.export_function("image_search", image_search)?; m.export_function("get_d_hash", get_d_hash)?; m.export_function( "get_hamming_distance_by_hex_hash", get_hamming_distance_by_hex_hash, )?; Ok(()) });
fn image_search(mut cx: FunctionContext) -> JsResult<JsUndefined> { let parent_image_path = cx.argument::<JsString>(0)?.value(); let child_image_path = cx.argument::<JsString>(1)?.value(); let options = cx.argument::<JsObject>(2)?; let out = options .get(&mut cx, "out")? .downcast::<JsString>() .or_throw(&mut cx)? .value(); let result_level = options .get(&mut cx, "result_level")? .downcast::<JsNumber>() .or_throw(&mut cx)? .value(); let f = cx.argument::<JsFunction>(3)?; let image_search_task = ImageSearchTask { parent_image_path, child_image_path, out, result_level: result_level as u32, }; image_search_task.schedule(f); Ok(cx.undefined()) }
function_block-full_function
[ { "content": "pub fn convert_to_binary_from_hex(hex: &str) -> String {\n\n hex.chars().map(to_binary).collect()\n\n}\n\n\n", "file_path": "native/src/utils.rs", "rank": 4, "score": 67880.1477159762 }, { "content": "pub fn get_hamming_distance_by_hex_hash(hash_1: &str, hash_2: &str) -> u32 {\n\n let mut binary_hash_1 = convert_to_binary_from_hex(hash_1);\n\n let mut binary_hash_2 = convert_to_binary_from_hex(hash_2);\n\n while binary_hash_1.len() < 64 {\n\n binary_hash_1 = String::from(\"0\") + &binary_hash_1;\n\n }\n\n while binary_hash_2.len() < 64 {\n\n binary_hash_2 = String::from(\"0\") + &binary_hash_2;\n\n }\n\n\n\n let mut hamming_distance = 0_u32;\n\n for index in 0..64 {\n\n if binary_hash_1.as_bytes()[index] != binary_hash_2.as_bytes()[index] {\n\n hamming_distance += 1;\n\n }\n\n }\n\n return hamming_distance;\n\n}\n", "file_path": "native/src/utils.rs", "rank": 5, "score": 59038.295887006134 }, { "content": "pub fn to_binary(c: char) -> &'static str {\n\n match c {\n\n '0' => \"0000\",\n\n '1' => \"0001\",\n\n '2' => \"0010\",\n\n '3' => \"0011\",\n\n '4' => \"0100\",\n\n '5' => \"0101\",\n\n '6' => \"0110\",\n\n '7' => \"0111\",\n\n '8' => \"1000\",\n\n '9' => \"1001\",\n\n 'A' => \"1010\",\n\n 'B' => \"1011\",\n\n 'C' => \"1100\",\n\n 'D' => \"1101\",\n\n 'E' => \"1110\",\n\n 'F' => \"1111\",\n\n _ => \"\",\n\n }\n\n}\n\n\n", "file_path": "native/src/utils.rs", "rank": 7, "score": 42550.274740217756 }, { "content": "fn main() {\n\n neon_build::setup(); // must be called in build.rs\n\n\n\n // add project-specific build logic here...\n\n}\n", "file_path": "native/build.rs", "rank": 8, "score": 34231.075236284436 }, { "content": "fn main() {\n\n let main_image = image::Image::new(String::from(\"../examples/img/2/big.jpg\"));\n\n let min_image = image::Image::new(String::from(\"../examples/img/2/small.png\"));\n\n\n\n let result = main_image.search_child_image_point_from_parent_image(&min_image, 1);\n\n 
main_image.mark_child_image_border_with_new_image(&min_image, \"./temp.png\", &result);\n\n}\n", "file_path": "native/src/main.rs", "rank": 9, "score": 33314.87979049384 }, { "content": "const path = require('path');\n", "file_path": "examples/index.js", "rank": 10, "score": 30381.354701662087 }, { "content": "const addon = require('../native');\n", "file_path": "lib/index.js", "rank": 11, "score": 30381.354701662087 }, { "content": "const addon = require('..');\n", "file_path": "examples/index.js", "rank": 12, "score": 30381.354701662087 }, { "content": "let hamming_distance = addon.get_hamming_distance_by_hex_hash('3731316430182B65', '3631314430105A64');\n", "file_path": "examples/index.js", "rank": 13, "score": 29290.074658353093 }, { "content": "const addon = require('../native');\n\n\n\n/**\n\n *\n\n * @callback imageSearchCallback\n\n * @param {Error} error\n\n * @param {Any} res\n\n * @returns {void}\n\n */\n\n/**\n\n * 在 parent 图片中, 寻找相似 child 图片的坐标\n\n * \n\n * @param {String} parent_image_path parent image path\n\n * @param {String} child_image_path child image path\n\n * @param {Object} options options\n\n * @param {String} [options.out=''] save as a new image with child image border\n\n * @param {Number} [options.result_level=1]\n\n * @param {imageSearchCallback} cb callback function\n\n */\n\nfunction image_search(parent_image_path, child_image_path, options, cb) {\n\n if (typeof parent_image_path !== 'string' || typeof child_image_path !== 'string') {\n\n throw new Error('The image path must be a string!');\n\n }\n\n if (!cb) {\n\n cb = options;\n\n options = {};\n\n }\n\n if (typeof options != 'object') {\n\n throw new Error('The options must be a object!');\n\n }\n\n if (typeof cb !== 'function') {\n\n throw new Error('The callback must be a function!');\n\n }\n\n if (typeof options.result_level !== 'number' || options.result_level < 1) {\n\n options.result_level = 1;\n\n }\n\n options.out = options.out || '';\n\n\n\n return 
addon.image_search(parent_image_path, child_image_path, options, cb);\n\n}\n\n\n\n/**\n\n * \n\n * @callback getDHashCallback\n\n * @param {Error} error error\n\n * @param {String} res dHash String\n\n * @returns {void}\n\n */\n\n\n\n/**\n\n * 获取图片的差异值哈希\n\n * \n\n * @param {String} image_path image path\n\n * @param {getDHashCallback} cb callback function\n\n */\n\nfunction get_d_hash(image_path, cb) {\n\n if (typeof image_path !== 'string') {\n\n throw new Error('The image path must be a string!');\n\n }\n\n if (typeof cb !== 'function') {\n\n throw new Error('The callback must be a function!');\n\n }\n\n return addon.get_d_hash(image_path, cb);\n\n}\n\n\n\n/**\n\n * 通过两个64位十六进制字符串哈希计算汉明距离\n\n * \n\n * @param {String} hash_1 hex hash\n\n * @param {String} hash_2 hex hash\n\n * \n\n * @returns {Number}\n\n */\n\nfunction get_hamming_distance_by_hex_hash(hash_1, hash_2) {\n\n if (typeof hash_1 !== 'string' || typeof hash_2 !== 'string') {\n\n throw new Error('The param must be hex string!');\n\n }\n\n return addon.get_hamming_distance_by_hex_hash(hash_1, hash_2);\n\n}\n\n\n\nmodule.exports = {\n\n image_search,\n\n get_d_hash,\n\n get_hamming_distance_by_hex_hash\n\n}\n", "file_path": "lib/index.js", "rank": 14, "score": 24096.81564549222 }, { "content": "const addon = require('..');\n\nconst path = require('path');\n\n\n\nconsole.time('image_search');\n\naddon.image_search(path.join(__dirname, './img/4/big.png'),\n\n path.join(__dirname, './img/4/small.png'),\n\n {\n\n out: path.join(__dirname, './temp.jpeg'),\n\n result_level: 2,\n\n },\n\n (err, res) => {\n\n console.log('image_search', err, res);\n\n console.timeEnd('image_search');\n\n });\n\n\n\nconsole.time('get_d_hash');\n\naddon.get_d_hash(path.join(__dirname, './img/1/small.png'),\n\n (err, res) => {\n\n console.log('get_d_hash', err, res);\n\n console.timeEnd('get_d_hash');\n\n });\n\n\n\nconsole.time('get_hamming_distance_by_hex_hash');\n\nlet hamming_distance = 
addon.get_hamming_distance_by_hex_hash('3731316430182B65', '3631314430105A64');\n\nconsole.log('hamming_distance', hamming_distance);\n\nconsole.timeEnd('get_hamming_distance_by_hex_hash');\n\n\n", "file_path": "examples/index.js", "rank": 15, "score": 24096.81564549222 }, { "content": "extern crate image;\n\n\n\nuse self::image::{imageops, GenericImageView, ImageBuffer, Pixel};\n\nuse std::path::Path;\n\n\n\nuse crate::utils;\n\n\n\npub struct ResultPoint {\n\n pub x: u32,\n\n pub y: u32,\n\n pub hash_string: String,\n\n pub hamming_distance: u32,\n\n}\n\n\n\npub struct Image {\n\n pub path: String,\n\n pub image: image::DynamicImage,\n\n}\n\n\n\nimpl Image {\n", "file_path": "native/src/image.rs", "rank": 16, "score": 16080.652286769444 }, { "content": " pub fn new(path: String) -> Self {\n\n let image = match image::open(&Path::new(&path)) {\n\n Ok(img) => img,\n\n Err(e) => panic!(e),\n\n };\n\n let mut path_slice: Vec<&str> = path.split(\".\").collect();\n\n let image_type = path_slice.pop().unwrap();\n\n if image_type != \"jpeg\" && image_type != \"jpg\" && image_type != \"png\" {\n\n panic!(\"Unexpected image type!\");\n\n }\n\n Image { path, image }\n\n }\n\n\n\n fn get_size(&self) -> (u32, u32) {\n\n let width = self.image.width();\n\n let height = self.image.height();\n\n return (width, height);\n\n }\n\n\n\n pub fn get_d_hash(&self) -> String {\n", "file_path": "native/src/image.rs", "rank": 17, "score": 16076.880289840294 }, { "content": " difference.push(1);\n\n } else {\n\n difference.push(0);\n\n }\n\n }\n\n }\n\n let mut decimal_value: i32 = 0;\n\n let mut img_hash_string = String::new();\n\n for (index, value) in difference.iter().enumerate() {\n\n if *value == 1 {\n\n decimal_value += *value as i32 * (2_i32.pow(index as u32 % 8)) as i32;\n\n }\n\n if index as u32 % 8 == 7 {\n\n let hex_str = format!(\"{:X}\", decimal_value);\n\n let mut hash = hex_str.to_string();\n\n while hash.len() < 2 {\n\n hash = String::from(\"0\") + &hash;\n\n }\n\n 
img_hash_string = format!(\"{}{}\", img_hash_string, hash);\n\n decimal_value = 0;\n", "file_path": "native/src/image.rs", "rank": 18, "score": 16075.977105563765 }, { "content": " }\n\n let temp_image = Image {\n\n path: String::new(),\n\n image: image::DynamicImage::ImageRgb8(temp_image),\n\n };\n\n let temp_image_d_hash = temp_image.get_d_hash();\n\n let hamming_distance =\n\n utils::get_hamming_distance_by_hex_hash(&temp_image_d_hash, &child_image_d_hash);\n\n\n\n let result_point = ResultPoint {\n\n x: width,\n\n y: height,\n\n hamming_distance,\n\n hash_string: temp_image_d_hash,\n\n };\n\n self.try_to_push_result_point(\n\n &mut min_hamming_distance_for_point,\n\n result_level as usize,\n\n result_point,\n\n );\n", "file_path": "native/src/image.rs", "rank": 19, "score": 16075.150123408777 }, { "content": " }\n\n }\n\n return min_hamming_distance_for_point;\n\n }\n\n\n\n pub fn mark_child_image_border_with_new_image(\n\n &self,\n\n child_image: &Image,\n\n path: &str,\n\n point: &Vec<Vec<ResultPoint>>,\n\n ) {\n\n let new_image = self.image.clone();\n\n let mut image_type: Vec<&str> = path.split(\".\").collect();\n\n let image_type = image_type.pop().unwrap();\n\n match new_image {\n\n image::DynamicImage::ImageRgb8(mut img) => {\n\n let (child_image_width, child_image_height) = child_image.get_size();\n\n let (parent_image_width, parent_image_height) = self.get_size();\n\n\n\n for v in point {\n", "file_path": "native/src/image.rs", "rank": 20, "score": 16074.770102719698 }, { "content": " }\n\n if image_type == \"png\" {\n\n img\n\n .save_with_format(Path::new(path), image::ImageFormat::Png)\n\n .expect(\"save image error\");\n\n } else {\n\n img\n\n .save_with_format(Path::new(path), image::ImageFormat::Jpeg)\n\n .expect(\"save image error\");\n\n }\n\n }\n\n image::DynamicImage::ImageRgba8(mut img) => {\n\n let (child_image_width, child_image_height) = child_image.get_size();\n\n let (parent_image_width, parent_image_height) = self.get_size();\n\n\n\n 
for v in point {\n\n for p in v {\n\n let ResultPoint {\n\n x: start_x,\n\n y: start_y,\n", "file_path": "native/src/image.rs", "rank": 21, "score": 16074.286735589576 }, { "content": " result_point: ResultPoint,\n\n ) {\n\n if result_point_vec.len() < max_length {\n\n let mut exist_same_hamming_distance_index: i32 = -1;\n\n for (index, item) in result_point_vec.iter().enumerate() {\n\n if item[0].hamming_distance == result_point.hamming_distance {\n\n exist_same_hamming_distance_index = index as i32;\n\n break;\n\n }\n\n }\n\n if exist_same_hamming_distance_index > -1 {\n\n result_point_vec[exist_same_hamming_distance_index as usize].push(result_point);\n\n } else {\n\n let new_vec = vec![result_point];\n\n result_point_vec.push(new_vec);\n\n }\n\n } else {\n\n let last = &result_point_vec[result_point_vec.len() - 1];\n\n if last[0].hamming_distance >= result_point.hamming_distance {\n\n let mut exist_same_hamming_distance_index: i32 = -1;\n", "file_path": "native/src/image.rs", "rank": 22, "score": 16074.08306073556 }, { "content": " for (index, item) in result_point_vec.iter().enumerate() {\n\n if item[0].hamming_distance == result_point.hamming_distance {\n\n exist_same_hamming_distance_index = index as i32;\n\n break;\n\n }\n\n }\n\n if exist_same_hamming_distance_index > -1 {\n\n result_point_vec[exist_same_hamming_distance_index as usize].push(result_point);\n\n } else {\n\n result_point_vec.pop();\n\n let new_vec = vec![result_point];\n\n result_point_vec.push(new_vec);\n\n }\n\n }\n\n }\n\n\n\n self.sort_result_point_vector(result_point_vec);\n\n }\n\n\n\n pub fn search_child_image_point_from_parent_image(\n", "file_path": "native/src/image.rs", "rank": 23, "score": 16073.919874169927 }, { "content": " }\n\n }\n\n\n\n return img_hash_string;\n\n }\n\n\n\n pub fn sort_result_point_vector(&self, result_point_vec: &mut Vec<Vec<ResultPoint>>) {\n\n result_point_vec.sort_by(|a, b| {\n\n let a = &a[0];\n\n let b = &b[0];\n\n let a = a.hamming_distance;\n\n let b 
= b.hamming_distance;\n\n a.cmp(&b)\n\n });\n\n }\n\n\n\n pub fn try_to_push_result_point(\n\n &self,\n\n result_point_vec: &mut Vec<Vec<ResultPoint>>,\n\n max_length: usize,\n", "file_path": "native/src/image.rs", "rank": 24, "score": 16073.583236210688 }, { "content": " &self,\n\n child_image: &Image,\n\n result_level: u32,\n\n ) -> Vec<Vec<ResultPoint>> {\n\n let child_image_d_hash = child_image.get_d_hash();\n\n let mut min_hamming_distance_for_point: Vec<Vec<ResultPoint>> = vec![];\n\n let (child_image_width, child_image_height) = child_image.get_size();\n\n let iterate_width = self.image.width() - child_image_width;\n\n let iterate_height = self.image.height() - child_image_height;\n\n\n\n for width in 0..iterate_width {\n\n for height in 0..iterate_height {\n\n let mut temp_image: image::ImageBuffer<image::Rgb<u8>, std::vec::Vec<u8>> =\n\n ImageBuffer::new(child_image_width, child_image_height);\n\n for x in width..width + child_image_width {\n\n for y in height..height + child_image_height {\n\n let p = self.image.get_pixel(x, y);\n\n let p = p.to_rgb();\n\n temp_image.put_pixel(x - width, y - height, p);\n\n }\n", "file_path": "native/src/image.rs", "rank": 25, "score": 16073.331595701353 }, { "content": " let point_y = y + start_y;\n\n let point_x = start_x + child_image_width;\n\n if point_x >= parent_image_width {\n\n break;\n\n }\n\n if point_y < parent_image_height - 1 {\n\n let pixel = self.image.get_pixel(point_x, point_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2], pixel[3]];\n\n img.put_pixel(point_x as u32, point_y, image::Rgba(pixel));\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n if image_type == \"png\" {\n\n img\n\n .save_with_format(Path::new(path), image::ImageFormat::Png)\n\n .expect(\"save image error\");\n\n } else {\n", "file_path": "native/src/image.rs", "rank": 26, "score": 16073.244745435893 }, { "content": " for p in v {\n\n let ResultPoint {\n\n x: start_x,\n\n y: start_y,\n\n hash_string: _,\n\n 
hamming_distance: _,\n\n } = p;\n\n\n\n for x in 0..child_image_width {\n\n let point_x = x + start_x;\n\n if point_x < parent_image_width - 1 {\n\n let pixel = self.image.get_pixel(point_x, *start_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2]];\n\n img.put_pixel(point_x, *start_y, image::Rgb(pixel));\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n for y in 0..child_image_height {\n", "file_path": "native/src/image.rs", "rank": 27, "score": 16072.588283535328 }, { "content": " img\n\n .save_with_format(Path::new(path), image::ImageFormat::Jpeg)\n\n .expect(\"save image error\");\n\n }\n\n }\n\n _ => (),\n\n };\n\n }\n\n}\n", "file_path": "native/src/image.rs", "rank": 28, "score": 16072.54702963168 }, { "content": " let resize_width = 9;\n\n let resize_height = 8;\n\n // resize\n\n let resized_img =\n\n self\n\n .image\n\n .resize_exact(resize_width, resize_height, imageops::FilterType::Nearest);\n\n // 灰度化\n\n let resized_img = imageops::colorops::grayscale(&resized_img);\n\n // calculate difference\n\n let mut difference: Vec<u8> = vec![];\n\n for height in 0..resize_height {\n\n for width in 0..(resize_width - 1) {\n\n let v_before = match resized_img.get_pixel(width, height) {\n\n &image::Luma(v) => v,\n\n };\n\n let v_later = match resized_img.get_pixel(width + 1, height) {\n\n &image::Luma(v) => v,\n\n };\n\n if v_before > v_later {\n", "file_path": "native/src/image.rs", "rank": 29, "score": 16071.789060260104 }, { "content": " } else {\n\n break;\n\n }\n\n }\n\n\n\n for y in 0..child_image_height {\n\n let point_y = y + start_y;\n\n let point_x = start_x + child_image_width;\n\n if point_x >= parent_image_width {\n\n break;\n\n }\n\n if point_y < parent_image_height - 1 {\n\n let pixel = self.image.get_pixel(point_x, point_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2]];\n\n img.put_pixel(point_x as u32, point_y, image::Rgb(pixel));\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n", "file_path": "native/src/image.rs", 
"rank": 30, "score": 16071.776625414885 }, { "content": " img.put_pixel(*start_x, point_y, image::Rgba(pixel));\n\n }\n\n }\n\n\n\n for x in 0..child_image_width {\n\n let point_x = x + start_x;\n\n let point_y = start_y + child_image_height;\n\n if point_y >= parent_image_height {\n\n break;\n\n }\n\n if point_x < parent_image_width - 1 {\n\n let pixel = self.image.get_pixel(point_x, point_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2], pixel[3]];\n\n img.put_pixel(point_x, point_y as u32, image::Rgba(pixel));\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n for y in 0..child_image_height {\n", "file_path": "native/src/image.rs", "rank": 31, "score": 16071.685615985649 }, { "content": " hash_string: _,\n\n hamming_distance: _,\n\n } = p;\n\n\n\n for x in 0..child_image_width {\n\n let point_x = x + start_x;\n\n if point_x < parent_image_width - 1 {\n\n let pixel = self.image.get_pixel(point_x, *start_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2], pixel[3]];\n\n img.put_pixel(point_x, *start_y, image::Rgba(pixel));\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n for y in 0..child_image_height {\n\n let point_y = y + start_y;\n\n if point_y < parent_image_height - 1 {\n\n let pixel = self.image.get_pixel(*start_x, point_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2], pixel[3]];\n", "file_path": "native/src/image.rs", "rank": 32, "score": 16071.60708005731 }, { "content": " let point_y = y + start_y;\n\n if point_y < parent_image_height - 1 {\n\n let pixel = self.image.get_pixel(*start_x, point_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2]];\n\n img.put_pixel(*start_x, point_y, image::Rgb(pixel));\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n for x in 0..child_image_width {\n\n let point_x = x + start_x;\n\n let point_y = start_y + child_image_height;\n\n if point_y >= parent_image_height {\n\n break;\n\n }\n\n if point_x < parent_image_width - 1 {\n\n let pixel = self.image.get_pixel(point_x, 
point_y);\n\n let pixel = [255 - pixel[0], 255 - pixel[1], 255 - pixel[2]];\n\n img.put_pixel(point_x, point_y as u32, image::Rgb(pixel));\n", "file_path": "native/src/image.rs", "rank": 33, "score": 16071.554534020197 }, { "content": "# node-image-search\n\n在一张大图中, 找到一个相似的小图的坐标.\n\n\n\n通过对比两个图片的差异值哈希, 计算汉明距离, 找到最相似的子图的坐标.\n\n\n\n随缘待解决:\n\n\n\n- 计算速度很慢\n\n- png图片, 带有透明像素似乎会影响计算出来的哈希\n", "file_path": "README.md", "rank": 34, "score": 10688.180216186276 }, { "content": "extern crate neon_build;\n\n\n", "file_path": "native/build.rs", "rank": 38, "score": 6.396547417501932 }, { "content": "mod image;\n\nmod utils;\n\n\n", "file_path": "native/src/main.rs", "rank": 39, "score": 6.141832917412286 } ]
Rust
services/headless-lms/server/src/ts_binding_generator.rs
rage/secret-project-331
3c78c02f2f1d2e4539522e73c3065ae8866604e3
use crate::controllers::{ auth::Login, course_material::{ exams::{ExamData, ExamEnrollmentData}, submissions::PreviousSubmission, }, main_frontend::{ courses::GetFeedbackQuery, exams::ExamCourseInfo, exercises::ExerciseSubmissions, feedback::MarkAsRead, proposed_edits::GetEditProposalsQuery, }, ErrorResponse, UploadResult, }; use headless_lms_models::*; use headless_lms_utils::pagination::Pagination; macro_rules! export { ($target:expr, $($types:ty),*) => { { let target = $target; fn _export(target: &mut impl ::std::io::Write) -> ::std::result::Result<(), ::std::io::Error> { $( writeln!(target, "export {}\n", <$types as ::ts_rs::TS>::decl())?; )* Ok(()) } _export(target) } }; } #[test] fn ts_binding_generator() { let mut target = std::fs::File::create("../../../shared-module/src/bindings.ts").unwrap(); let res = export! { &mut target, chapters::Chapter, chapters::ChapterStatus, chapters::ChapterUpdate, chapters::ChapterWithStatus, chapters::NewChapter, chapters::UserCourseInstanceChapterProgress, course_instance_enrollments::CourseInstanceEnrollment, course_instances::ChapterScore, course_instances::CourseInstance, course_instances::CourseInstanceForm, course_instances::PointMap, course_instances::Points, course_instances::VariantStatus, courses::Course, courses::CourseStructure, courses::CourseUpdate, courses::NewCourse, courses::CourseCount, email_templates::EmailTemplate, email_templates::EmailTemplateNew, email_templates::EmailTemplateUpdate, exams::CourseExam, exams::Exam, exams::ExamEnrollment, exercise_service_info::CourseMaterialExerciseServiceInfo, exercise_service_info::ExerciseServiceInfoApi, exercise_services::ExerciseService, exercise_services::ExerciseServiceNewOrUpdate, exercise_slides::ExerciseSlide, exercise_tasks::CourseMaterialExerciseTask, exercise_tasks::ExerciseTask, exercises::ActivityProgress, exercises::CourseMaterialExercise, exercises::Exercise, exercises::ExerciseStatus, exercises::GradingProgress, feedback::Feedback, 
feedback::FeedbackBlock, feedback::FeedbackCount, feedback::NewFeedback, gradings::Grading, gradings::UserPointsUpdateStrategy, organizations::Organization, page_history::PageHistory, page_history::HistoryChangeReason, pages::CmsPageExercise, pages::CmsPageExerciseSlide, pages::CmsPageExerciseTask, pages::CmsPageUpdate, pages::ContentManagementPage, pages::CoursePageWithUserData, pages::ExerciseWithExerciseTasks, pages::HistoryRestoreData, pages::Page, pages::PageRoutingDataWithChapterStatus, pages::PageSearchRequest, pages::PageSearchResult, pages::PageWithExercises, pages::NewPage, playground_examples::PlaygroundExample, playground_examples::PlaygroundExampleData, proposed_block_edits::BlockProposal, proposed_block_edits::BlockProposalAction, proposed_block_edits::BlockProposalInfo, proposed_block_edits::NewProposedBlockEdit, proposed_block_edits::ProposalStatus, proposed_page_edits::EditProposalInfo, proposed_page_edits::NewProposedPageEdits, proposed_page_edits::PageProposal, proposed_page_edits::ProposalCount, submissions::Submission, submissions::SubmissionCount, submissions::SubmissionCountByWeekAndHour, submissions::SubmissionCountByExercise, submissions::SubmissionInfo, submissions::SubmissionResult, submissions::NewSubmission, submissions::GradingResult, user_course_settings::UserCourseSettings, user_exercise_states::UserCourseInstanceChapterExerciseProgress, user_exercise_states::UserCourseInstanceProgress, users::User, PreviousSubmission, ExamData, ExamEnrollmentData, ExamCourseInfo, Login, UploadResult, ExerciseSubmissions, MarkAsRead, GetFeedbackQuery, GetEditProposalsQuery, ErrorResponse, Pagination }; res.unwrap(); }
use crate::controllers::{ auth::Login, course_material::{ exams::{ExamData, ExamEnrollmentData}, submissions::PreviousSubmission, }, main_frontend::{ courses::GetFeedbackQuery, exams::ExamCourseInfo, exercises::ExerciseSubmissions, feedback::MarkAsRead, proposed_edits::GetEditProposalsQuery, }, ErrorResponse, UploadResult, }; use headless_lms_models::*; use headless_lms_utils::pagination::Pagination; macro_rules! export { ($target:expr, $($types:ty),*) => { { let target = $target; fn _export(target: &mut impl ::std::io::Write) -> ::std::result::Result<(), ::std::io::Error> { $( writeln!(target, "export {}\n", <$types as ::ts_rs::TS>::decl())?; )* Ok(()) } _export(target) } }; } #[test] fn ts_binding_generator() { let mut target = std::fs::File::create("../../../shared-module/src/bindings.ts").unwrap(); let res = export! { &mut target, chapters::Chapter, chapters::ChapterStatus, chapters::ChapterUpdate, chapters::ChapterWithStatus, chapters::NewChapter, chapters::UserCourseInstanceChapterProgress, course_instance_enrollments::CourseInstanceEnrollment, course_instances::ChapterScore, course_instances::CourseInstance, course_instances::CourseInstanceForm, course_instances::PointMap, course_instances::Points, course_instances::VariantStatus, courses::Course, courses::CourseStructure, courses::CourseUpdate, courses::NewCourse, courses::CourseCount, email_templates::EmailTemplate, email_templates::EmailTemplateNew, email_templates::EmailTemplateUpdate, exams::CourseExam, exams::Exam, exams::ExamEnrollment, exercise_service_info::CourseMaterialExerciseServiceInfo, exercise_service_info::ExerciseServiceInfoApi, exercise_services::ExerciseService, exercise_services::ExerciseServiceNewOrUpdate, exercise_slides::ExerciseSlide, exercise_tasks::CourseMaterialExerciseTask, exercise_tasks::ExerciseTask, exercises::ActivityProgress, exercises::CourseMaterialExercise, exercises::Exercise, exercises::ExerciseStatus, exercises::GradingProgress, feedback::Feedback, 
feedback::FeedbackBlock, feedback::FeedbackCount, feedback::NewFeedback, gradings::Grading, gradings::UserPointsUpdateStrategy,
organizations::Organization, page_history::PageHistory, page_history::HistoryChangeReason, pages::CmsPageExercise, pages::CmsPageExerciseSlide, pages::CmsPageExerciseTask, pages::CmsPageUpdate, pages::ContentManagementPage, pages::CoursePageWithUserData, pages::ExerciseWithExerciseTasks, pages::HistoryRestoreData, pages::Page, pages::PageRoutingDataWithChapterStatus, pages::PageSearchRequest, pages::PageSearchResult, pages::PageWithExercises, pages::NewPage, playground_examples::PlaygroundExample, playground_examples::PlaygroundExampleData, proposed_block_edits::BlockProposal, proposed_block_edits::BlockProposalAction, proposed_block_edits::BlockProposalInfo, proposed_block_edits::NewProposedBlockEdit, proposed_block_edits::ProposalStatus, proposed_page_edits::EditProposalInfo, proposed_page_edits::NewProposedPageEdits, proposed_page_edits::PageProposal, proposed_page_edits::ProposalCount, submissions::Submission, submissions::SubmissionCount, submissions::SubmissionCountByWeekAndHour, submissions::SubmissionCountByExercise, submissions::SubmissionInfo, submissions::SubmissionResult, submissions::NewSubmission, submissions::GradingResult, user_course_settings::UserCourseSettings, user_exercise_states::UserCourseInstanceChapterExerciseProgress, user_exercise_states::UserCourseInstanceProgress, users::User, PreviousSubmission, ExamData, ExamEnrollmentData, ExamCourseInfo, Login, UploadResult, ExerciseSubmissions, MarkAsRead, GetFeedbackQuery, GetEditProposalsQuery, ErrorResponse, Pagination }; res.unwrap(); }
function_block-function_prefix_line
[ { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/uploads/{tail:.*}\", web::get().to(serve_upload))\n\n .route(\"{tail:.*}\", web::get().to(redirect_to_storage_service));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/files.rs", "rank": 0, "score": 155878.01330495626 }, { "content": "/// Add controllers from all the submodules.\n\npub fn configure_controllers(cfg: &mut ServiceConfig) {\n\n cfg.service(web::scope(\"/course-material\").configure(course_material::_add_routes))\n\n .service(web::scope(\"/cms\").configure(cms::_add_routes))\n\n .service(web::scope(\"/files\").configure(files::_add_routes))\n\n .service(web::scope(\"/main-frontend\").configure(main_frontend::_add_routes))\n\n .service(web::scope(\"/auth\").configure(auth::_add_routes));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/mod.rs", "rank": 1, "score": 155878.01330495626 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/login\", web::post().to(login))\n\n .route(\"/logout\", web::post().to(logout))\n\n .route(\"/logged-in\", web::get().to(logged_in));\n\n}\n\n\n\npub type LoginToken = Result<\n\n StandardTokenResponse<EmptyExtraTokenFields, BasicTokenType>,\n\n RequestTokenError<\n\n oauth2::reqwest::Error<reqwest::Error>,\n\n StandardErrorResponse<BasicErrorResponseType>,\n\n >,\n\n>;\n\n\n\npub async fn get_user_from_moocfi(\n\n token: &LoginToken,\n\n conn: &mut PgConnection,\n\n) -> ControllerResult<User> {\n\n info!(\"Getting user details from mooc.fi\");\n\n if let Ok(token) = token {\n", "file_path": "services/headless-lms/server/src/controllers/auth.rs", "rank": 2, "score": 155878.01330495626 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{exam_id}/upload\", web::post().to(add_media));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/exams.rs", "rank": 3, "score": 154561.5173339167 }, { "content": "pub fn _add_routes(cfg: &mut 
ServiceConfig) {\n\n cfg.route(\"/{course_id}/upload\", web::post().to(add_media));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/courses.rs", "rank": 4, "score": 154561.5173339167 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{organization_id}/upload\", web::post().to(add_media));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/organizations.rs", "rank": 5, "score": 154561.5173339167 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{page_id}\", web::get().to(get_page))\n\n .route(\"/{page_id}\", web::put().to(update_page));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/pages.rs", "rank": 6, "score": 154561.5173339167 }, { "content": "/// Add controllers from all the submodules.\n\npub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.service(web::scope(\"/pages\").configure(pages::_add_routes))\n\n .service(web::scope(\"/course-instances\").configure(course_instances::_add_routes))\n\n .service(web::scope(\"/email-templates\").configure(email_templates::_add_routes))\n\n .service(web::scope(\"/oembed\").configure(oembed::_add_routes))\n\n .service(web::scope(\"/organizations\").configure(organizations::_add_routes))\n\n .service(web::scope(\"/courses\").configure(courses::_add_routes))\n\n .service(web::scope(\"/exams\").configure(exams::_add_routes));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/mod.rs", "rank": 7, "score": 154561.5173339167 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/preview\", web::get().to(get_oembed_data_from_provider));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/oembed.rs", "rank": 8, "score": 154561.5173339167 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/exam/{page_id}\", web::get().to(get_by_exam_id))\n\n .route(\"/{current_page_id}/next-page\", 
web::get().to(get_next_page))\n\n .route(\"/{current_page_id}/url-path\", web::get().to(get_url_path));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/pages.rs", "rank": 9, "score": 153280.93573714423 }, { "content": "/// Add controllers from all the submodules.\n\npub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.service(web::scope(\"/courses\").configure(courses::_add_routes))\n\n .service(web::scope(\"/exercises\").configure(exercises::_add_routes))\n\n .service(web::scope(\"/pages\").configure(pages::_add_routes))\n\n .service(web::scope(\"/chapters\").configure(chapters::_add_routes))\n\n .service(web::scope(\"/submissions\").configure(submissions::_add_routes))\n\n .service(web::scope(\"/course-instances\").configure(course_instances::_add_routes))\n\n .service(web::scope(\"/proposed-edits\").configure(proposed_edits::_add_routes))\n\n .service(web::scope(\"/exams\").configure(exams::_add_routes));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/mod.rs", "rank": 10, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{course_id}\", web::get().to(get_course))\n\n .route(\"\", web::post().to(post_new_course))\n\n .route(\"/{course_id}\", web::put().to(update_course))\n\n .route(\"/{course_id}\", web::delete().to(delete_course))\n\n .route(\n\n \"/{course_id}/daily-submission-counts\",\n\n web::get().to(get_daily_submission_counts),\n\n )\n\n .route(\"/{course_id}/exercises\", web::get().to(get_all_exercises))\n\n .route(\n\n \"/{course_id}/structure\",\n\n web::get().to(get_course_structure),\n\n )\n\n .route(\n\n \"/{course_id}/language-versions\",\n\n web::get().to(get_all_course_language_versions),\n\n )\n\n .route(\n\n \"/{course_id}/language-versions\",\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/courses.rs", "rank": 11, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut 
ServiceConfig) {\n\n cfg.route(\"\", web::post().to(post_submission)).route(\n\n \"/previous-for-exercise/{exercise_id}\",\n\n web::get().to(previous_submission),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/submissions.rs", "rank": 12, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"\", web::post().to(post_new_page))\n\n .route(\"/{page_id}\", web::delete().to(delete_page))\n\n .route(\"/{page_id}/history\", web::get().to(history))\n\n .route(\"/{page_id}/history_count\", web::get().to(history_count))\n\n .route(\"/{history_id}/restore\", web::post().to(restore));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/pages.rs", "rank": 13, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{email_template_id}\", web::get().to(get_email_template))\n\n .route(\"/{email_template_id}\", web::put().to(update_email_template));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/email_templates.rs", "rank": 14, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{id}\", web::get().to(get_exam))\n\n .route(\"/{id}/set\", web::post().to(set_course))\n\n .route(\"/{id}/unset\", web::post().to(unset_course))\n\n .route(\"/{id}/export-points\", web::get().to(export_points))\n\n .route(\n\n \"/{id}/export-submissions\",\n\n web::get().to(export_submissions),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/exams.rs", "rank": 15, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\n\n \"/{course_instance_id}/organization\",\n\n web::get().to(get_organization_id),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/cms/course_instances.rs", "rank": 16, "score": 153280.93573714423 }, { "content": "pub fn 
_add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{submission_id}/info\", web::get().to(get_submission_info));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/submissions.rs", "rank": 17, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\n\n \"/{exercise_id}/submissions\",\n\n web::get().to(get_exercise_submissions),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/exercises.rs", "rank": 18, "score": 153280.93573714423 }, { "content": "/// Add controllers from all the submodules.\n\npub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.service(web::scope(\"/chapters\").configure(chapters::_add_routes))\n\n .service(web::scope(\"/course-instances\").configure(course_instances::_add_routes))\n\n .service(web::scope(\"/courses\").configure(courses::_add_routes))\n\n .service(web::scope(\"/email-templates\").configure(email_templates::_add_routes))\n\n .service(web::scope(\"/exercises\").configure(exercises::_add_routes))\n\n .service(web::scope(\"/feedback\").configure(feedback::_add_routes))\n\n .service(web::scope(\"/org\").configure(org::_add_routes))\n\n .service(web::scope(\"/organizations\").configure(organizations::_add_routes))\n\n .service(web::scope(\"/pages\").configure(pages::_add_routes))\n\n .service(web::scope(\"/submissions\").configure(submissions::_add_routes))\n\n .service(web::scope(\"/proposed-edits\").configure(proposed_edits::_add_routes))\n\n .service(web::scope(\"/exercise-services\").configure(exercise_services::_add_routes))\n\n .service(web::scope(\"/playground_examples\").configure(playground_examples::_add_routes))\n\n .service(web::scope(\"/users\").configure(users::_add_routes))\n\n .service(web::scope(\"/exams\").configure(exams::_add_routes));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/mod.rs", "rank": 19, "score": 153280.93573714423 }, { "content": "pub fn 
_add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\n\n \"/{organization_slug}\",\n\n web::get().to(get_organization_by_slug),\n\n )\n\n .route(\n\n \"/{organization_slug}/courses\",\n\n web::get().to(get_organization_courses_by_slug),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/org.rs", "rank": 20, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"\", web::get().to(get_all_organizations))\n\n .route(\"/{organization_id}\", web::get().to(get_organization))\n\n .route(\n\n \"/{organization_id}/courses\",\n\n web::get().to(get_organization_courses),\n\n )\n\n .route(\n\n \"/{organization_id}/courses/count\",\n\n web::get().to(get_organization_course_count),\n\n )\n\n .route(\n\n \"/{organization_id}/courses/active\",\n\n web::get().to(get_organization_active_courses),\n\n )\n\n .route(\n\n \"/{organization_id}/courses/active/count\",\n\n web::get().to(get_organization_active_courses_count),\n\n )\n\n .route(\n\n \"/{organization_id}/image\",\n\n web::put().to(set_organization_image),\n\n )\n\n .route(\n\n \"/{organization_id}/image\",\n\n web::delete().to(remove_organization_image),\n\n )\n\n .route(\"/{organization_id}/exams\", web::get().to(get_exams));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/organizations.rs", "rank": 21, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{exercise_id}\", web::get().to(get_exercise));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/exercises.rs", "rank": 22, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{user_id}\", web::get().to(get_user));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/users.rs", "rank": 23, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) 
{\n\n cfg.route(\"/{chapter_id}/pages\", web::get().to(get_chapters_pages))\n\n .route(\n\n \"/{chapter_id}/exercises\",\n\n web::get().to(get_chapters_exercises),\n\n )\n\n .route(\n\n \"/{chapter_id}/pages-exclude-mainfrontpage\",\n\n web::get().to(get_chapters_pages_without_main_frontpage),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/chapters.rs", "rank": 24, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{course_id}\", web::get().to(get_course))\n\n .route(\"/{course_id}/chapters\", web::get().to(get_chapters))\n\n .route(\n\n \"/{course_id}/course-instances\",\n\n web::get().to(get_course_instances),\n\n )\n\n .route(\n\n \"/{course_id}/current-instance\",\n\n web::get().to(get_current_course_instance),\n\n )\n\n .route(\"/{course_id}/feedback\", web::post().to(feedback))\n\n .route(\n\n \"/{course_id}/page-by-path/{url_path:.*}\",\n\n web::get().to(get_course_page_by_path),\n\n )\n\n .route(\"/{course_id}/pages\", web::get().to(get_course_pages))\n\n .route(\n\n \"/{course_id}/search-pages-with-phrase\",\n\n web::post().to(search_pages_with_phrase),\n", "file_path": "services/headless-lms/server/src/controllers/course_material/courses.rs", "rank": 25, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{id}/enrollment\", web::get().to(enrollment))\n\n .route(\"/{id}/enroll\", web::post().to(enroll))\n\n .route(\"/{id}\", web::get().to(fetch_exam_for_user));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/exams.rs", "rank": 26, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"\", web::post().to(post_new_chapter))\n\n .route(\"/{chapter_id}\", web::delete().to(delete_chapter))\n\n .route(\"/{chapter_id}\", web::put().to(update_chapter))\n\n .route(\"/{chapter_id}/image\", 
web::put().to(set_chapter_image))\n\n .route(\n\n \"/{chapter_id}/image\",\n\n web::delete().to(remove_chapter_image),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/chapters.rs", "rank": 27, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{feedback_id}\", web::post().to(mark_as_read));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/feedback.rs", "rank": 28, "score": 153280.93573714423 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"\", web::get().to(get_playground_examples))\n\n .route(\"\", web::post().to(insert_playground_example))\n\n .route(\"\", web::put().to(update_playground_example))\n\n .route(\"/{id}\", web::delete().to(delete_playground_example));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/playground_examples.rs", "rank": 29, "score": 152034.81865976262 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\n\n \"/{email_template_id}\",\n\n web::delete().to(delete_email_template),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/email_templates.rs", "rank": 30, "score": 152034.81865976262 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/\", web::post().to(add_exercise_service))\n\n .route(\"/\", web::get().to(get_exercise_services))\n\n .route(\n\n \"/{exercise_service_id}\",\n\n web::delete().to(delete_exercise_service),\n\n )\n\n .route(\n\n \"/{exercise_service_id}\",\n\n web::put().to(update_exercise_service),\n\n )\n\n .route(\n\n \"/{exercise_service_id}\",\n\n web::get().to(get_exercise_service_by_id),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/exercise_services.rs", "rank": 31, "score": 152034.81865976262 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\n\n 
\"/{course_instance_id}/enroll\",\n\n web::post().to(add_user_enrollment),\n\n )\n\n .route(\n\n \"/{course_instance_id}/progress\",\n\n web::get().to(get_user_progress_for_course_instance),\n\n )\n\n .route(\n\n \"/{course_instance_id}/chapters/{chapter_id}/exercises/progress\",\n\n web::get().to(get_user_progress_for_course_instance_chapter_exercises),\n\n )\n\n .route(\n\n \"/{course_instance_id}/chapters/{chapter_id}/progress\",\n\n web::get().to(get_user_progress_for_course_instance_chapter),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/course_instances.rs", "rank": 32, "score": 152034.81865976262 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{course_instance_id}\", web::get().to(get_course_instance))\n\n .route(\n\n \"/{course_instance_id}/email-templates\",\n\n web::post().to(post_new_email_template),\n\n )\n\n .route(\n\n \"/{course_instance_id}/email-templates\",\n\n web::get().to(get_email_templates_by_course_instance_id),\n\n )\n\n .route(\n\n \"/{course_instance_id}/points/export\",\n\n web::get().to(point_export),\n\n )\n\n .route(\"/{course_instance_id}/edit\", web::post().to(edit))\n\n .route(\"/{course_instance_id}/delete\", web::post().to(delete))\n\n .route(\"/{course_instance_id}/points\", web::get().to(points));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/course_instances.rs", "rank": 33, "score": 152034.81865976262 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/{course_id}\", web::post().to(post_proposed_edits));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/course_material/proposed_edits.rs", "rank": 34, "score": 152034.81865976262 }, { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/course/{course_id}\", web::get().to(get_edit_proposals))\n\n .route(\n\n \"/course/{course_id}/count\",\n\n web::get().to(get_edit_proposal_count),\n\n )\n\n 
.route(\n\n \"/process-edit-proposal\",\n\n web::post().to(process_edit_proposal),\n\n );\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/proposed_edits.rs", "rank": 35, "score": 152034.81865976262 }, { "content": "ALTER COLUMN deleted_at TYPE BOOLEAN USING CASE\n\n WHEN deleted_at = NULL THEN false\n\n ELSE true\n\n END;\n", "file_path": "services/headless-lms/migrations/20210528091159_change_deleted_to_deleted_at.down.sql", "rank": 36, "score": 124418.9043202754 }, { "content": "pub fn setup_tracing() -> Result<(), Box<dyn Error>> {\n\n let subscriber = tracing_subscriber::Registry::default()\n\n .with(\n\n tracing_subscriber::fmt::layer()\n\n .event_format(tracing_subscriber::fmt::format().compact()),\n\n )\n\n .with(ErrorLayer::default())\n\n .with(EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(\"info\")));\n\n tracing::subscriber::set_global_default(subscriber)?;\n\n LogTracer::init()?;\n\n Ok(())\n\n}\n\n\n\n// tried storing PgPool here but that caused strange errors\n\nstatic DB_URL: Mutex<Option<String>> = Mutex::const_new(None);\n\n\n\nasync fn get_or_init_db() -> String {\n\n // if initialized, return a connection to the pool\n\n let mut guard = DB_URL.lock().await;\n\n if let Some(db) = guard.as_ref() {\n", "file_path": "services/headless-lms/models/src/test_helper.rs", "rank": 37, "score": 123109.33515508947 }, { "content": "fn path(file_name: &str, file_type: FileType, store_kind: StoreKind) -> PathBuf {\n\n let (base_dir, base_id) = match store_kind {\n\n StoreKind::Organization(id) => (\"organization\", id),\n\n StoreKind::Course(id) => (\"course\", id),\n\n StoreKind::Exam(id) => (\"exam\", id),\n\n };\n\n let file_type_subdir = match file_type {\n\n FileType::Image => \"images\",\n\n FileType::Audio => \"audios\",\n\n FileType::File => \"files\",\n\n };\n\n [base_dir, &base_id.to_string(), file_type_subdir, file_name]\n\n .iter()\n\n .collect()\n\n}\n", "file_path": 
"services/headless-lms/server/src/controllers/helpers/media.rs", "rank": 38, "score": 122667.76969944432 }, { "content": "ALTER COLUMN deleted_at TYPE TIMESTAMP WITHOUT TIME ZONE USING CASE\n\n WHEN deleted_at = false THEN NULL\n\n ELSE now()\n\n END;\n", "file_path": "services/headless-lms/migrations/20210528091159_change_deleted_to_deleted_at.up.sql", "rank": 39, "score": 115358.81748442232 }, { "content": "const Test: React.FC<TestProps> = (props) => {\n\n return <StyledText {...props}>{PLACEHOLDER_TEXT}</StyledText>\n", "file_path": "shared-module/src/components/Test.tsx", "rank": 40, "score": 112532.51437891199 }, { "content": "ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'Europe/Helsinki',\n\n ALTER COLUMN updated_at TYPE TIMESTAMP USING updated_at AT TIME ZONE 'Europe/Helsinki',\n\n ALTER COLUMN deleted_at TYPE TIMESTAMP USING deleted_at AT TIME ZONE 'Europe/Helsinki';\n", "file_path": "services/headless-lms/migrations/20210603174417_change_timestamps_to_have_timezone.down.sql", "rank": 41, "score": 111627.53149877836 }, { "content": "ALTER COLUMN created_at TYPE TIMESTAMPTZ USING created_at AT TIME ZONE 'Europe/Helsinki',\n\n ALTER COLUMN updated_at TYPE TIMESTAMPTZ USING updated_at AT TIME ZONE 'Europe/Helsinki',\n\n ALTER COLUMN deleted_at TYPE TIMESTAMPTZ USING deleted_at AT TIME ZONE 'Europe/Helsinki';\n", "file_path": "services/headless-lms/migrations/20210603174417_change_timestamps_to_have_timezone.up.sql", "rank": 42, "score": 111627.53149877836 }, { "content": "const Wrapper: React.FC = ({ children }) => (\n\n <QueryClientProvider client={testClient}>{children}</QueryClientProvider>\n", "file_path": "shared-module/tests/hooks/useStateQuery.test.tsx", "rank": 43, "score": 108284.66326704688 }, { "content": "/* eslint-disable i18next/no-literal-string */\n\nimport { renderHook } from \"@testing-library/react-hooks\"\n\n\n\nimport useMessageChannel from \"../../src/hooks/useMessageChannel\"\n\n\n\ntest(\"useMessageChannel 
returns a message channel\", () => {\n\n // @ts-ignore: jsdom does not have MessageChannel\n\n window.MessageChannel = jest.fn().mockReturnValue({ port1: {}, port2: {} })\n\n const { result } = renderHook(() => useMessageChannel())\n\n expect(result).not.toBeNull()\n\n})\n", "file_path": "shared-module/tests/hooks/useMessageChannel.test.ts", "rank": 44, "score": 100410.12593561743 }, { "content": "export const useTypedSelector: TypedUseSelectorHook<StoreState> = useSelector\n", "file_path": "services/quizzes/src/store/store.ts", "rank": 45, "score": 97723.0004637175 }, { "content": "type GradingFutures =\n\n HashMap<String, Vec<Pin<Box<dyn Future<Output = GradingData> + Send + 'static>>>>;\n\n\n\npub async fn regrade(\n\n conn: &mut PgConnection,\n\n exercise_services_by_type: &HashMap<String, (ExerciseService, ExerciseServiceInfo)>,\n\n) -> Result<()> {\n\n // stores all the futures which will resolve into new gradings\n\n let mut grading_futures = GradingFutures::new();\n\n // set of regradings that should not be marked as completed by the end\n\n let mut incomplete_regradings = HashSet::new();\n\n\n\n tracing::info!(\"fetching uncompleted regradings\");\n\n let regrading_ids =\n\n models::regradings::get_uncompleted_regradings_and_mark_as_started(&mut *conn).await?;\n\n for regrading_id in regrading_ids.iter().copied() {\n\n // set regrading progress to pending\n\n models::regradings::set_total_grading_progress(\n\n &mut *conn,\n\n regrading_id,\n", "file_path": "services/headless-lms/server/src/regrading.rs", "rank": 46, "score": 96448.36497330293 }, { "content": "fn add_course_url_prefix_to_search_results(\n\n search_results: Vec<PageSearchResult>,\n\n course: &Course,\n\n) -> Vec<PageSearchResult> {\n\n search_results\n\n .into_iter()\n\n .map(|mut sr| {\n\n let optional_slash = if sr.url_path.starts_with('/') {\n\n \"\"\n\n } else {\n\n \"/\"\n\n };\n\n sr.url_path = format!(\"/{}{}{}\", course.slug, optional_slash, sr.url_path);\n\n sr\n\n })\n\n 
.collect()\n\n}\n\n\n\n/// Restore page contents and exercises to a previous revision\n\npub async fn restore(\n", "file_path": "services/headless-lms/models/src/pages.rs", "rank": 48, "score": 93542.14681869585 }, { "content": "fn figure_out_new_score_given(\n\n current_score_given: Option<f32>,\n\n grading_score_given: Option<f32>,\n\n user_points_update_strategy: UserPointsUpdateStrategy,\n\n) -> Option<f32> {\n\n let current_score_given = if let Some(current_score_given) = current_score_given {\n\n current_score_given\n\n } else {\n\n info!(\n\n \"Current state has no score, using score from grading ({:?})\",\n\n grading_score_given\n\n );\n\n return grading_score_given;\n\n };\n\n let grading_score_given = if let Some(grading_score_given) = grading_score_given {\n\n grading_score_given\n\n } else {\n\n info!(\n\n \"Grading has no score, using score from current state ({:?})\",\n\n current_score_given\n", "file_path": "services/headless-lms/models/src/user_exercise_states.rs", "rank": 49, "score": 93542.14681869585 }, { "content": "fn figure_out_new_grading_progress(\n\n current_grading_progress: GradingProgress,\n\n grading_grading_progress: GradingProgress,\n\n) -> GradingProgress {\n\n match current_grading_progress {\n\n GradingProgress::FullyGraded => GradingProgress::FullyGraded,\n\n _ => grading_grading_progress,\n\n }\n\n}\n\n\n\n/**\n\nReturns a new state for the activity progress.\n\n\n\nIn the future this function will be extended to support peer reviews. 
When\n\nthere's a peer review associated with the exercise, the activity is not complete\n\nbefore the user has given the peer reviews that they're required to give.\n\n*/\n", "file_path": "services/headless-lms/models/src/user_exercise_states.rs", "rank": 50, "score": 93542.14681869585 }, { "content": "fn figure_out_new_activity_progress(\n\n current_activity_progress: ActivityProgress,\n\n) -> ActivityProgress {\n\n if current_activity_progress == ActivityProgress::Completed {\n\n return ActivityProgress::Completed;\n\n }\n\n\n\n // The case where activity is not completed when the user needs to give peer\n\n // reviews\n\n ActivityProgress::Completed\n\n}\n\n\n\npub async fn update_user_exercise_state(\n\n conn: &mut PgConnection,\n\n grading: &Grading,\n\n submission: &Submission,\n\n) -> ModelResult<UserExerciseState> {\n\n let Submission {\n\n user_id,\n\n exercise_id,\n", "file_path": "services/headless-lms/models/src/user_exercise_states.rs", "rank": 51, "score": 93542.14681869585 }, { "content": "pub fn configure(\n\n config: &mut ServiceConfig,\n\n file_store: Arc<dyn FileStore>,\n\n app_conf: ApplicationConfiguration,\n\n) {\n\n let json_config =\n\n web::JsonConfig::default()\n\n .limit(1048576)\n\n .error_handler(|err, _req| -> actix_web::Error {\n\n info!(\"Bad request: {}\", &err);\n\n let body = format!(\"{{\\\"title\\\": \\\"Bad Request\\\", \\\"message\\\": \\\"{}\\\"}}\", &err);\n\n let body_bytes = body.as_bytes();\n\n // create custom error response\n\n let response = HttpResponse::with_body(\n\n StatusCode::BAD_REQUEST,\n\n AnyBody::copy_from_slice(body_bytes),\n\n );\n\n InternalError::from_response(err, response).into()\n\n });\n\n config\n", "file_path": "services/headless-lms/server/src/lib.rs", "rank": 52, "score": 91823.7331410537 }, { "content": "DROP TYPE grading_progress;\n", "file_path": "services/headless-lms/migrations/20210504040825_update_submissions.down.sql", "rank": 53, "score": 90945.98244769397 }, { "content": "DROP TYPE 
user_role;\n", "file_path": "services/headless-lms/migrations/20210607201656_create-roles.down.sql", "rank": 54, "score": 90945.98244769397 }, { "content": "// does not use async fn because the arguments should only be borrowed\n\n// for the part before any async stuff happens\n\npub fn send_grading_request(\n\n grade_url: Url,\n\n exercise_task: &ExerciseTask,\n\n submission: &Submission,\n\n) -> impl Future<Output = ModelResult<GradingResult>> + 'static {\n\n let client = reqwest::Client::new();\n\n let req = client\n\n .post(grade_url)\n\n .timeout(Duration::from_secs(120))\n\n .json(&GradingRequest {\n\n exercise_spec: &exercise_task.private_spec,\n\n submission_data: &submission.data_json,\n\n });\n\n async {\n\n let res = req.send().await?;\n\n let status = res.status();\n\n if !status.is_success() {\n\n return Err(ModelError::Generic(\"Grading failed\".to_string()));\n\n }\n\n let obj = res.json::<GradingResult>().await?;\n", "file_path": "services/headless-lms/models/src/gradings.rs", "rank": 55, "score": 90290.9246368091 }, { "content": "pub fn stream_exam_submissions(\n\n conn: &mut PgConnection,\n\n exam_id: Uuid,\n\n) -> impl Stream<Item = sqlx::Result<ExportedSubmission>> + '_ {\n\n sqlx::query_as!(\n\n ExportedSubmission,\n\n \"\n", "file_path": "services/headless-lms/models/src/submissions.rs", "rank": 56, "score": 90280.98501670144 }, { "content": "DROP TYPE activity_progress;\n", "file_path": "services/headless-lms/migrations/20210609111555_add_user_exercise_states.down.sql", "rank": 57, "score": 89453.20865763386 }, { "content": "DROP TYPE user_points_update_strategy;\n", "file_path": "services/headless-lms/migrations/20210504040825_update_submissions.down.sql", "rank": 58, "score": 89453.20865763386 }, { "content": "DROP TYPE history_change_reason;\n", "file_path": "services/headless-lms/migrations/20210812035454_add_page_history.down.sql", "rank": 59, "score": 89453.20865763386 }, { "content": "### Writing unit tests that use the 
database\n\n\n\nUse the `headless_lms_actix::test_helper::Conn` helper struct. It can be initialized using `Conn::init`, after which the only method available for it is `Conn::begin`, which starts a transaction and returns a wrapper struct that can be used in place of `&mut PgConnection` by calling `AsMut::as_mut`. For example:\n\n\n\n```rust\n\nlet mut conn = Conn::init().await;\n\nlet mut tx = conn.begin().await;\n\nlet orgs = all_organizations(tx.as_mut()).await.unwrap();\n\n```\n\n\n\nUsing these helper structs helps ensure that you do not accidentally make permanent modifications to the dev database. It also helps keep tests separate from each other: modifications to the database made using a given `Conn` are only visible when making queries with the same `Conn` instance.\n\n\n\n## Adding new dependency to cargo.toml\n\n\n\n1. search the dependency using `cargo search <DEPENDENCY_NAME>`\n\n ![cargo search](img/cargo-search.png)\n\n2. Select the version of the dependency, which you need and add it manually to the **cargo.toml** file under the **[dependency]** section. Add also the comment, which was printed along side with its corresponding dependency version.\n\n ![rust dependencies](img/rust-dependencies.png)\n\n\n\nThen you're done! Now you can use the dependency in the project.\n\n\n\n## Build problems with `bin/test` or `bin/dev`\n\n\n\n### Build fails because of a missing or an out of date program in the container\n\n\n\nThis might be the case if you get something like command not found or `error: no such subcommand: xxx` from cargo.\n\n\n\nUsually this is because the base image for the headless-lms container has been updated, but your computer has not pulled the updated image. To pull the updated image to your local Minikube, run the following command in the repo root: `bin/minikube-pull-headless-lms-dev-base`. 
After that restart the development environment.\n", "file_path": "docs/headless-lms.md", "rank": 62, "score": 18.899117474890144 }, { "content": " warn!(\"Trying development mode UUID login\");\n\n if let Ok(id) = Uuid::parse_str(&email) {\n\n let user = { models::users::get_by_id(&mut conn, id).await? };\n\n authorization::remember(&session, user)?;\n\n return Ok(HttpResponse::Ok().finish());\n\n };\n\n }\n\n\n\n if app_conf.test_mode {\n\n warn!(\"Using test credentials. Normal accounts won't work.\");\n\n let user = {\n\n models::users::authenticate_test_user(\n\n &mut conn,\n\n email.clone(),\n\n password.clone(),\n\n &app_conf,\n\n )\n\n .await\n\n };\n\n\n", "file_path": "services/headless-lms/server/src/controllers/auth.rs", "rank": 63, "score": 17.85451728958864 }, { "content": "\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n let bytes = Bytes::copy_from_slice(buf);\n\n self.sender\n\n .send(Ok(bytes))\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;\n\n Ok(buf.len())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::{\n\n io::{self, Cursor},\n\n sync::mpsc::Sender,\n\n };\n\n\n\n use bytes::Bytes;\n\n use headless_lms_models::{\n\n exercise_slides, exercise_tasks,\n", "file_path": "services/headless-lms/server/src/domain/csv_export.rs", "rank": 64, "score": 17.77660127126999 }, { "content": "\n\npub async fn find_by_upstream_id(\n\n conn: &mut PgConnection,\n\n upstream_id: i32,\n\n) -> ModelResult<Option<User>> {\n\n let user = sqlx::query_as!(\n\n User,\n\n \"SELECT * FROM users WHERE upstream_id = $1\",\n\n upstream_id\n\n )\n\n .fetch_optional(conn)\n\n .await?;\n\n Ok(user)\n\n}\n\n\n\n// Only used for testing, not to use in production.\n\npub async fn authenticate_test_user(\n\n conn: &mut PgConnection,\n\n email: String,\n\n password: String,\n", "file_path": "services/headless-lms/models/src/users.rs", "rank": 65, "score": 17.340165656589352 }, { "content": " }\n\n 
Ok(remapped_exercise_tasks)\n\n}\n\n\n\n/// Only used when testing.\n\npub async fn update_page_content(\n\n conn: &mut PgConnection,\n\n page_id: Uuid,\n\n content: &serde_json::Value,\n\n) -> ModelResult<()> {\n\n sqlx::query!(\n\n \"\n", "file_path": "services/headless-lms/models/src/pages.rs", "rank": 66, "score": 16.612400669915008 }, { "content": " next.exercise_task_id.to_string(),\n\n next.score_given.unwrap_or(0.0).to_string(),\n\n next.data_json\n\n .map(|o| o.to_string())\n\n .unwrap_or_else(|| \"\".to_string()),\n\n ];\n\n writer.write_record(csv_row);\n\n }\n\n let writer = writer.finish().await?;\n\n Ok(writer)\n\n}\n\n\n\npub struct CSVExportAdapter {\n\n pub sender: UnboundedSender<ControllerResult<Bytes>>,\n\n}\n\n\n\nimpl Write for CSVExportAdapter {\n\n fn flush(&mut self) -> std::io::Result<()> {\n\n Ok(())\n\n }\n", "file_path": "services/headless-lms/server/src/domain/csv_export.rs", "rank": 67, "score": 16.124151755140147 }, { "content": "mod test {\n\n use uuid::Uuid;\n\n\n\n use super::*;\n\n use crate::{\n\n email_templates::EmailTemplateNew,\n\n test_helper::{self, Conn, Data},\n\n };\n\n\n\n #[tokio::test]\n\n async fn email_templates_check() {\n\n let mut conn = Conn::init().await;\n\n let mut tx = conn.begin().await;\n\n let Data { instance: ci, .. } = test_helper::insert_data(tx.as_mut(), \"\").await.unwrap();\n\n\n\n let err = crate::email_templates::insert_email_template(\n\n tx.as_mut(),\n\n ci,\n\n EmailTemplateNew {\n\n name: \"\".to_string(),\n", "file_path": "services/headless-lms/models/src/error.rs", "rank": 68, "score": 15.814600946856402 }, { "content": "### Requiring authentication\n\n\n\nAuthentication is handled by the `domain::authorization::AuthUser` extractor type. If you want an endpoint to only be accessible by authenticated users, simply add a parameter of the type `AuthUser` to that endpoint. The user's ID and other information can then be accessed through the parameter. 
If an unauthenticated user attempts to access the endpoint, they will receive an authorization error.\n\n\n\n```rust\n\nuse crate::domain::authorization::AuthUser;\n\n\n\npub async fn private_endpoint(user: AuthUser) -> String {\n\n format!(\"Hello, {}!\", user.id)\n\n}\n\n```\n\n\n\nIf you're making an endpoint where you want to do different things depending on whether the user is logged in or not, you can add an `Option<AuthUser>` parameter. The endpoint can still be accessed by everyone, but the argument will contain the user's details if they are authenticated.\n\n\n\n```rust\n\nuse crate::domain::authorization::AuthUser;\n\n\n\npub async fn some_endpoint(user: Option<AuthUser>) -> String {\n\n if let Some(user) = user {\n\n format!(\"Hello, {}!\", user.id)\n\n } else {\n\n \"Hello, guest!\".to_string()\n\n }\n\n}\n\n```\n\n\n", "file_path": "docs/headless-lms.md", "rank": 69, "score": 15.78059980639568 }, { "content": " })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n use crate::{playground_examples::PlaygroundExampleData, test_helper::Conn};\n\n\n\n #[tokio::test]\n\n async fn insert_and_fetch_playground_example() {\n\n let mut conn = Conn::init().await;\n\n let mut tx = conn.begin().await;\n\n\n\n let inserted_data = insert_playground_example(\n\n tx.as_mut(),\n\n PlaygroundExampleData {\n\n name: \"test\".to_string(),\n\n url: \"https:\\\\test.com\".to_string(),\n\n width: 500,\n", "file_path": "services/headless-lms/models/src/playground_examples.rs", "rank": 70, "score": 15.681629793262879 }, { "content": " \"gif\" => Some(\"image/gif\"),\n\n _ => None,\n\n };\n\n }\n\n let mut response = HttpResponse::Ok();\n\n if let Some(m) = mime_type {\n\n response.append_header((\"content-type\", m));\n\n }\n\n Ok(response.body(contents))\n\n}\n\n\n\n/**\n\nAdd a route for each controller in this module.\n\n\n\nThe name starts with an underline in order to appear before other functions in the module documentation.\n\n\n\nWe add the routes by 
calling the route method instead of using the route annotations because this method preserves the function signatures for documentation.\n\n*/\n", "file_path": "services/headless-lms/server/src/controllers/files.rs", "rank": 71, "score": 15.608046972696322 }, { "content": " exercises::{self, GradingProgress},\n\n gradings,\n\n submissions::{self, GradingResult},\n\n users,\n\n };\n\n use serde_json::Value;\n\n\n\n use super::*;\n\n use crate::test_helper::{insert_data, Conn, Data};\n\n\n\n #[tokio::test]\n\n async fn exports() {\n\n let mut conn = Conn::init().await;\n\n let mut tx = conn.begin().await;\n\n\n\n let Data {\n\n user,\n\n course,\n\n instance,\n\n exercise,\n", "file_path": "services/headless-lms/server/src/domain/csv_export.rs", "rank": 72, "score": 15.087522072551124 }, { "content": " Ok(course)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::str::FromStr;\n\n\n\n use serde_json::Value;\n\n\n\n use super::*;\n\n use crate::{\n\n chapters::{self, DatabaseChapter, NewChapter},\n\n courses, exercise_slides,\n\n exercise_tasks::{self, ExerciseTask},\n\n exercises::{self, Exercise},\n\n organizations, pages,\n\n test_helper::Conn,\n\n users,\n\n };\n\n\n", "file_path": "services/headless-lms/models/src/courses.rs", "rank": 73, "score": 15.074065453396281 }, { "content": "use proc_macro::TokenStream;\n\nuse quote::ToTokens;\n\nuse syn::{Attribute, ItemFn, Type};\n\n\n\n/// Includes the type's JSON example generated by doc-file-generator as a string.\n\n/// Convenience alias for #[cfg_attr(doc, doc = generated_docs!(MyType))]\n\n#[proc_macro_attribute]\n", "file_path": "services/headless-lms/doc-macro/src/lib.rs", "rank": 74, "score": 14.893646614104128 }, { "content": " if let Some(db) = guard.as_ref() {\n\n return db.clone();\n\n }\n\n\n\n // initialize logging and db\n\n dotenv::dotenv().ok();\n\n let db = env::var(\"DATABASE_URL\")\n\n .unwrap_or_else(|_| \"postgres://headless-lms@localhost:54328/headless_lms_dev\".to_string());\n\n let _ = 
setup_tracing();\n\n\n\n // store initialized pool and return connection\n\n guard.replace(db.clone());\n\n db\n\n}\n\n\n\n/// Wrapper to ensure the test database isn't used without a transaction\n\npub struct Conn(PgConnection);\n\nimpl Conn {\n\n /// Initializes the test database and returns a connection wrapper\n\n pub async fn init() -> Conn {\n", "file_path": "services/headless-lms/server/src/test_helper.rs", "rank": 75, "score": 14.26983990090686 }, { "content": "### Using postgres enums in SQLx queries\n\n\n\nSQLx isn't able to automatically use postgres enums in its queries; it needs a type hint. For example, given the following postgres enum\n\n\n\n```postgres\n\nCREATE TYPE user_role AS ENUM ('admin', 'assistant', 'teacher', 'reviewer');\n\n```\n\n\n\nand corresponding Rust enum\n\n\n\n```rust\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone, Copy, Type)]\n\n#[sqlx(type_name = \"user_role\", rename_all = \"snake_case\")]\n\npub enum UserRole {\n\n Admin,\n\n Assistant,\n\n Teacher,\n\n Reviewer,\n\n}\n\n```\n\n\n\nyou could use `sqlx::query!` like this\n\n\n\n```rust\n\nlet role: UserRole = sqlx::query!(r#\"SELECT role AS \"role: UserRole\" FROM roles\"#)\n\n .fetch_one(&mut connection) // ^^^^^^^^^^^^^^^^^^^\n\n .await?\n\n .role;\n\n```\n\n\n\nThe same syntax can be used with `sqlx::query_as!`\n\n\n\n```rust\n\n let roles = sqlx::query_as!(\n\n Role,\n\n r#\"SELECT organization_id, course_id, role AS \"role: UserRole\" FROM roles WHERE user_id = $1\"#, user_id\n\n // ^^^^^^^^^^^^^^^^^^^\n\n )\n\n .fetch_all(&mut connection)\n\n .await?;\n\n```\n\n\n\nHere, `Role` is a struct with various fields, including a `role: UserRole` field.\n\n\n\n### Setup development with a local Postgres\n\n\n\nUsually you don't need this as you can use the Postgres started by either `bin/dev` or `bin/dev-only-db`.\n\n\n\n1. Rename `.env.example` -> `.env`\n\n2. In `.env` setup `DATABASE_URL=postgres://localhost/headless_lms_dev`\n\n3. 
`bin/local-dev-db-create-user`\n\n4. `bin/local-dev-db-create`\n\n5. Run `bin/sqlx-migrate-run`\n\n6. (Optional) `bin/seed-local`\n\n7. If migrations succeed, run `bin/dev`\n\n\n", "file_path": "docs/headless-lms.md", "rank": 76, "score": 14.232729986002655 }, { "content": "\n\n use super::*;\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n #[tokio::test]\n\n async fn it_works() {\n\n let dir = TempDir::new(\"test-folder-checksum\").expect(\"Failed to create a temp dir\");\n\n File::open(dir.path())\n\n .await\n\n .unwrap()\n\n .set_permissions(Permissions::from_mode(0o755))\n\n .await\n\n .unwrap();\n\n let first_hash = hash_folder(&dir.path()).await.unwrap();\n\n assert_eq!(\n\n first_hash.to_hex().to_string(),\n\n \"01444ae9678097d0214e449568b68eb351c4743b2697bfc3d517b5c601535823\"\n\n );\n\n let mut file = File::create(dir.path().join(\"test-file\")).await.unwrap();\n\n file.set_permissions(Permissions::from_mode(0o644))\n", "file_path": "services/headless-lms/utils/src/folder_checksum.rs", "rank": 77, "score": 14.20781168369188 }, { "content": " use super::*;\n\n use crate::{\n\n course_instance_enrollments::{self, NewCourseInstanceEnrollment},\n\n course_instances::{self, NewCourseInstance, VariantStatus},\n\n test_helper::{insert_data, Conn, Data},\n\n };\n\n\n\n #[tokio::test]\n\n async fn upserts_user_course_settings() {\n\n let mut conn = Conn::init().await;\n\n let mut tx = conn.begin().await;\n\n let Data {\n\n course,\n\n instance,\n\n user,\n\n ..\n\n } = insert_data(tx.as_mut(), \"example-exercise\").await.unwrap();\n\n\n\n let enrollment = course_instance_enrollments::insert_enrollment(\n\n tx.as_mut(),\n", "file_path": "services/headless-lms/models/src/user_course_settings.rs", "rank": 78, "score": 14.11149536685777 }, { "content": " return db.clone();\n\n }\n\n\n\n // initialize logging and db\n\n dotenv::dotenv().ok();\n\n let db = env::var(\"DATABASE_URL\")\n\n .unwrap_or_else(|_| 
\"postgres://headless-lms@localhost:54328/headless_lms_dev\".to_string());\n\n let _ = setup_tracing();\n\n\n\n // store initialized pool and return connection\n\n guard.replace(db.clone());\n\n db\n\n}\n\n\n\n/// Wrapper to ensure the test database isn't used without a transaction\n\npub struct Conn(PgConnection);\n\nimpl Conn {\n\n /// Initializes the test database and returns a connection wrapper\n\n pub async fn init() -> Conn {\n\n let db = get_or_init_db().await;\n", "file_path": "services/headless-lms/models/src/test_helper.rs", "rank": 79, "score": 14.097580517191213 }, { "content": " use super::*;\n\n use crate::{\n\n chapters,\n\n course_instance_enrollments::{self, NewCourseInstanceEnrollment},\n\n course_instances::{self, NewCourseInstance},\n\n course_language_groups, courses, exercise_slides, exercise_tasks, organizations, pages,\n\n test_helper::Conn,\n\n users,\n\n };\n\n\n\n #[tokio::test]\n\n async fn selects_course_material_exercise_for_enrolled_student() {\n\n let mut conn = Conn::init().await;\n\n let mut tx = conn.begin().await;\n\n\n\n let user_id = users::insert_with_id(\n\n tx.as_mut(),\n\n \"[email protected]\",\n\n Uuid::parse_str(\"e656e0a1-3f55-4f52-b0ae-96855faee5e7\").unwrap(),\n\n )\n", "file_path": "services/headless-lms/models/src/exercises.rs", "rank": 80, "score": 13.944699898404899 }, { "content": "FROM exercise_services\n\nWHERE slug = $1\n\n \"#,\n\n exercise_type\n\n )\n\n .fetch_one(conn)\n\n .await?;\n\n Ok(res)\n\n}\n\n\n\npub async fn get_exercise_service_internally_preferred_baseurl_by_exercise_type(\n\n conn: &mut PgConnection,\n\n exercise_type: &str,\n\n) -> ModelResult<Url> {\n\n let exercise_service = get_exercise_service_by_exercise_type(conn, exercise_type).await?;\n\n Ok(get_exercise_service_internally_preferred_baseurl(\n\n &exercise_service,\n\n )?)\n\n}\n\n\n", "file_path": "services/headless-lms/models/src/exercise_services.rs", "rank": 81, "score": 13.790494153825696 }, { "content": " 
sqlx::migrate!(\"../migrations\")\n\n .run(&mut conn)\n\n .await\n\n .expect(\"failed to run migrations\");\n\n setup_tracing().expect(\"Could not setup tracing.\");\n\n let mut lock = DB_URL.lock().await;\n\n *lock = Some(db.clone());\n\n db\n\n}\n\n\n\n/// Initialises the actix server for testing\n\npub async fn init_actix() -> (\n\n impl actix_web::dev::Service<Request, Response = ServiceResponse<AnyBody>, Error = actix_web::Error>,\n\n PgPool,\n\n) {\n\n let db = init_db().await;\n\n let private_cookie_key = \"sMG87WlKnNZoITzvL2+jczriTR7JRsCtGu/bSKaSIvw=\";\n\n let pool = PgPool::connect(&db)\n\n .await\n\n .expect(\"failed to connect to test db\");\n", "file_path": "services/headless-lms/server/tests/integration_test.rs", "rank": 82, "score": 13.640786186945478 }, { "content": "pub async fn get_service_info_by_exercise_type(\n\n conn: &mut PgConnection,\n\n exercise_type: &str,\n\n) -> ModelResult<ExerciseServiceInfo> {\n\n let exercise_service = get_exercise_service_by_exercise_type(conn, exercise_type).await?;\n\n let service_info = get_service_info_by_exercise_service(conn, &exercise_service).await?;\n\n Ok(service_info)\n\n}\n\n\n\npub async fn get_all_exercise_services_by_type(\n\n conn: &mut PgConnection,\n\n) -> ModelResult<HashMap<String, (ExerciseService, ExerciseServiceInfo)>> {\n\n let mut exercise_services_by_type = HashMap::new();\n\n for exercise_service in get_exercise_services(conn).await? 
{\n\n if let Ok(info) = get_service_info_by_exercise_service(conn, &exercise_service).await {\n\n exercise_services_by_type\n\n .insert(exercise_service.slug.clone(), (exercise_service, info));\n\n } else {\n\n tracing::error!(\n\n \"No corresponding service info found for {} ({})\",\n", "file_path": "services/headless-lms/models/src/exercise_service_info.rs", "rank": 83, "score": 13.459354992394816 }, { "content": "\n\n let (sender, receiver) = tokio::sync::mpsc::unbounded_channel::<ControllerResult<Bytes>>();\n\n\n\n // spawn handle that writes the csv row by row into the sender\n\n let mut handle_conn = pool.acquire().await?;\n\n let _handle = tokio::spawn(async move {\n\n let res =\n\n csv_export::export_exam_points(&mut handle_conn, exam_id, CSVExportAdapter { sender })\n\n .await;\n\n if let Err(err) = res {\n\n tracing::error!(\"Failed to export exam points: {}\", err);\n\n }\n\n });\n\n\n\n let exam = exams::get(&mut conn, exam_id).await?;\n\n\n\n // return response that streams data from the receiver\n\n Ok(HttpResponse::Ok()\n\n .append_header((\n\n \"Content-Disposition\",\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/exams.rs", "rank": 84, "score": 13.133297616423906 }, { "content": " }\n\n\n\n struct WriteAdapter {\n\n sender: Sender<Bytes>,\n\n }\n\n\n\n impl Write for WriteAdapter {\n\n fn flush(&mut self) -> std::io::Result<()> {\n\n Ok(())\n\n }\n\n\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n let bytes = Bytes::copy_from_slice(buf);\n\n self.sender\n\n .send(bytes)\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;\n\n Ok(buf.len())\n\n }\n\n }\n\n}\n", "file_path": "services/headless-lms/server/src/domain/csv_export.rs", "rank": 85, "score": 13.12560993488766 }, { "content": "//! Re-exports commonly used types for convenient use across the crate.\n\n//! 
Intended to be glob-imported like `use crate::prelude::*;`.\n\n\n\npub use chrono::{DateTime, Utc};\n\npub use headless_lms_utils::pagination::Pagination;\n\npub use serde::{Deserialize, Serialize};\n\npub use sqlx::{Connection, FromRow, PgConnection, Type};\n\npub use ts_rs::TS;\n\npub use uuid::Uuid;\n\n\n\npub use crate::{ModelError, ModelResult};\n", "file_path": "services/headless-lms/models/src/prelude.rs", "rank": 86, "score": 13.067773320225598 }, { "content": " fetched_service_info\n\n };\n\n Ok(service_info)\n\n}\n\n\n\n/**\n\nReturns service info meant for the course material. If no service info is found and fetching it fails, we return None to\n\nindicate that the service info is unavailable.\n\n*/\n\npub async fn get_course_material_service_info_by_exercise_type(\n\n conn: &mut PgConnection,\n\n exercise_type: &str,\n\n) -> ModelResult<Option<CourseMaterialExerciseServiceInfo>> {\n\n if let Ok(exercise_service) = get_exercise_service_by_exercise_type(conn, exercise_type).await {\n\n let full_service_info = get_service_info_by_exercise_service(conn, &exercise_service).await;\n\n let service_info_option = if let Ok(o) = full_service_info {\n\n // Need to convert relative url to absolute url because\n\n // otherwise the material won't be able to request the path\n\n // if the path is in a different domain\n\n let mut url = Url::parse(&exercise_service.public_url)\n", "file_path": "services/headless-lms/models/src/exercise_service_info.rs", "rank": 87, "score": 12.983366716957761 }, { "content": "/*!\n\nHandlers for HTTP requests to `/api/v0/login`.\n\n*/\n\n\n\nuse actix_session::Session;\n\nuse models::users::User;\n\nuse oauth2::{\n\n basic::{BasicErrorResponseType, BasicTokenType},\n\n EmptyExtraTokenFields, RequestTokenError, ResourceOwnerPassword, ResourceOwnerUsername,\n\n StandardErrorResponse, StandardTokenResponse, TokenResponse,\n\n};\n\nuse reqwest::Client;\n\nuse url::form_urlencoded::Target;\n\n\n\nuse crate::{controllers::prelude::*, 
domain::authorization, OAuthClient};\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, TS)]\n\npub struct Login {\n\n email: String,\n\n password: String,\n", "file_path": "services/headless-lms/server/src/controllers/auth.rs", "rank": 88, "score": 12.884874128387839 }, { "content": " #[instrument]\n\n pub fn new(bucket_name: String) -> Result<Self, UtilError> {\n\n let client = Client::default();\n\n\n\n Ok(Self {\n\n bucket_name,\n\n client,\n\n })\n\n }\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl FileStore for GoogleCloudFileStore {\n\n async fn upload(&self, path: &Path, file: Vec<u8>, mime_type: &str) -> Result<(), UtilError> {\n\n self.client\n\n .object()\n\n .create(&self.bucket_name, file, path_to_str(path)?, mime_type)\n\n .await?;\n\n Ok(())\n\n }\n", "file_path": "services/headless-lms/utils/src/file_store/google_cloud_file_store.rs", "rank": 89, "score": 12.609550413126348 }, { "content": " )\n\n .await?;\n\n\n\n let submission = models::submissions::get_by_id(&mut conn, *submission_id).await?;\n\n let exercise = models::exercises::get_by_id(&mut conn, submission.exercise_id).await?;\n\n let exercise_task =\n\n models::exercise_tasks::get_exercise_task_by_id(&mut conn, submission.exercise_task_id)\n\n .await?;\n\n let grading = if let Some(id) = submission.grading_id {\n\n Some(models::gradings::get_by_id(&mut conn, id).await?)\n\n } else {\n\n None\n\n };\n\n let exercise_service_info = models::exercise_service_info::get_service_info_by_exercise_type(\n\n &mut conn,\n\n &exercise_task.exercise_type,\n\n )\n\n .await?;\n\n\n\n Ok(web::Json(SubmissionInfo {\n\n submission,\n\n exercise,\n\n exercise_task,\n\n grading,\n\n iframe_path: exercise_service_info.exercise_type_specific_user_interface_iframe,\n\n }))\n\n}\n\n\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/submissions.rs", "rank": 90, "score": 12.353789019396267 }, { "content": " }\n\n } else {\n\n let msg = format!(\n\n \"No exercise services found for 
type {}\",\n\n exercise_task.exercise_type,\n\n );\n\n tracing::error!(\"{}\", msg);\n\n models::gradings::set_grading_progress(\n\n &mut *conn,\n\n not_ready_grading.id,\n\n GradingProgress::Failed,\n\n )\n\n .await?;\n\n regrading_status\n\n .missing_exercise_services\n\n .insert(exercise_task.exercise_type);\n\n }\n\n }\n\n Ok(regrading_status)\n\n}\n\n\n", "file_path": "services/headless-lms/server/src/regrading.rs", "rank": 91, "score": 12.324294186490999 }, { "content": "## Interacting with the backend\n\n\n\nYou can use an axios instance to avoid repeating the root of the API URL for every request. For example, `main-frontend` has the following client:\n\n\n\n```ts\n\nexport const mainFrontendClient = axios.create({ baseURL: \"/api/v0/main-frontend\" })\n\n```\n\n\n\n`shared-module` contains types (in `bindings.ts`) and guards (in `bindings.guard`) generated from the backend types as well as other helper functions (in `utils`) which should be used when interacting with the backend. 
For example, `main-frontend` fetches `/api/v0/main-frontend/organizations` with\n\n\n\n```ts\n\nimport { Organization } from \"../../shared-module/bindings\"\n\nimport { isOrganization } from \"../../shared-module/bindings.guard\"\n\nimport { isArray, validateResponse } from \"../../shared-module/utils/fetching\"\n\n\n\nexport const fetchOrganizations = async (): Promise<Array<Organization>> => {\n\n // first, we get a response from the API using mainFrontendClient\n\n const response = await mainFrontendClient.get(\"/organizations\", { responseType: \"json\" })\n\n // then we call validateResponse with the response and a guard that checks that the data's type is Array<Organization>\n\n return validateResponse(response, isArray(isOrganization))\n\n}\n\n```\n", "file_path": "docs/frontend.md", "rank": 92, "score": 12.298896778095603 }, { "content": " return db.clone();\n\n }\n\n dotenv::dotenv().ok();\n\n let db = env::var(\"DATABASE_URL_TEST\").unwrap_or_else(|_| {\n\n \"postgres://headless-lms@localhost:54328/headless_lms_test\".to_string()\n\n });\n\n if Postgres::database_exists(&db)\n\n .await\n\n .expect(\"failed to check test db existence\")\n\n {\n\n Postgres::drop_database(&db)\n\n .await\n\n .expect(\"failed to drop test db\");\n\n }\n\n Postgres::create_database(&db)\n\n .await\n\n .expect(\"failed to create test db\");\n\n let mut conn = PgConnection::connect(&db)\n\n .await\n\n .expect(\"failed to connect to test db\");\n", "file_path": "services/headless-lms/server/tests/integration_test.rs", "rank": 93, "score": 12.114809044037548 }, { "content": " .fetch_all(&mut *conn)\n\n .await?;\n\n let mut exercise_services_by_type = HashMap::new();\n\n for exercise_service in selected_services {\n\n let info = get_service_info_by_exercise_service(conn, &exercise_service).await?;\n\n exercise_services_by_type.insert(exercise_service.slug.clone(), (exercise_service, info));\n\n }\n\n Ok(exercise_services_by_type)\n\n}\n\n\n\npub async fn 
get_service_info_by_exercise_service(\n\n conn: &mut PgConnection,\n\n exercise_service: &ExerciseService,\n\n) -> ModelResult<ExerciseServiceInfo> {\n\n let res = get_service_info(conn, exercise_service.id).await;\n\n let service_info = if let Ok(exercise_service_info) = res {\n\n exercise_service_info\n\n } else {\n\n warn!(\"Could not find service info for service. This is rare and only should happen when a background worker has not had the opportunity to complete their fetching task yet. Trying the fetching here in this worker so that we can continue.\");\n\n let fetched_service_info = fetch_and_upsert_service_info(conn, exercise_service).await?;\n", "file_path": "services/headless-lms/models/src/exercise_service_info.rs", "rank": 94, "score": 12.108565852855941 }, { "content": "use models::users::User;\n\n\n\nuse crate::controllers::prelude::*;\n\n\n\n/**\n\nGET `/api/v0/main-frontend/users/:id`\n\n*/\n\n#[generated_doc(User)]\n\n#[instrument(skip(pool))]\n\npub async fn get_user(\n\n user_id: web::Path<Uuid>,\n\n pool: web::Data<PgPool>,\n\n) -> ControllerResult<web::Json<User>> {\n\n let mut conn = pool.acquire().await?;\n\n let user = models::users::get_by_id(&mut conn, *user_id).await?;\n\n Ok(web::Json(user))\n\n}\n\n\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/users.rs", "rank": 95, "score": 11.958490335216194 }, { "content": " exercise_service.name,\n\n exercise_service.id\n\n );\n\n }\n\n }\n\n Ok(exercise_services_by_type)\n\n}\n\n\n\npub async fn get_selected_exercise_services_by_type(\n\n conn: &mut PgConnection,\n\n slugs: &[String],\n\n) -> ModelResult<HashMap<String, (ExerciseService, ExerciseServiceInfo)>> {\n\n let selected_services = sqlx::query_as!(\n\n ExerciseService,\n\n \"\n\nSELECT *\n\nFROM exercise_services\n\nWHERE slug = ANY($1);\",\n\n slugs,\n\n )\n", "file_path": "services/headless-lms/models/src/exercise_service_info.rs", "rank": 96, "score": 11.922259358795452 }, { "content": " 
course_instance_id,\n\n CSVExportAdapter { sender },\n\n )\n\n .await;\n\n if let Err(err) = res {\n\n tracing::error!(\"Failed to export course instance points: {}\", err);\n\n }\n\n });\n\n\n\n let course_instance =\n\n course_instances::get_course_instance(&mut conn, course_instance_id).await?;\n\n let course = courses::get_course(&mut conn, course_instance.course_id).await?;\n\n\n\n // return response that streams data from the receiver\n\n Ok(HttpResponse::Ok()\n\n .append_header((\n\n \"Content-Disposition\",\n\n format!(\n\n \"attachment; filename=\\\"{} - {} - Point export {}.csv\\\"\",\n\n course.name,\n", "file_path": "services/headless-lms/server/src/controllers/main_frontend/course_instances.rs", "rank": 97, "score": 11.904544792100545 }, { "content": "use chrono::{DateTime, Duration, Utc};\n\nuse models::{\n\n exams::{self, ExamEnrollment},\n\n pages::{self, Page},\n\n};\n\n\n\nuse crate::controllers::prelude::*;\n\n\n\n/**\n\nGET /api/v0/course-material/exams/:id/enrollment\n\n*/\n\n#[generated_doc(Option<ExamEnrollment>)]\n\npub async fn enrollment(\n\n pool: web::Data<PgPool>,\n\n exam_id: web::Path<Uuid>,\n\n user: AuthUser,\n\n) -> ControllerResult<web::Json<Option<ExamEnrollment>>> {\n\n let mut conn = pool.acquire().await?;\n\n let enrollment = exams::get_enrollment(&mut conn, *exam_id, user.id).await?;\n\n Ok(web::Json(enrollment))\n", "file_path": "services/headless-lms/server/src/controllers/course_material/exams.rs", "rank": 98, "score": 11.799923765521443 }, { "content": "use url::Url;\n\n\n\n/// The entrypoint to the application.\n\n#[actix_web::main]\n\nasync fn main() -> Result<()> {\n\n dotenv().ok();\n\n setup_tracing()?;\n\n\n\n // read environment variables\n\n let database_url = env::var(\"DATABASE_URL\")\n\n .unwrap_or_else(|_| \"postgres://localhost/headless_lms_dev\".to_string());\n\n let oauth_application_id =\n\n env::var(\"OAUTH_APPLICATION_ID\").expect(\"OAUTH_APPLICATION_ID must be defined\");\n\n let oauth_secret = 
env::var(\"OAUTH_SECRET\").expect(\"OAUTH_SECRET must be defined\");\n\n let private_cookie_key =\n\n env::var(\"PRIVATE_COOKIE_KEY\").expect(\"PRIVATE_COOKIE_KEY must be defined\");\n\n let base_url = env::var(\"BASE_URL\").expect(\"BASE_URL must be defined\");\n\n let test_mode = env::var(\"TEST_MODE\").is_ok();\n\n let allow_no_https_for_development = env::var(\"ALLOW_NO_HTTPS_FOR_DEVELOPMENT\").is_ok();\n\n if test_mode {\n", "file_path": "services/headless-lms/server/src/main.rs", "rank": 99, "score": 11.734294010521694 } ]
Rust
src/lib.rs
Elinvynia/schwifty
e0233bf836af6382e5565801df452baf7c9f935b
#![forbid(unsafe_code)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] pub use crate::country::Country; pub use crate::error::ValidationError; use std::str::FromStr; pub mod country; pub(crate) mod country_specific; pub mod error; #[allow(clippy::all)] pub(crate) mod u256 { uint::construct_uint! { pub(crate) struct U256(4); } } #[derive(Debug)] #[non_exhaustive] pub struct Iban { pub country: Country, pub(crate) raw: String, } impl Iban { pub fn account_number(&self) -> String { self.country.account_number(&self.raw) } pub fn bank_code(&self) -> String { self.country.bank_code(&self.raw) } pub fn country_code(&self) -> String { self.country.to_string() } pub fn raw(&self) -> &str { &self.raw } } pub fn validate<I: AsRef<str>>(input: I) -> Result<Iban, ValidationError> { let input = input.as_ref(); let input: String = input.split_whitespace().collect(); if input.len() > 34 { return Err(ValidationError::TooLong); }; if !input.chars().all(|ch| ch.is_alphanumeric()) { return Err(ValidationError::InvalidChar); }; if input.len() < 2 { return Err(ValidationError::InvalidCountryCode); }; let country_code = &input[0..2]; let country = match Country::from_str(country_code) { Ok(c) => c, Err(_) => return Err(ValidationError::InvalidCountryCode), }; if input.len() != country.length() { return Err(ValidationError::InvalidLength); } if !country.format().is_match(&input) { return Err(ValidationError::InvalidFormat); } if !country.custom_validation(&input) { return Err(ValidationError::CountryCheckFailed); } let (start, rest) = input.split_at(4); let mut rearranged = String::with_capacity(34); rearranged.push_str(rest); rearranged.push_str(start); let mut integer_string = String::with_capacity(34); for ch in rearranged.chars() { if ch.is_numeric() { integer_string.push(ch); } else { let x = ch.to_digit(36).unwrap().to_string(); integer_string.push_str(&x) } } let integer = u256::U256::from_dec_str(&integer_string).unwrap(); if integer % 97 != 1.into() { return 
Err(ValidationError::InvalidIban); } Ok(Iban { country, raw: input, }) }
#![forbid(unsafe_code)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] pub use crate::country::Country; pub use crate::error::ValidationError; use std::str::FromStr; pub mod country; pub(crate) mod country_specific; pub mod error; #[allow(clippy::all)] pub(crate) mod u256 { uint::construct_uint! { pub(crate) struct U256(4); } } #[derive(Debug)] #[non_exhaustive] pub struct Iban { pub country: Country, pub(crate) raw: String, } impl Iban { pub fn account_number(&self) -> String { self.country.account_number(&self.raw) } pub fn bank_code(&self) -> String { self.country.bank_code(&self.raw) } pub fn country_code(&self) -> String { self.country.to_string() } pub fn raw(&self) -> &str { &self.raw } } pub fn validate<I: AsRef<str>>(input: I) -> Result<Iban, ValidationError> { let input = input.as_ref(); let input: String = input.split_whitespace().collect(); if input.len() > 34 { return Err(ValidationError::TooLong); }; if !input.chars().all(|ch| ch.is_alphanumeric()) { return Err(ValidationError::InvalidChar); }; if input.len() < 2 { return Err(ValidationError::InvalidCountryCode); }; let country_code = &input[0..2]; let country = match Country::from_str(country_code) { Ok(c) => c, Err(_) => return Err(ValidationError::InvalidCountryCode), }; if input.len() != country.length() { return Err(ValidationError::InvalidLength); } if !country.format().is_match(&input) { return Err(ValidationError::InvalidFormat); } if !country.custom_validation(&input) { return Err(ValidationError::CountryCheckFailed); } let (start, rest) = input.split_at(4); let mut rearranged = String::with_capacity(34); rearranged.push_str(rest); rearranged.push_str(start); let mut integer_string = String::with_capacity(34); for ch in rearranged.chars() {
} let integer = u256::U256::from_dec_str(&integer_string).unwrap(); if integer % 97 != 1.into() { return Err(ValidationError::InvalidIban); } Ok(Iban { country, raw: input, }) }
if ch.is_numeric() { integer_string.push(ch); } else { let x = ch.to_digit(36).unwrap().to_string(); integer_string.push_str(&x) }
if_condition
[ { "content": "fn c(num: usize) -> String {\n\n format!(\"[a-zA-Z0-9]{{{}}}\", num)\n\n}\n\n\n", "file_path": "src/country.rs", "rank": 1, "score": 59182.76963598044 }, { "content": "fn a(num: usize) -> String {\n\n format!(\"[A-Z]{{{}}}\", num)\n\n}\n\n\n\n// Almost-DSL macro for comfortable calling of above functions.\n\nmacro_rules! f {\n\n ($($arg:expr),+) => {\n\n {\n\n let mut s = String::with_capacity(34);\n\n $(s.push_str(&$arg);)*\n\n s\n\n }\n\n }\n\n}\n\n\n\nimpl Country {\n\n pub(crate) fn length(&self) -> usize {\n\n use Country::*;\n\n match self {\n\n Albania => 28,\n", "file_path": "src/country.rs", "rank": 2, "score": 53756.44061946997 }, { "content": "// Helper functions for the regex groups.\n\nfn n(num: usize) -> String {\n\n format!(\"[0-9]{{{}}}\", num)\n\n}\n\n\n", "file_path": "src/country.rs", "rank": 3, "score": 53756.44061946997 }, { "content": "#[test]\n\nfn validate_iban_empty() {\n\n assert!(matches!(\n\n validate(\"\"),\n\n Err(ValidationError::InvalidCountryCode)\n\n ));\n\n assert!(matches!(\n\n validate(\"F\"),\n\n Err(ValidationError::InvalidCountryCode)\n\n ));\n\n assert!(matches!(\n\n validate(\"FR\"),\n\n Err(ValidationError::InvalidLength)\n\n ));\n\n}\n", "file_path": "tests/iban.rs", "rank": 4, "score": 47779.83207378257 }, { "content": "#[test]\n\nfn validate_iban_basic() {\n\n validate(\"AL47 2121 1009 0000 0002 3569 8741\").unwrap(); // Albania\n\n validate(\"AD12 0001 2030 2003 5910 0100\").unwrap(); // Andorra\n\n validate(\"AT61 1904 3002 3457 3201\").unwrap(); // Austria\n\n validate(\"AZ21 NABZ 0000 0000 1370 1000 1944\").unwrap(); // Republic of Azerbaijan\n\n validate(\"BH67 BMAG 0000 1299 1234 56\").unwrap(); // Bahrain\n\n validate(\"BE68 5390 0754 7034\").unwrap(); // Belgium\n\n validate(\"BA39 1290 0794 0102 8494\").unwrap(); // Bosnia and Herzegovina\n\n validate(\"BR97 0036 0305 0000 1000 9795 493P 1\").unwrap(); // Brazil\n\n validate(\"BR18 0000 0000 1414 5512 3924 100C 2\").unwrap(); // Brazil\n\n 
validate(\"BG80 BNBG 9661 1020 3456 78\").unwrap(); // Bulgaria\n\n validate(\"CR05 0152 0200 1026 2840 66\").unwrap(); // Costa Rica\n\n validate(\"HR12 1001 0051 8630 0016 0\").unwrap(); // Croatia\n\n validate(\"CY17 0020 0128 0000 0012 0052 7600\").unwrap(); // Cyprus\n\n validate(\"CZ65 0800 0000 1920 0014 5399\").unwrap(); // Czech Republic\n\n validate(\"CZ94 5500 0000 0010 1103 8930\").unwrap(); // Czech Republic\n\n validate(\"DK50 0040 0440 1162 43\").unwrap(); // Greenland\n\n validate(\"FO62 6460 0001 6316 34\").unwrap(); // Faroer\n\n validate(\"GL89 6471 0001 0002 06\").unwrap(); // Denmark\n\n validate(\"DO28 BAGR 0000 0001 2124 5361 1324\").unwrap(); // Dominican Republic\n", "file_path": "tests/iban.rs", "rank": 5, "score": 47779.83207378257 }, { "content": " CountryCheckFailed,\n\n}\n\n\n\nimpl std::error::Error for ValidationError {}\n\n\n\nimpl Display for ValidationError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use ValidationError::*;\n\n let msg = match self {\n\n TooLong => \"Input is longer than 34 characters.\",\n\n InvalidChar => \"Input contains at least one invalid character.\",\n\n InvalidIban => \"IBAN mod 97 checksum is invalid.\",\n\n InvalidCountryCode => \"Input doesn't contain a supported country.\",\n\n InvalidLength => \"Input is invalid length for the detected country.\",\n\n InvalidFormat => \"IBAN has the wrong format for the detected country.\",\n\n CountryCheckFailed => \"Failed custom country-specific check.\",\n\n };\n\n write!(f, \"{}\", msg)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 6, "score": 19927.77609680602 }, { "content": "//! 
Module holding the error type.\n\n\n\nuse std::fmt::{self, Display};\n\n\n\n/// Error type for errors originating from this crate.\n\n#[derive(Debug)]\n\npub enum ValidationError {\n\n /// IBAN can be at most 34 characters long.\n\n TooLong,\n\n /// IBAN cannot contain non-alphanumeric characters.\n\n InvalidChar,\n\n /// The IBAN checksum was invalid (remainder of mod 97 was not 1).\n\n InvalidIban,\n\n /// Input didn't contain a supported country code.\n\n InvalidCountryCode,\n\n /// IBAN length didn't match the detected country.\n\n InvalidLength,\n\n /// The format was wrong for the detected country.\n\n InvalidFormat,\n\n /// A custom check this country implements has failed.\n", "file_path": "src/error.rs", "rank": 7, "score": 19925.547893698033 }, { "content": "use schwifty::*;\n\n\n\n#[test]\n", "file_path": "tests/iban.rs", "rank": 8, "score": 19482.452540499035 }, { "content": " validate(\"LT12 1000 0111 0100 1000\").unwrap(); // Lithuania\n\n validate(\"LU28 0019 4006 4475 0000\").unwrap(); // Luxembourg\n\n validate(\"MK07 2501 2000 0058 984\").unwrap(); // Macedonia\n\n validate(\"MT84 MALT 0110 0001 2345 MTLC AST0 01S\").unwrap(); // Malta\n\n validate(\"MR13 0002 0001 0100 0012 3456 753\").unwrap(); // Mauritania\n\n validate(\"MU17 BOMM 0101 1010 3030 0200 000M UR\").unwrap(); // Mauritius\n\n validate(\"MD24 AG00 0225 1000 1310 4168\").unwrap(); // Moldova\n\n validate(\"MC58 1122 2000 0101 2345 6789 030\").unwrap(); // Monaco\n\n validate(\"ME25 5050 0001 2345 6789 51\").unwrap(); // Montenegro\n\n validate(\"NL91 ABNA 0417 1643 00\").unwrap(); // The Netherlands\n\n validate(\"NO93 8601 1117 947\").unwrap(); // Norway\n\n validate(\"PK36 SCBL 0000 0011 2345 6702\").unwrap(); // Pakistan\n\n validate(\"PS92 PALS 0000 0000 0400 1234 5670 2\").unwrap(); // Palestine\n\n validate(\"PL61 1090 1014 0000 0712 1981 2874\").unwrap(); // Poland\n\n validate(\"PT50 0002 0123 1234 5678 9015 4\").unwrap(); // Portugal\n\n validate(\"QA58 DOHB 0000 1234 5678 
90AB CDEF G\").unwrap(); // Qatar\n\n validate(\"RO49 AAAA 1B31 0075 9384 0000\").unwrap(); // Romania\n\n validate(\"LC55 HEMM 0001 0001 0012 0012 00023015\").unwrap(); // Saint Lucia\n\n validate(\"SM86 U032 2509 8000 0000 0270 100\").unwrap(); // San Marino\n\n validate(\"ST68 0001 0001 0051 8453 1011 2\").unwrap(); // Sao Tome And Principe\n", "file_path": "tests/iban.rs", "rank": 9, "score": 19479.731836542884 }, { "content": " validate(\"SA03 8000 0000 6080 1016 7519\").unwrap(); // Saudi Arabia\n\n validate(\"RS35 2600 0560 1001 6113 79\").unwrap(); // Serbia\n\n validate(\"SC18 SSCB 1101 0000 0000 0000 1497 USD\").unwrap(); // Seychelles\n\n validate(\"SK31 1200 0000 1987 4263 7541\").unwrap(); // Slovak Republic\n\n validate(\"SI56 1910 0000 0123 438\").unwrap(); // Slovenia\n\n validate(\"ES91 2100 0418 4502 0005 1332\").unwrap(); // Spain\n\n validate(\"SE45 5000 0000 0583 9825 7466\").unwrap(); // Sweden\n\n validate(\"CH93 0076 2011 6238 5295 7\").unwrap(); // Switzerland\n\n validate(\"TL38 0080 0123 4567 8910 157\").unwrap(); // Timor-Leste\n\n validate(\"TN59 1000 6035 1835 9847 8831\").unwrap(); // Tunisia\n\n validate(\"TR33 0006 1005 1978 6457 8413 26\").unwrap(); // Turkey\n\n validate(\"UA21 3996 2200 0002 6007 2335 6600 1\").unwrap(); // Ukraine\n\n validate(\"AE07 0331 2345 6789 0123 456\").unwrap(); // United Arab Emirates\n\n validate(\"GB29 NWBK 6016 1331 9268 19\").unwrap(); // United Kingdom\n\n validate(\"VG96 VPVG 0000 0123 4567 8901\").unwrap(); // Virgin Islands, British\n\n validate(\"BY13 NBRB 3600 9000 0000 2Z00 AB00\").unwrap(); // Republic of Belarus\n\n validate(\"SV62 CENR 0000 0000 0000 0070 0025\").unwrap(); // El Salvador\n\n validate(\"FO62 6460 0001 6316 34\").unwrap(); // Faroe Islands\n\n validate(\"GL89 6471 0001 0002 06\").unwrap(); // Grenland\n\n validate(\"IQ98 NBIQ 8501 2345 6789 012\").unwrap(); // Iraq\n\n validate(\"AA11 0011 123Z 5678\").unwrap(); // Internet\n\n}\n\n\n", "file_path": "tests/iban.rs", "rank": 
10, "score": 19479.731836542884 }, { "content": " validate(\"EE38 2200 2210 2014 5685\").unwrap(); // Estonia\n\n validate(\"FI21 1234 5600 0007 85\").unwrap(); // Finland\n\n validate(\"FR14 2004 1010 0505 0001 3M02 606\").unwrap(); // France\n\n validate(\"GE29 NB00 0000 0101 9049 17\").unwrap(); // Georgia\n\n validate(\"DE89 3704 0044 0532 0130 00\").unwrap(); // Germany\n\n validate(\"GI75 NWBK 0000 0000 7099 453\").unwrap(); // Gibraltar\n\n validate(\"GR16 0110 1250 0000 0001 2300 695\").unwrap(); // Greece\n\n validate(\"GT82 TRAJ 0102 0000 0012 1002 9690\").unwrap(); // Guatemala\n\n validate(\"HU42 1177 3016 1111 1018 0000 0000\").unwrap(); // Hungary\n\n validate(\"IS14 0159 2600 7654 5510 7303 39\").unwrap(); // Iceland\n\n validate(\"IE29 AIBK 9311 5212 3456 78\").unwrap(); // Ireland\n\n validate(\"IL62 0108 0000 0009 9999 999\").unwrap(); // Israel\n\n validate(\"IT60 X054 2811 1010 0000 0123 456\").unwrap(); // Italy\n\n validate(\"JO94 CBJO 0010 0000 0000 0131 0003 02\").unwrap(); // Jordan\n\n validate(\"KZ86 125K ZT50 0410 0100\").unwrap(); // Kazakhstan\n\n validate(\"XK05 1212 0123 4567 8906\").unwrap(); // Republic of Kosovo\n\n validate(\"KW81 CBKU 0000 0000 0000 1234 5601 01\").unwrap(); // Kuwait\n\n validate(\"LV80 BANK 0000 4351 9500 1\").unwrap(); // Latvia\n\n validate(\"LB62 0999 0000 0001 0019 0122 9114\").unwrap(); // Lebanon\n\n validate(\"LI21 0881 0000 2324 013A A\").unwrap(); // Liechtenstein\n", "file_path": "tests/iban.rs", "rank": 11, "score": 19479.731836542884 }, { "content": " Ukraine => f!(n(6), c(19)),\n\n UnitedArabEmirates => f!(n(3), n(16)),\n\n UnitedKingdom => f!(a(4), n(14)),\n\n VaticanCity => f!(n(3), n(15)),\n\n VirginIslands => f!(c(4), n(16)),\n\n };\n\n\n\n Regex::new(&format).unwrap()\n\n }\n\n\n\n // Same reference as above.\n\n pub(crate) fn account_number(&self, input: &str) -> String {\n\n use Country::*;\n\n let end = input.len() - 1;\n\n let (start, stop) = match self {\n\n Albania | Andorra | Belarus | 
Cyprus | Egypt | Germany | Guatemala | Jordan\n\n | Poland | SaoTomePrincipe => (12, end),\n\n Azerbaijan\n\n | Bahrain\n\n | CostaRica\n", "file_path": "src/country.rs", "rank": 12, "score": 14717.416536602814 }, { "content": " };\n\n\n\n input[start..=stop].into()\n\n }\n\n\n\n // Same reference as above.\n\n pub(crate) fn bank_code(&self, input: &str) -> String {\n\n use Country::*;\n\n // b = part of bank code, x = other\n\n let (start, stop) = match self {\n\n // bbxx xxxx xxxx\n\n Estonia | Georgia | Iceland | Moldova | SaudiArabia | Slovenia | Tunisia => (4, 5),\n\n\n\n // bbbx xxxx xxxx\n\n Albania | Belgium | BosniaHerzegovina | Cyprus | EastTimor | Greece | Hungary\n\n | Israel | Kazakhstan | Libya | Luxembourg | NorthMacedonia | Montenegro | Poland\n\n | Serbia | Sweden | UnitedArabEmirates | VaticanCity => (4, 6),\n\n\n\n // bbbb xxxx xxxx\n\n Andorra\n", "file_path": "src/country.rs", "rank": 13, "score": 14716.309075175594 }, { "content": "//! Countries supported by IBAN and helper methods.\n\n\n\nuse regex::Regex;\n\nuse std::fmt::{self, Display};\n\nuse std::str::FromStr;\n\n\n\n/// These are the IBAN-supported countries.\n\n#[allow(missing_docs)]\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[non_exhaustive]\n\npub enum Country {\n\n Albania,\n\n Andorra,\n\n Austria,\n\n Azerbaijan,\n\n Bahrain,\n\n Belarus,\n\n Belgium,\n\n BosniaHerzegovina,\n\n Brazil,\n", "file_path": "src/country.rs", "rank": 14, "score": 14715.408900602592 }, { "content": " \"TR\" => Ok(Country::Turkey),\n\n \"UA\" => Ok(Country::Ukraine),\n\n \"AE\" => Ok(Country::UnitedArabEmirates),\n\n \"GB\" => Ok(Country::UnitedKingdom),\n\n \"VA\" => Ok(Country::VaticanCity),\n\n \"VG\" => Ok(Country::VirginIslands),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Country {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use Country::*;\n\n let code = match self {\n\n Albania => \"AL\",\n\n Andorra => \"AD\",\n\n Austria => \"AT\",\n\n Azerbaijan => 
\"AZ\",\n\n Bahrain => \"BH\",\n", "file_path": "src/country.rs", "rank": 15, "score": 14713.187033194896 }, { "content": "\n\n // To better understand this formatting, check here\n\n // https://en.wikipedia.org/wiki/International_Bank_Account_Number#IBAN_formats_by_country\n\n pub(crate) fn format(&self) -> Regex {\n\n use Country::*;\n\n let format = match self {\n\n Albania => f!(n(8), c(16)),\n\n Andorra => f!(n(8), c(12)),\n\n Austria => f!(n(16)),\n\n Azerbaijan => f!(c(4), n(20)),\n\n Bahrain => f!(a(4), c(14)),\n\n Belarus => f!(c(4), n(4), c(16)),\n\n Belgium => f!(n(12)),\n\n BosniaHerzegovina => f!(n(16)),\n\n Brazil => f!(n(23), a(1), c(1)),\n\n Bulgaria => f!(a(4), n(6), c(8)),\n\n CostaRica => f!(n(18)),\n\n Croatia => f!(n(17)),\n\n Cyprus => f!(n(8), c(16)),\n\n CzechRepublic => f!(n(20)),\n", "file_path": "src/country.rs", "rank": 16, "score": 14712.981098329385 }, { "content": "}\n\n\n\nimpl FromStr for Country {\n\n type Err = ();\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"AL\" => Ok(Country::Albania),\n\n \"AD\" => Ok(Country::Andorra),\n\n \"AT\" => Ok(Country::Austria),\n\n \"AZ\" => Ok(Country::Azerbaijan),\n\n \"BH\" => Ok(Country::Bahrain),\n\n \"BY\" => Ok(Country::Belarus),\n\n \"BE\" => Ok(Country::Belgium),\n\n \"BA\" => Ok(Country::BosniaHerzegovina),\n\n \"BR\" => Ok(Country::Brazil),\n\n \"BG\" => Ok(Country::Bulgaria),\n\n \"CR\" => Ok(Country::CostaRica),\n\n \"HR\" => Ok(Country::Croatia),\n\n \"CY\" => Ok(Country::Cyprus),\n", "file_path": "src/country.rs", "rank": 17, "score": 14712.491009297653 }, { "content": " Finland | Mauritius | Seychelles | Ukraine => (4, 9),\n\n\n\n // bbbb bbbx xxxx\n\n Croatia => (4, 10),\n\n\n\n // bbbb bbbb xxxx\n\n Brazil | Germany => (4, 11),\n\n\n\n // xbbb bbbb bbbb\n\n CostaRica => (5, 7),\n\n\n\n // xxxx bbbb bbxx\n\n Ireland => (8, 13),\n\n\n\n // xbbb bbxx xxxx\n\n Italy | SanMarino => (5, 9),\n\n };\n\n\n\n input[start..=stop].to_string()\n\n }\n", 
"file_path": "src/country.rs", "rank": 18, "score": 14709.802008661674 }, { "content": " \"NL\" => Ok(Country::Netherlands),\n\n \"NO\" => Ok(Country::Norway),\n\n \"PK\" => Ok(Country::Pakistan),\n\n \"PS\" => Ok(Country::PalestinianTerritories),\n\n \"PL\" => Ok(Country::Poland),\n\n \"PT\" => Ok(Country::Portugal),\n\n \"QA\" => Ok(Country::Qatar),\n\n \"RO\" => Ok(Country::Romania),\n\n \"LC\" => Ok(Country::SaintLucia),\n\n \"SM\" => Ok(Country::SanMarino),\n\n \"ST\" => Ok(Country::SaoTomePrincipe),\n\n \"SA\" => Ok(Country::SaudiArabia),\n\n \"RS\" => Ok(Country::Serbia),\n\n \"SC\" => Ok(Country::Seychelles),\n\n \"SK\" => Ok(Country::Slovakia),\n\n \"SI\" => Ok(Country::Slovenia),\n\n \"ES\" => Ok(Country::Spain),\n\n \"SE\" => Ok(Country::Sweden),\n\n \"CH\" => Ok(Country::Switzerland),\n\n \"TN\" => Ok(Country::Tunisia),\n", "file_path": "src/country.rs", "rank": 19, "score": 14708.63767456745 }, { "content": " \"IE\" => Ok(Country::Ireland),\n\n \"IL\" => Ok(Country::Israel),\n\n \"IT\" => Ok(Country::Italy),\n\n \"JO\" => Ok(Country::Jordan),\n\n \"KZ\" => Ok(Country::Kazakhstan),\n\n \"XK\" => Ok(Country::Kosovo),\n\n \"KW\" => Ok(Country::Kuwait),\n\n \"LV\" => Ok(Country::Latvia),\n\n \"LB\" => Ok(Country::Lebanon),\n\n \"LY\" => Ok(Country::Libya),\n\n \"LI\" => Ok(Country::Liechenstein),\n\n \"LT\" => Ok(Country::Lithuania),\n\n \"LU\" => Ok(Country::Luxembourg),\n\n \"MK\" => Ok(Country::NorthMacedonia),\n\n \"MT\" => Ok(Country::Malta),\n\n \"MR\" => Ok(Country::Mauritania),\n\n \"MU\" => Ok(Country::Mauritius),\n\n \"MC\" => Ok(Country::Monaco),\n\n \"MD\" => Ok(Country::Moldova),\n\n \"ME\" => Ok(Country::Montenegro),\n", "file_path": "src/country.rs", "rank": 20, "score": 14707.526435752268 }, { "content": " \"CZ\" => Ok(Country::CzechRepublic),\n\n \"DK\" => Ok(Country::Denmark),\n\n \"DO\" => Ok(Country::DominicanRepublic),\n\n \"TL\" => Ok(Country::EastTimor),\n\n \"EG\" => Ok(Country::Egypt),\n\n \"SV\" => Ok(Country::ElSalvador),\n\n 
\"EE\" => Ok(Country::Estonia),\n\n \"FO\" => Ok(Country::FaroeIslands),\n\n \"FI\" => Ok(Country::Finland),\n\n \"FR\" => Ok(Country::France),\n\n \"GE\" => Ok(Country::Georgia),\n\n \"DE\" => Ok(Country::Germany),\n\n \"GI\" => Ok(Country::Gibraltar),\n\n \"GR\" => Ok(Country::Greece),\n\n \"GL\" => Ok(Country::Greenland),\n\n \"GT\" => Ok(Country::Guatemala),\n\n \"HU\" => Ok(Country::Hungary),\n\n \"IS\" => Ok(Country::Iceland),\n\n \"IQ\" => Ok(Country::Iraq),\n\n \"AA\" => Ok(Country::Internet),\n", "file_path": "src/country.rs", "rank": 21, "score": 14707.512295561155 }, { "content": " Serbia => \"RS\",\n\n Seychelles => \"SC\",\n\n Slovakia => \"SK\",\n\n Slovenia => \"SI\",\n\n Spain => \"ES\",\n\n Sweden => \"SE\",\n\n Switzerland => \"CH\",\n\n Tunisia => \"TN\",\n\n Turkey => \"TR\",\n\n Ukraine => \"UA\",\n\n UnitedArabEmirates => \"AE\",\n\n UnitedKingdom => \"GB\",\n\n VaticanCity => \"VA\",\n\n VirginIslands => \"VG\",\n\n };\n\n write!(f, \"{}\", code)\n\n }\n\n}\n", "file_path": "src/country.rs", "rank": 22, "score": 14706.109240950182 }, { "content": " Norway => f!(n(11)),\n\n Pakistan => f!(c(4), n(16)),\n\n PalestinianTerritories => f!(c(4), n(21)),\n\n Poland => f!(n(24)),\n\n Portugal => f!(n(21)),\n\n Qatar => f!(a(4), c(21)),\n\n Romania => f!(a(4), c(16)),\n\n SaintLucia => f!(a(4), c(24)),\n\n SanMarino => f!(a(1), n(10), c(12)),\n\n SaoTomePrincipe => f!(n(21)),\n\n SaudiArabia => f!(n(2), c(18)),\n\n Serbia => f!(n(18)),\n\n Seychelles => f!(a(4), n(20), a(3)),\n\n Slovakia => f!(n(20)),\n\n Slovenia => f!(n(15)),\n\n Spain => f!(n(20)),\n\n Sweden => f!(n(20)),\n\n Switzerland => f!(n(5), c(12)),\n\n Tunisia => f!(n(20)),\n\n Turkey => f!(n(5), c(17)),\n", "file_path": "src/country.rs", "rank": 23, "score": 14704.330367331133 }, { "content": " | Malta\n\n | Netherlands\n\n | Norway\n\n | Pakistan\n\n | PalestinianTerritories\n\n | Portugal\n\n | Qatar\n\n | Romania\n\n | SaintLucia\n\n | SaoTomePrincipe\n\n | Slovakia\n\n | Spain\n\n | 
UnitedKingdom\n\n | VirginIslands => (4, 7),\n\n\n\n // bbbb bxxx xxxx\n\n Austria | France | Liechenstein | Lithuania | Mauritania | Monaco | Switzerland\n\n | Turkey => (4, 8),\n\n\n\n // bbbb bbxx xxxx\n", "file_path": "src/country.rs", "rank": 24, "score": 14704.330367331133 }, { "content": " Andorra => 24,\n\n Austria => 20,\n\n Azerbaijan => 28,\n\n Bahrain => 22,\n\n Belarus => 28,\n\n Belgium => 16,\n\n BosniaHerzegovina => 20,\n\n Brazil => 29,\n\n Bulgaria => 22,\n\n CostaRica => 22,\n\n Croatia => 21,\n\n Cyprus => 28,\n\n CzechRepublic => 24,\n\n Denmark => 18,\n\n DominicanRepublic => 28,\n\n EastTimor => 23,\n\n Egypt => 29,\n\n ElSalvador => 28,\n\n Estonia => 20,\n\n FaroeIslands => 18,\n", "file_path": "src/country.rs", "rank": 25, "score": 14704.330367331133 }, { "content": " Gibraltar => \"GI\",\n\n Greece => \"GR\",\n\n Greenland => \"GL\",\n\n Guatemala => \"GT\",\n\n Hungary => \"HU\",\n\n Iceland => \"IS\",\n\n Iraq => \"IQ\",\n\n Internet => \"AA\",\n\n Ireland => \"IE\",\n\n Israel => \"IL\",\n\n Italy => \"IT\",\n\n Jordan => \"JO\",\n\n Kazakhstan => \"KZ\",\n\n Kosovo => \"XK\",\n\n Kuwait => \"KW\",\n\n Latvia => \"LV\",\n\n Lebanon => \"LB\",\n\n Libya => \"LY\",\n\n Liechenstein => \"LI\",\n\n Lithuania => \"LT\",\n", "file_path": "src/country.rs", "rank": 26, "score": 14704.330367331133 }, { "content": " Denmark => f!(n(14)),\n\n DominicanRepublic => f!(a(4), n(20)),\n\n EastTimor => f!(n(19)),\n\n Egypt => f!(n(25)),\n\n ElSalvador => f!(a(4), n(20)),\n\n Estonia => f!(n(16)),\n\n FaroeIslands => f!(n(14)),\n\n Finland => f!(n(14)),\n\n France => f!(n(10), c(11), n(2)),\n\n Georgia => f!(c(2), n(16)),\n\n Germany => f!(n(18)),\n\n Gibraltar => f!(a(4), c(15)),\n\n Greece => f!(n(7), c(16)),\n\n Greenland => f!(n(14)),\n\n Guatemala => f!(c(4), c(20)),\n\n Hungary => f!(n(24)),\n\n Iceland => f!(n(22)),\n\n Internet => f!(c(12)),\n\n Iraq => f!(a(4), n(15)),\n\n Ireland => f!(c(4), n(14)),\n", "file_path": "src/country.rs", "rank": 
27, "score": 14704.330367331133 }, { "content": " Belarus => \"BY\",\n\n Belgium => \"BE\",\n\n BosniaHerzegovina => \"BA\",\n\n Brazil => \"BR\",\n\n Bulgaria => \"BG\",\n\n CostaRica => \"CR\",\n\n Croatia => \"HR\",\n\n Cyprus => \"CY\",\n\n CzechRepublic => \"CZ\",\n\n Denmark => \"DK\",\n\n DominicanRepublic => \"DO\",\n\n EastTimor => \"TL\",\n\n Egypt => \"EG\",\n\n ElSalvador => \"SV\",\n\n Estonia => \"EE\",\n\n FaroeIslands => \"FO\",\n\n Finland => \"FI\",\n\n France => \"FR\",\n\n Georgia => \"GE\",\n\n Germany => \"DE\",\n", "file_path": "src/country.rs", "rank": 28, "score": 14704.330367331133 }, { "content": " Hungary,\n\n Iceland,\n\n Internet,\n\n Iraq,\n\n Ireland,\n\n Israel,\n\n Italy,\n\n Jordan,\n\n Kazakhstan,\n\n Kosovo,\n\n Kuwait,\n\n Latvia,\n\n Lebanon,\n\n Libya,\n\n Liechenstein,\n\n Lithuania,\n\n Luxembourg,\n\n NorthMacedonia,\n\n Malta,\n\n Mauritania,\n", "file_path": "src/country.rs", "rank": 29, "score": 14704.330367331133 }, { "content": " SaintLucia => 32,\n\n SanMarino => 27,\n\n SaoTomePrincipe => 25,\n\n SaudiArabia => 24,\n\n Serbia => 22,\n\n Seychelles => 31,\n\n Slovakia => 24,\n\n Slovenia => 19,\n\n Spain => 24,\n\n Sweden => 24,\n\n Switzerland => 21,\n\n Tunisia => 24,\n\n Turkey => 26,\n\n Ukraine => 29,\n\n UnitedArabEmirates => 23,\n\n UnitedKingdom => 22,\n\n VaticanCity => 22,\n\n VirginIslands => 24,\n\n }\n\n }\n", "file_path": "src/country.rs", "rank": 30, "score": 14704.330367331133 }, { "content": " Luxembourg => \"LU\",\n\n NorthMacedonia => \"MK\",\n\n Malta => \"MT\",\n\n Mauritania => \"MR\",\n\n Mauritius => \"MU\",\n\n Monaco => \"MC\",\n\n Moldova => \"MD\",\n\n Montenegro => \"ME\",\n\n Netherlands => \"NL\",\n\n Norway => \"NO\",\n\n Pakistan => \"PK\",\n\n PalestinianTerritories => \"PS\",\n\n Poland => \"PL\",\n\n Portugal => \"PT\",\n\n Qatar => \"QA\",\n\n Romania => \"RO\",\n\n SaintLucia => \"LC\",\n\n SanMarino => \"SM\",\n\n SaoTomePrincipe => \"ST\",\n\n SaudiArabia => \"SA\",\n", 
"file_path": "src/country.rs", "rank": 31, "score": 14704.330367331133 }, { "content": " | Azerbaijan\n\n | Bahrain\n\n | Belarus\n\n | Bulgaria\n\n | CzechRepublic\n\n | Denmark\n\n | DominicanRepublic\n\n | Egypt\n\n | ElSalvador\n\n | FaroeIslands\n\n | Gibraltar\n\n | Greenland\n\n | Guatemala\n\n | Internet\n\n | Iraq\n\n | Jordan\n\n | Kosovo\n\n | Kuwait\n\n | Latvia\n\n | Lebanon\n", "file_path": "src/country.rs", "rank": 32, "score": 14704.330367331133 }, { "content": " Israel => f!(n(19)),\n\n Italy => f!(a(1), n(10), c(12)),\n\n Jordan => f!(a(4), n(22)),\n\n Kazakhstan => f!(n(3), c(13)),\n\n Kosovo => f!(n(4), n(10), n(2)),\n\n Kuwait => f!(a(4), c(22)),\n\n Latvia => f!(a(4), c(13)),\n\n Lebanon => f!(n(4), c(20)),\n\n Libya => f!(n(21)),\n\n Liechenstein => f!(n(5), c(12)),\n\n Lithuania => f!(n(16)),\n\n Luxembourg => f!(n(3), c(13)),\n\n NorthMacedonia => f!(n(3), c(10), n(2)),\n\n Malta => f!(a(4), n(5), c(18)),\n\n Mauritania => f!(n(23)),\n\n Mauritius => f!(a(4), n(19), a(3)),\n\n Monaco => f!(n(10), c(11), n(2)),\n\n Moldova => f!(c(2), c(18)),\n\n Montenegro => f!(n(18)),\n\n Netherlands => f!(a(4), n(10)),\n", "file_path": "src/country.rs", "rank": 33, "score": 14704.330367331133 }, { "content": " Brazil => (17, end - 2),\n\n Bulgaria | CzechRepublic | Slovakia | Spain => (14, end),\n\n Croatia | Greece | Iraq => (11, end),\n\n FaroeIslands | Norway | Estonia => (8, end - 1),\n\n Finland => (10, end - 1),\n\n France | Monaco => (14, end - 2),\n\n Georgia | Moldova | SaudiArabia => (6, end),\n\n Hungary => (12, end - 1),\n\n Iceland => (10, 15),\n\n Ireland | Mauritania | UnitedKingdom => (14, end),\n\n Israel | Libya => (10, end),\n\n Italy | SanMarino => (15, end),\n\n Kazakhstan | Luxembourg | Sweden | VaticanCity => (7, end),\n\n Malta => (13, end),\n\n Mauritius => (12, end - 6),\n\n Portugal => (12, end - 2),\n\n Seychelles => (13, end - 3),\n\n Slovenia => (8, end - 2),\n\n Tunisia => (9, end - 2),\n\n Turkey | Ukraine => (10, 
end),\n", "file_path": "src/country.rs", "rank": 34, "score": 14704.330367331133 }, { "content": " Mauritius,\n\n Monaco,\n\n Moldova,\n\n Montenegro,\n\n Netherlands,\n\n Norway,\n\n Pakistan,\n\n PalestinianTerritories,\n\n Poland,\n\n Portugal,\n\n Qatar,\n\n Romania,\n\n SaintLucia,\n\n SanMarino,\n\n SaoTomePrincipe,\n\n SaudiArabia,\n\n Serbia,\n\n Seychelles,\n\n Slovakia,\n\n Slovenia,\n", "file_path": "src/country.rs", "rank": 35, "score": 14704.330367331133 }, { "content": " Lebanon => 28,\n\n Libya => 25,\n\n Liechenstein => 21,\n\n Lithuania => 20,\n\n Luxembourg => 20,\n\n NorthMacedonia => 19,\n\n Malta => 31,\n\n Mauritania => 27,\n\n Mauritius => 30,\n\n Monaco => 27,\n\n Moldova => 24,\n\n Montenegro => 22,\n\n Netherlands => 18,\n\n Norway => 15,\n\n Pakistan => 24,\n\n PalestinianTerritories => 29,\n\n Poland => 28,\n\n Portugal => 25,\n\n Qatar => 29,\n\n Romania => 24,\n", "file_path": "src/country.rs", "rank": 36, "score": 14704.330367331133 }, { "content": " | Denmark\n\n | DominicanRepublic\n\n | ElSalvador\n\n | Gibraltar\n\n | Greenland\n\n | Internet\n\n | Kosovo\n\n | Kuwait\n\n | Latvia\n\n | Lebanon\n\n | Netherlands\n\n | Pakistan\n\n | PalestinianTerritories\n\n | Qatar\n\n | Romania\n\n | SaintLucia\n\n | VirginIslands => (8, end),\n\n Austria | Liechenstein | Lithuania | Switzerland | UnitedArabEmirates => (9, end),\n\n Belgium | EastTimor | NorthMacedonia | Montenegro | Serbia => (7, end - 2),\n\n BosniaHerzegovina => (10, end - 2),\n", "file_path": "src/country.rs", "rank": 37, "score": 14704.330367331133 }, { "content": " Finland => 18,\n\n France => 27,\n\n Georgia => 22,\n\n Germany => 22,\n\n Gibraltar => 23,\n\n Greece => 27,\n\n Greenland => 18,\n\n Guatemala => 28,\n\n Hungary => 28,\n\n Iceland => 26,\n\n Internet => 16,\n\n Iraq => 23,\n\n Ireland => 22,\n\n Israel => 23,\n\n Italy => 27,\n\n Jordan => 30,\n\n Kazakhstan => 20,\n\n Kosovo => 20,\n\n Kuwait => 30,\n\n Latvia => 21,\n", "file_path": "src/country.rs", 
"rank": 38, "score": 14704.330367331133 }, { "content": " Bulgaria,\n\n CostaRica,\n\n Croatia,\n\n Cyprus,\n\n CzechRepublic,\n\n Denmark,\n\n DominicanRepublic,\n\n EastTimor,\n\n Egypt,\n\n ElSalvador,\n\n Estonia,\n\n FaroeIslands,\n\n Finland,\n\n France,\n\n Georgia,\n\n Germany,\n\n Gibraltar,\n\n Greece,\n\n Greenland,\n\n Guatemala,\n", "file_path": "src/country.rs", "rank": 39, "score": 14704.330367331133 }, { "content": " Spain,\n\n Sweden,\n\n Switzerland,\n\n Tunisia,\n\n Turkey,\n\n Ukraine,\n\n UnitedArabEmirates,\n\n UnitedKingdom,\n\n VaticanCity,\n\n VirginIslands,\n\n}\n\n\n\n// Helper functions for the regex groups.\n", "file_path": "src/country.rs", "rank": 40, "score": 14704.330367331133 }, { "content": " let mut total = 0;\n\n for (ch, w) in branch_number.chars().zip([10, 5, 8, 4, 2, 1].iter()) {\n\n let ch = ch.to_digit(10).unwrap();\n\n total += ch * w;\n\n }\n\n\n\n total % 11 == 0\n\n }\n\n _ => true,\n\n }\n\n }\n\n\n\n pub(crate) fn check_digits(&self, input: &str) -> u32 {\n\n use Country::*;\n\n let end = input.len() - 1;\n\n let (start, stop) = match self {\n\n Albania => (12, 12),\n\n Belgium | Montenegro => (end - 1, end),\n\n _ => unreachable!(),\n\n };\n\n\n\n input[start..=stop].parse().unwrap()\n\n }\n\n}\n", "file_path": "src/country_specific.rs", "rank": 41, "score": 13573.890884370387 }, { "content": "use crate::country::Country;\n\n\n\nimpl Country {\n\n pub(crate) fn custom_validation(&self, input: &str) -> bool {\n\n use Country::*;\n\n match self {\n\n Albania => {\n\n let check_digit = self.check_digits(input);\n\n let account_number = &input[4..=11];\n\n\n\n let mut total = 0;\n\n for (ch, w) in account_number.chars().zip([9, 7, 3, 1, 9, 7, 3, 1].iter()) {\n\n let ch = ch.to_digit(10).unwrap();\n\n total += ch * w;\n\n }\n\n\n\n total % 10 == check_digit\n\n }\n\n Belgium => {\n\n let check_digits = self.check_digits(input) as u128;\n", "file_path": "src/country_specific.rs", "rank": 42, "score": 13573.842200057175 }, 
{ "content": " let check_number: u128 = input[4..=13].parse().unwrap();\n\n check_number % 97 == check_digits\n\n }\n\n CzechRepublic => {\n\n let account_number = &input[14..];\n\n\n\n let mut total = 0;\n\n for (ch, w) in account_number\n\n .chars()\n\n .zip([6, 3, 7, 9, 10, 5, 8, 4, 2, 1].iter())\n\n {\n\n let ch = ch.to_digit(10).unwrap();\n\n total += ch * w;\n\n }\n\n\n\n if total % 11 != 0 {\n\n return false;\n\n }\n\n\n\n let branch_number = &input[8..=13];\n", "file_path": "src/country_specific.rs", "rank": 43, "score": 13563.558356988331 }, { "content": "[![ci-badge][]][ci] [![docs-badge][]][docs] [![crate-version]][crate-link]\n\n\n\n# schwifty\n\n\n\nA simple IBAN validation library inspired by Python's `schwifty`.\n\n\n\n## Sample Usage\n\n```rust\n\n assert!(schwifty::validate(\"GB82 WEST 1234 5698 7654 32\").is_ok());\n\n```\n\n\n\n[ci]: https://github.com/Elinvynia/schwifty/actions?query=workflow%3ARust\n\n[ci-badge]: https://img.shields.io/github/workflow/status/Elinvynia/schwifty/Rust/master?style=flat-square\n\n[docs]: https://docs.rs/schwifty\n\n[docs-badge]: https://img.shields.io/badge/docs-online-5023dd.svg?style=flat-square\n\n[crate-link]: https://crates.io/crates/schwifty\n\n[crate-version]: https://img.shields.io/crates/v/schwifty?style=flat-square\n\n\n", "file_path": "README.md", "rank": 49, "score": 1.0673091166677082 } ]
Rust
examples/max11619-adc.rs
us-irs/vorago-reb1-rs
742a53d63a8d531d3e36068ee09987c2f38ef77e
#![no_main] #![no_std] use cortex_m_rt::entry; use embedded_hal::{blocking::delay::DelayUs, spi}; use max116xx_10bit::VoltageRefMode; use max116xx_10bit::{AveragingConversions, AveragingResults}; use panic_rtt_target as _; use rtt_target::{rprintln, rtt_init_print}; use va108xx_hal::timer::CountDownTimer; use va108xx_hal::{ gpio::PinsA, pac::{self, interrupt, SPIB}, prelude::*, spi::{Spi, SpiBase, SpiConfig, TransferConfig}, timer::{default_ms_irq_handler, set_up_ms_timer, Delay, IrqCfg}, utility::{port_mux, Funsel, PortSel}, }; use vorago_reb1::max11619::{ max11619_externally_clocked_no_wakeup, max11619_externally_clocked_with_wakeup, max11619_internally_clocked, EocPin, AN2_CHANNEL, POTENTIOMETER_CHANNEL, }; #[derive(Debug, PartialEq, Copy, Clone)] pub enum ExampleMode { UsingEoc, NotUsingEoc, NotUsingEocWithDelay, } #[derive(Debug, PartialEq, Copy, Clone)] pub enum ReadMode { Single, Multiple, MultipleNToHighest, AverageN, } #[derive(Debug, PartialEq, Copy, Clone)] pub enum MuxMode { None, PortB19to17, } const EXAMPLE_MODE: ExampleMode = ExampleMode::NotUsingEoc; const READ_MODE: ReadMode = ReadMode::Multiple; const MUX_MODE: MuxMode = MuxMode::None; #[entry] fn main() -> ! 
{ rtt_init_print!(); rprintln!("-- Vorago ADC Example --"); let mut dp = pac::Peripherals::take().unwrap(); let tim0 = set_up_ms_timer( IrqCfg::new(pac::Interrupt::OC0, true, true), &mut dp.SYSCONFIG, Some(&mut dp.IRQSEL), 50.mhz(), dp.TIM0, ); let delay = Delay::new(tim0); unsafe { cortex_m::peripheral::NVIC::unmask(pac::Interrupt::OC0); } let pinsa = PinsA::new(&mut dp.SYSCONFIG, None, dp.PORTA); let spi_cfg = SpiConfig::default(); let (sck, mosi, miso) = ( pinsa.pa20.into_funsel_2(), pinsa.pa19.into_funsel_2(), pinsa.pa18.into_funsel_2(), ); if MUX_MODE == MuxMode::PortB19to17 { port_mux(&mut dp.IOCONFIG, PortSel::PortB, 19, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 18, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 17, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 16, Funsel::Funsel1).ok(); } let mut accel_cs = pinsa.pa16.into_push_pull_output(); accel_cs .set_high() .expect("Setting accelerometer chip select high failed"); let transfer_cfg = TransferConfig::new( 3.mhz(), spi::MODE_0, Some(pinsa.pa17.into_funsel_2()), true, false, ); let spi = Spi::spib( dp.SPIB, (sck, miso, mosi), 50.mhz(), spi_cfg, Some(&mut dp.SYSCONFIG), Some(&transfer_cfg.downgrade()), ) .downgrade(); match EXAMPLE_MODE { ExampleMode::NotUsingEoc => spi_example_externally_clocked(spi, delay), ExampleMode::UsingEoc => { spi_example_internally_clocked(spi, delay, pinsa.pa14.into_floating_input()); } ExampleMode::NotUsingEocWithDelay => { let delay_us = CountDownTimer::new(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM2); spi_example_externally_clocked_with_delay(spi, delay, delay_us); } } } #[interrupt] fn OC0() { default_ms_irq_handler(); } fn spi_example_externally_clocked(spi: SpiBase<SPIB>, mut delay: Delay) -> ! 
{ let mut adc = max11619_externally_clocked_no_wakeup(spi) .expect("Creating externally clocked MAX11619 device failed"); if READ_MODE == ReadMode::AverageN { adc.averaging( AveragingConversions::FourConversions, AveragingResults::FourResults, ) .expect("Error setting up averaging register"); } let mut cmd_buf: [u8; 32] = [0; 32]; let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { rprintln!("Reading single potentiometer channel"); let pot_val = adc .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL) .expect("Creating externally clocked MAX11619 ADC failed"); rprintln!("Single channel read:"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { let mut res_buf: [u16; 4] = [0; 4]; adc.read_multiple_channels_0_to_n( &mut cmd_buf, &mut res_buf.iter_mut(), POTENTIOMETER_CHANNEL, ) .expect("Multi-Channel read failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { let mut res_buf: [u16; 2] = [0; 2]; adc.read_multiple_channels_n_to_highest( &mut cmd_buf, &mut res_buf.iter_mut(), AN2_CHANNEL, ) .expect("Multi-Channel read failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { rprintln!("Scanning and averaging not possible for externally clocked mode"); } } counter += 1; delay.delay_ms(500); } } fn spi_example_externally_clocked_with_delay( spi: SpiBase<SPIB>, mut delay: Delay, mut delay_us: impl DelayUs<u8>, ) -> ! 
{ let mut adc = max11619_externally_clocked_with_wakeup(spi).expect("Creating MAX116xx device failed"); let mut cmd_buf: [u8; 32] = [0; 32]; let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { rprintln!("Reading single potentiometer channel"); let pot_val = adc .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL, &mut delay_us) .expect("Creating externally clocked MAX11619 ADC failed"); rprintln!("Single channel read:"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { let mut res_buf: [u16; 4] = [0; 4]; adc.read_multiple_channels_0_to_n( &mut cmd_buf, &mut res_buf.iter_mut(), POTENTIOMETER_CHANNEL, &mut delay_us, ) .expect("Multi-Channel read failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { let mut res_buf: [u16; 2] = [0; 2]; adc.read_multiple_channels_n_to_highest( &mut cmd_buf, &mut res_buf.iter_mut(), AN2_CHANNEL, &mut delay_us, ) .expect("Multi-Channel read failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { rprintln!("Scanning and averaging not possible for externally clocked mode"); } } counter += 1; delay.delay_ms(500); } } fn spi_example_internally_clocked(spi: SpiBase<SPIB>, mut delay: Delay, eoc_pin: EocPin) -> ! 
{ let mut adc = max11619_internally_clocked( spi, eoc_pin, VoltageRefMode::ExternalSingleEndedNoWakeupDelay, ) .expect("Creating MAX116xx device failed"); let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { adc.request_single_channel(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let pot_val = nb::block!(adc.get_single_channel()) .expect("Reading single channel value failed"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { adc.request_multiple_channels_0_to_n(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { adc.request_multiple_channels_n_to_highest(AN2_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { adc.request_channel_n_repeatedly(POTENTIOMETER_CHANNEL) .expect("Reading channel multiple times failed"); let mut res_buf: [u16; 16] = [0; 16]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Reading potentiometer 4 times"); rprintln!("\tValue 0: {}", res_buf[0]); rprintln!("\tValue 1: {}", res_buf[1]); rprintln!("\tValue 2: {}", res_buf[2]); rprintln!("\tValue 3: {}", res_buf[3]); } } counter += 1; delay.delay_ms(500); } } fn print_res_buf(buf: &[u16; 4]) { rprintln!("Multi channel read from 0 to 3:"); rprintln!("\tAN0 value: {}", buf[0]); rprintln!("\tAN1 value: {}", buf[1]); rprintln!("\tAN2 value: {}", buf[2]); 
rprintln!("\tAN3 / Potentiometer value: {}", buf[3]); }
#![no_main] #![no_std] use cortex_m_rt::entry; use embedded_hal::{blocking::delay::DelayUs, spi}; use max116xx_10bit::VoltageRefMode; use max116xx_10bit::{AveragingConversions, AveragingResults}; use panic_rtt_target as _; use rtt_target::{rprintln, rtt_init_print}; use va108xx_hal::timer::CountDownTimer; use va108xx_hal::{ gpio::PinsA, pac::{self, interrupt, SPIB}, prelude::*, spi::{Spi, SpiBase, SpiConfig, TransferConfig}, timer::{default_ms_irq_handler, set_up_ms_timer, Delay, IrqCfg}, utility::{port_mux, Funsel, PortSel}, }; use vorago_reb1::max11619::{ max11619_externally_clocked_no_wakeup, max11619_externally_clocked_with_wakeup, max11619_internally_clocked, EocPin, AN2_CHANNEL, POTENTIOMETER_CHANNEL, }; #[derive(Debug, PartialEq, Copy, Clone)] pub enum ExampleMode { UsingEoc, NotUsingEoc, NotUsingEocWithDelay, } #[derive(Debug, PartialEq, Copy, Clone)] pub enum ReadMode { Single, Multiple, MultipleNToHighest, AverageN, } #[derive(Debug, PartialEq, Copy, Clone)] pub enum MuxMode { None, PortB19to17, } const EXAMPLE_MODE: ExampleMode = ExampleMode::NotUsingEoc; const READ_MODE: ReadMode = ReadMode::Multiple; const MUX_MODE: MuxMode = MuxMode::None; #[entry] fn main() -> ! 
{ rtt_init_print!(); rprintln!("-- Vorago ADC Example --"); let mut dp = pac::Peripherals::take().unwrap(); let tim0 = set_up_ms_timer( IrqCfg::new(pac::Interrupt::OC0, true, true), &mut dp.SYSCONFIG, Some(&mut dp.IRQSEL), 50.mhz(), dp.TIM0, ); let delay = Delay::new(tim0); unsafe { cortex_m::peripheral::NVIC::unmask(pac::Interrupt::OC0); } let pinsa = PinsA::new(&mut dp.SYSCONFIG, None, dp.PORTA); let spi_cfg = SpiConfig::default(); let (sck, mosi, miso) = ( pinsa.pa20.into_funsel_2(), pinsa.pa19.into_funsel_2(), pinsa.pa18.into_funsel_2(), ); if MUX_MODE == MuxMode::PortB19to17 { port_mux(&mut dp.IOCONFIG, PortSel::PortB, 19, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 18, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 17, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 16, Funsel::Funsel1).ok(); } let mut accel_cs = pinsa.pa16.into_push_pull_output(); accel_cs .set_high() .expect("Setting accelerometer chip select high failed"); let transfer_cfg = TransferConfig::new( 3.mhz(), spi::MODE_0, Some(pinsa.pa17.into_funsel_2()), true, false, ); let spi = Spi::spib( dp.SPIB, (sck, miso, mosi), 50.mhz(), spi_cfg, Some(&mut dp.SYSCONFIG), Some(&transfer_cfg.downgrade()), ) .downgrade(); match EXAMPLE_MODE { ExampleMode::NotUsingEoc => spi_example_externally_clocked(spi, delay), ExampleMode::UsingEoc => { spi_example_internally_clocked(spi, delay, pinsa.pa14.into_floating_input()); } ExampleMode::NotUsingEocWithDelay => { let delay_us = CountDownTimer::new(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM2); spi_example_externally_clocked_with_delay(spi, delay, delay_us); } } } #[interrupt] fn OC0() { default_ms_irq_handler(); } fn spi_example_externally_clocked(spi: SpiBase<SPIB>, mut delay: Delay) -> ! { let mut adc = max11619_externally_clocked_no_wake
fn spi_example_externally_clocked_with_delay( spi: SpiBase<SPIB>, mut delay: Delay, mut delay_us: impl DelayUs<u8>, ) -> ! { let mut adc = max11619_externally_clocked_with_wakeup(spi).expect("Creating MAX116xx device failed"); let mut cmd_buf: [u8; 32] = [0; 32]; let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { rprintln!("Reading single potentiometer channel"); let pot_val = adc .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL, &mut delay_us) .expect("Creating externally clocked MAX11619 ADC failed"); rprintln!("Single channel read:"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { let mut res_buf: [u16; 4] = [0; 4]; adc.read_multiple_channels_0_to_n( &mut cmd_buf, &mut res_buf.iter_mut(), POTENTIOMETER_CHANNEL, &mut delay_us, ) .expect("Multi-Channel read failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { let mut res_buf: [u16; 2] = [0; 2]; adc.read_multiple_channels_n_to_highest( &mut cmd_buf, &mut res_buf.iter_mut(), AN2_CHANNEL, &mut delay_us, ) .expect("Multi-Channel read failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { rprintln!("Scanning and averaging not possible for externally clocked mode"); } } counter += 1; delay.delay_ms(500); } } fn spi_example_internally_clocked(spi: SpiBase<SPIB>, mut delay: Delay, eoc_pin: EocPin) -> ! 
{ let mut adc = max11619_internally_clocked( spi, eoc_pin, VoltageRefMode::ExternalSingleEndedNoWakeupDelay, ) .expect("Creating MAX116xx device failed"); let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { adc.request_single_channel(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let pot_val = nb::block!(adc.get_single_channel()) .expect("Reading single channel value failed"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { adc.request_multiple_channels_0_to_n(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { adc.request_multiple_channels_n_to_highest(AN2_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { adc.request_channel_n_repeatedly(POTENTIOMETER_CHANNEL) .expect("Reading channel multiple times failed"); let mut res_buf: [u16; 16] = [0; 16]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Reading potentiometer 4 times"); rprintln!("\tValue 0: {}", res_buf[0]); rprintln!("\tValue 1: {}", res_buf[1]); rprintln!("\tValue 2: {}", res_buf[2]); rprintln!("\tValue 3: {}", res_buf[3]); } } counter += 1; delay.delay_ms(500); } } fn print_res_buf(buf: &[u16; 4]) { rprintln!("Multi channel read from 0 to 3:"); rprintln!("\tAN0 value: {}", buf[0]); rprintln!("\tAN1 value: {}", buf[1]); rprintln!("\tAN2 value: {}", buf[2]); 
rprintln!("\tAN3 / Potentiometer value: {}", buf[3]); }
up(spi) .expect("Creating externally clocked MAX11619 device failed"); if READ_MODE == ReadMode::AverageN { adc.averaging( AveragingConversions::FourConversions, AveragingResults::FourResults, ) .expect("Error setting up averaging register"); } let mut cmd_buf: [u8; 32] = [0; 32]; let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { rprintln!("Reading single potentiometer channel"); let pot_val = adc .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL) .expect("Creating externally clocked MAX11619 ADC failed"); rprintln!("Single channel read:"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { let mut res_buf: [u16; 4] = [0; 4]; adc.read_multiple_channels_0_to_n( &mut cmd_buf, &mut res_buf.iter_mut(), POTENTIOMETER_CHANNEL, ) .expect("Multi-Channel read failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { let mut res_buf: [u16; 2] = [0; 2]; adc.read_multiple_channels_n_to_highest( &mut cmd_buf, &mut res_buf.iter_mut(), AN2_CHANNEL, ) .expect("Multi-Channel read failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { rprintln!("Scanning and averaging not possible for externally clocked mode"); } } counter += 1; delay.delay_ms(500); } }
function_block-function_prefixed
[ { "content": "#[entry]\n\nfn main() -> ! {\n\n rtt_init_print!();\n\n rprintln!(\"-- Vorago Accelerometer Example --\");\n\n let mut dp = pac::Peripherals::take().unwrap();\n\n let mut delay = set_up_ms_delay_provider(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM0);\n\n let pinsa = PinsA::new(&mut dp.SYSCONFIG, None, dp.PORTA);\n\n let spi_cfg = SpiConfig::default();\n\n let (sck, mosi, miso) = (\n\n pinsa.pa20.into_funsel_2(),\n\n pinsa.pa19.into_funsel_2(),\n\n pinsa.pa18.into_funsel_2(),\n\n );\n\n let cs_pin = pinsa.pa16.into_funsel_2();\n\n\n\n // Need to set the ADC chip select low\n\n let mut adc_cs = pinsa.pa17.into_push_pull_output();\n\n adc_cs\n\n .set_high()\n\n .expect(\"Setting ADC chip select high failed\");\n\n\n", "file_path": "examples/adxl343-accelerometer.rs", "rank": 2, "score": 104261.2363410182 }, { "content": "pub fn max11619_externally_clocked_with_wakeup<SpiE, SPI>(\n\n spi: SPI,\n\n) -> Result<Max11619ExternallyClockedWithWakeup<SPI>, Error<SpiE, Infallible>>\n\nwhere\n\n SPI: Transfer<u8, Error = SpiE> + FullDuplex<u8, Error = SpiE>,\n\n{\n\n let mut adc = Max116xx10Bit::max11619(spi, DummyPin::new_low())?\n\n .into_ext_clkd_with_int_ref_wakeup_delay();\n\n adc.reset(false)?;\n\n adc.setup()?;\n\n Ok(adc)\n\n}\n\n\n", "file_path": "src/max11619.rs", "rank": 6, "score": 86413.88327017383 }, { "content": "pub fn max11619_externally_clocked_no_wakeup<SpiE, SPI>(\n\n spi: SPI,\n\n) -> Result<Max11619ExternallyClockedNoWakeup<SPI>, Error<SpiE, Infallible>>\n\nwhere\n\n SPI: Transfer<u8, Error = SpiE> + FullDuplex<u8, Error = SpiE>,\n\n{\n\n let mut adc = Max116xx10Bit::max11619(spi, DummyPin::new_low())?;\n\n adc.reset(false)?;\n\n adc.setup()?;\n\n Ok(adc)\n\n}\n\n\n", "file_path": "src/max11619.rs", "rank": 7, "score": 86413.88327017383 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let mut dp = pac::Peripherals::take().unwrap();\n\n\n\n let lib_type = LibType::Bsp;\n\n\n\n match lib_type {\n\n LibType::Pac => {\n\n // Enable all peripheral clocks\n\n dp.SYSCONFIG\n\n .peripheral_clk_enable\n\n .modify(|_, w| unsafe { w.bits(0xffffffff) });\n\n dp.PORTA\n\n .dir()\n\n .modify(|_, w| unsafe { w.bits(LED_D2 | LED_D3 | LED_D4) });\n\n dp.PORTA\n\n .datamask()\n\n .modify(|_, w| unsafe { w.bits(LED_D2 | LED_D3 | LED_D4) });\n\n for _ in 0..10 {\n\n dp.PORTA\n\n .clrout()\n", "file_path": "examples/blinky-leds.rs", "rank": 8, "score": 83710.00189612951 }, { "content": "#[interrupt]\n\nfn OC0() {\n\n default_ms_irq_handler();\n\n}\n\n\n", "file_path": "examples/blinky-button-irq.rs", "rank": 9, "score": 81244.71581440594 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n rtt_init_print!();\n\n rprintln!(\"-- Vorago Temperature Sensor and I2C Example --\");\n\n let mut dp = pac::Peripherals::take().unwrap();\n\n let mut delay = set_up_ms_delay_provider(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM0);\n\n unsafe {\n\n cortex_m::peripheral::NVIC::unmask(pac::Interrupt::OC0);\n\n }\n\n\n\n let mut temp_sensor = Adt75TempSensor::new(dp.I2CA, 50.mhz(), Some(&mut dp.SYSCONFIG))\n\n .expect(\"Creating temperature sensor struct failed\");\n\n loop {\n\n let temp = temp_sensor\n\n .read_temperature()\n\n .expect(\"Failed reading temperature\");\n\n rprintln!(\"Temperature in Celcius: {}\", temp);\n\n delay.delay_ms(500_u16);\n\n }\n\n}\n", "file_path": "examples/adt75-temp-sensor.rs", "rank": 10, "score": 80659.45066437384 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n rtt_init_print!();\n\n rprintln!(\"-- Vorago Button IRQ Example --\");\n\n let mut dp = pac::Peripherals::take().unwrap();\n\n let pinsa = PinsA::new(&mut dp.SYSCONFIG, Some(dp.IOCONFIG), dp.PORTA);\n\n let edge_irq = match PRESS_MODE {\n\n PressMode::Toggle => InterruptEdge::HighToLow,\n\n PressMode::Keep => InterruptEdge::BothEdges,\n\n };\n\n\n\n // Configure an edge interrupt on the button and route it to interrupt vector 15\n\n let mut button = Button::new(pinsa.pa11.into_floating_input()).edge_irq(\n\n edge_irq,\n\n IrqCfg::new(pac::interrupt::OC15, true, true),\n\n Some(&mut dp.SYSCONFIG),\n\n Some(&mut dp.IRQSEL),\n\n );\n\n\n\n if PRESS_MODE == PressMode::Toggle {\n\n // This filter debounces the switch for edge based interrupts\n", "file_path": "examples/blinky-button-irq.rs", "rank": 11, "score": 80659.45066437384 }, { "content": "pub fn max11619_internally_clocked<SpiE, SPI>(\n\n spi: SPI,\n\n eoc: EocPin,\n\n v_ref: VoltageRefMode,\n\n) -> Result<Max11619InternallyClocked<SPI, EocPin>, Error<SpiE, Infallible>>\n\nwhere\n\n SPI: Transfer<u8, Error = SpiE> + FullDuplex<u8, Error = SpiE>,\n\n{\n\n let mut adc = Max116xx10Bit::max11619(spi, DummyPin::new_low())?\n\n .into_int_clkd_int_timed_through_ser_if_without_wakeup(v_ref, eoc)?;\n\n adc.reset(false)?;\n\n adc.setup()?;\n\n Ok(adc)\n\n}\n", "file_path": "src/max11619.rs", "rank": 12, "score": 79382.42842482601 }, { "content": "#[allow(dead_code)]\n\nenum LibType {\n\n Pac,\n\n Hal,\n\n Bsp,\n\n}\n\n\n", "file_path": "examples/blinky-leds.rs", "rank": 14, "score": 49533.17534766216 }, { "content": "#[interrupt]\n\nfn OC15() {\n\n cortex_m::interrupt::free(|cs| {\n\n if PRESS_MODE == PressMode::Toggle {\n\n if let Some(ref mut leds) = LEDS.borrow(cs).borrow_mut().as_deref_mut() {\n\n leds[0].toggle();\n\n }\n\n } else {\n\n if let (Some(ref mut leds), Some(ref button)) = (\n\n LEDS.borrow(cs).borrow_mut().as_deref_mut(),\n\n BUTTON.borrow(cs).borrow().as_ref(),\n\n ) {\n\n if button.released() 
{\n\n leds[0].off();\n\n } else {\n\n leds[0].on();\n\n }\n\n }\n\n }\n\n });\n\n}\n", "file_path": "examples/blinky-button-irq.rs", "rank": 15, "score": 46509.423078440625 }, { "content": " let transfer_cfg = TransferConfig::new(1.mhz(), spi::MODE_3, Some(cs_pin), false, true);\n\n let mut spi = Spi::spib(\n\n dp.SPIB,\n\n (sck, miso, mosi),\n\n 50.mhz(),\n\n spi_cfg,\n\n Some(&mut dp.SYSCONFIG),\n\n Some(&transfer_cfg.downgrade()),\n\n );\n\n\n\n let mut send_buf: [u8; 3] = [0; 3];\n\n send_buf[0] = READ_MASK | DEVID_REG;\n\n let reply = spi\n\n .transfer(&mut send_buf[0..2])\n\n .expect(\"Reading DEVID register failed\");\n\n rprintln!(\"DEVID register: {}\", reply[1]);\n\n\n\n send_buf[0] = POWER_CTL_REG;\n\n send_buf[1] = PWR_MEASUREMENT_MODE_MASK;\n\n spi.write(&send_buf[0..2])\n\n .expect(\"Enabling measurement mode failed\");\n\n\n\n loop {\n\n delay.delay_ms(500_u16);\n\n unimplemented!(\"Not implemented for now, is not populated on the board..\");\n\n }\n\n}\n", "file_path": "examples/adxl343-accelerometer.rs", "rank": 16, "score": 38251.05051016025 }, { "content": "//! ADXL343 accelerometer example\n\n//!\n\n//! Please note that the default REB1 board is not populated with the ADXL343BCCZ-RL7.\n\n//! 
To use this example, this chip needs to be soldered onto the board.\n\n#![no_main]\n\n#![no_std]\n\nuse cortex_m_rt::entry;\n\nuse embedded_hal::spi;\n\nuse panic_rtt_target as _;\n\nuse rtt_target::{rprintln, rtt_init_print};\n\nuse va108xx_hal::{\n\n gpio::PinsA,\n\n pac,\n\n prelude::*,\n\n spi::{Spi, SpiConfig, TransferConfig},\n\n timer::set_up_ms_delay_provider,\n\n};\n\n\n\nconst READ_MASK: u8 = 1 << 7;\n\nconst _MULTI_BYTE_MASK: u8 = 1 << 6;\n\nconst DEVID_REG: u8 = 0x00;\n\n\n\nconst POWER_CTL_REG: u8 = 0x2D;\n\nconst PWR_MEASUREMENT_MODE_MASK: u8 = 1 << 3;\n\n\n\n#[entry]\n", "file_path": "examples/adxl343-accelerometer.rs", "rank": 17, "score": 38248.67319818401 }, { "content": " let pinsa = PinsA::new(&mut dp.SYSCONFIG, Some(dp.IOCONFIG), dp.PORTA);\n\n let mut leds = Leds::new(\n\n pinsa.pa10.into_push_pull_output(),\n\n pinsa.pa7.into_push_pull_output(),\n\n pinsa.pa6.into_push_pull_output(),\n\n );\n\n let mut delay = set_up_ms_delay_provider(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM0);\n\n loop {\n\n for _ in 0..10 {\n\n // Blink all LEDs quickly\n\n for led in leds.iter_mut() {\n\n led.toggle();\n\n }\n\n delay.delay_ms(200_u16);\n\n }\n\n // Now use a wave pattern\n\n loop {\n\n for led in leds.iter_mut() {\n\n led.toggle();\n\n delay.delay_ms(200_u16);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/blinky-leds.rs", "rank": 29, "score": 16419.289133835042 }, { "content": " .write(|w| unsafe { w.bits(LED_D2 | LED_D3 | LED_D4) });\n\n cortex_m::asm::delay(5_000_000);\n\n dp.PORTA\n\n .setout()\n\n .write(|w| unsafe { w.bits(LED_D2 | LED_D3 | LED_D4) });\n\n cortex_m::asm::delay(5_000_000);\n\n }\n\n loop {\n\n dp.PORTA\n\n .togout()\n\n .write(|w| unsafe { w.bits(LED_D2 | LED_D3 | LED_D4) });\n\n cortex_m::asm::delay(25_000_000);\n\n }\n\n }\n\n LibType::Hal => {\n\n let pins = PinsA::new(&mut dp.SYSCONFIG, Some(dp.IOCONFIG), dp.PORTA);\n\n let mut led1 = pins.pa10.into_push_pull_output();\n\n let mut led2 = 
pins.pa7.into_push_pull_output();\n\n let mut led3 = pins.pa6.into_push_pull_output();\n\n let mut delay = set_up_ms_delay_provider(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM0);\n", "file_path": "examples/blinky-leds.rs", "rank": 30, "score": 16417.111931063202 }, { "content": "//! Blinky examples using the PAC directly, the HAL, or the BSP\n\n//!\n\n//! Additional note on LEDs:\n\n//! Be not afraid: Pulling the GPIOs low makes the LEDs blink. See REB1\n\n//! schematic for more details.\n\n#![no_main]\n\n#![no_std]\n\n\n\nuse cortex_m_rt::entry;\n\nuse embedded_hal::digital::v2::ToggleableOutputPin;\n\nuse panic_halt as _;\n\nuse va108xx_hal::{gpio::pins::PinsA, pac, prelude::*, timer::set_up_ms_delay_provider};\n\nuse vorago_reb1::leds::Leds;\n\n\n\n// REB LED pin definitions. All on port A\n\nconst LED_D2: u32 = 1 << 10;\n\nconst LED_D3: u32 = 1 << 7;\n\nconst LED_D4: u32 = 1 << 6;\n\n\n\n#[allow(dead_code)]\n", "file_path": "examples/blinky-leds.rs", "rank": 31, "score": 16416.081161553233 }, { "content": " for _ in 0..10 {\n\n led1.set_low().ok();\n\n led2.set_low().ok();\n\n led3.set_low().ok();\n\n delay.delay_ms(200_u16);\n\n led1.set_high().ok();\n\n led2.set_high().ok();\n\n led3.set_high().ok();\n\n delay.delay_ms(200_u16);\n\n }\n\n loop {\n\n led1.toggle().ok();\n\n delay.delay_ms(200_u16);\n\n led2.toggle().ok();\n\n delay.delay_ms(200_u16);\n\n led3.toggle().ok();\n\n delay.delay_ms(200_u16);\n\n }\n\n }\n\n LibType::Bsp => {\n", "file_path": "examples/blinky-leds.rs", "rank": 32, "score": 16411.023123303337 }, { "content": " );\n\n }\n\n let mut leds = Leds::new(\n\n pinsa.pa10.into_push_pull_output(),\n\n pinsa.pa7.into_push_pull_output(),\n\n pinsa.pa6.into_push_pull_output(),\n\n );\n\n for led in leds.iter_mut() {\n\n led.off();\n\n }\n\n set_up_ms_timer(\n\n IrqCfg::new(pac::Interrupt::OC0, true, true),\n\n &mut dp.SYSCONFIG,\n\n Some(&mut dp.IRQSEL),\n\n 50.mhz(),\n\n dp.TIM0,\n\n );\n\n (Shared {}, Local { leds, button, mode }, 
init::Monotonics())\n\n }\n\n\n", "file_path": "examples/blinky-button-rtic.rs", "rank": 33, "score": 15462.513764525196 }, { "content": "#![no_main]\n\n#![no_std]\n\nuse cortex_m_rt::entry;\n\nuse panic_rtt_target as _;\n\nuse rtt_target::{rprintln, rtt_init_print};\n\nuse va108xx_hal::{pac, prelude::*, timer::set_up_ms_delay_provider};\n\nuse vorago_reb1::temp_sensor::Adt75TempSensor;\n\n\n\n#[entry]\n", "file_path": "examples/adt75-temp-sensor.rs", "rank": 34, "score": 15461.951526152614 }, { "content": " }\n\n\n\n #[shared]\n\n struct Shared {}\n\n\n\n #[init]\n\n fn init(ctx: init::Context) -> (Shared, Local, init::Monotonics) {\n\n let channels = rtt_init_default!();\n\n set_print_channel(channels.up.0);\n\n rprintln!(\"-- Vorago Button IRQ Example --\");\n\n let mode = match CFG_MODE {\n\n // Ask mode from user via RTT\n\n CfgMode::Prompt => prompt_mode(channels.down.0),\n\n // Use mode hardcoded in `DEFAULT_MODE`\n\n CfgMode::Fixed => DEFAULT_MODE,\n\n };\n\n rprintln!(\"Using {:?} mode\", mode);\n\n\n\n let mut dp = ctx.device;\n\n let pinsa = PinsA::new(&mut dp.SYSCONFIG, Some(dp.IOCONFIG), dp.PORTA);\n", "file_path": "examples/blinky-button-rtic.rs", "rank": 35, "score": 15461.50590629357 }, { "content": " button = button.filter_type(FilterType::FilterFourClockCycles, FilterClkSel::Clk1);\n\n set_clk_div_register(\n\n &mut dp.SYSCONFIG,\n\n FilterClkSel::Clk1,\n\n Hertz::from(50.khz()).0,\n\n );\n\n }\n\n\n\n set_up_ms_timer(\n\n IrqCfg::new(pac::Interrupt::OC0, true, true),\n\n &mut dp.SYSCONFIG,\n\n Some(&mut dp.IRQSEL),\n\n 50.mhz(),\n\n dp.TIM0,\n\n );\n\n let mut leds = Leds::new(\n\n pinsa.pa10.into_push_pull_output(),\n\n pinsa.pa7.into_push_pull_output(),\n\n pinsa.pa6.into_push_pull_output(),\n\n );\n", "file_path": "examples/blinky-button-irq.rs", "rank": 36, "score": 15461.46234446948 }, { "content": " let edge_irq = match mode {\n\n PressMode::Toggle => InterruptEdge::HighToLow,\n\n PressMode::Keep => InterruptEdge::BothEdges,\n\n };\n\n\n\n 
// Configure an edge interrupt on the button and route it to interrupt vector 15\n\n let mut button = Button::new(pinsa.pa11.into_floating_input()).edge_irq(\n\n edge_irq,\n\n IrqCfg::new(pac::interrupt::OC15, true, true),\n\n Some(&mut dp.SYSCONFIG),\n\n Some(&mut dp.IRQSEL),\n\n );\n\n\n\n if mode == PressMode::Toggle {\n\n // This filter debounces the switch for edge based interrupts\n\n button = button.filter_type(FilterType::FilterFourClockCycles, FilterClkSel::Clk1);\n\n set_clk_div_register(\n\n &mut dp.SYSCONFIG,\n\n FilterClkSel::Clk1,\n\n Hertz::from(50.khz()).0,\n", "file_path": "examples/blinky-button-rtic.rs", "rank": 37, "score": 15460.284325910052 }, { "content": "//! Blinky button application for the REB1 board\n\n#![no_main]\n\n#![no_std]\n\n\n\nuse core::cell::RefCell;\n\n\n\nuse cortex_m::interrupt::Mutex;\n\nuse cortex_m_rt::entry;\n\nuse panic_rtt_target as _;\n\nuse rtt_target::{rprintln, rtt_init_print};\n\nuse va108xx_hal::{\n\n clock::{set_clk_div_register, FilterClkSel},\n\n gpio::{FilterType, InterruptEdge, PinsA},\n\n pac::{self, interrupt},\n\n prelude::*,\n\n time::Hertz,\n\n timer::{default_ms_irq_handler, set_up_ms_timer, IrqCfg},\n\n};\n\nuse vorago_reb1::button::Button;\n\nuse vorago_reb1::leds::Leds;\n", "file_path": "examples/blinky-button-irq.rs", "rank": 38, "score": 15459.86891575505 }, { "content": "//! 
Blinky button application for the REB1 board using RTIC\n\n#![no_main]\n\n#![no_std]\n\n\n\n#[rtic::app(device = pac)]\n\nmod app {\n\n use panic_rtt_target as _;\n\n use rtt_target::{rprintln, rtt_init_default, set_print_channel};\n\n use va108xx_hal::{\n\n clock::{set_clk_div_register, FilterClkSel},\n\n gpio::{FilterType, InterruptEdge, PinsA},\n\n pac,\n\n prelude::*,\n\n time::Hertz,\n\n timer::{default_ms_irq_handler, set_up_ms_timer, IrqCfg},\n\n };\n\n use vorago_reb1::button::Button;\n\n use vorago_reb1::leds::Leds;\n\n\n\n #[derive(Debug, PartialEq)]\n", "file_path": "examples/blinky-button-rtic.rs", "rank": 39, "score": 15457.198081928578 }, { "content": " }\n\n\n\n #[task(binds = OC0)]\n\n fn ms_tick(_cx: ms_tick::Context) {\n\n default_ms_irq_handler();\n\n }\n\n\n\n fn prompt_mode(mut down_channel: rtt_target::DownChannel) -> PressMode {\n\n rprintln!(\"Using prompt mode\");\n\n rprintln!(\"Please enter the mode [0: Toggle, 1: Keep]\");\n\n let mut read_buf: [u8; 16] = [0; 16];\n\n let mut read;\n\n loop {\n\n read = down_channel.read(&mut read_buf);\n\n for i in 0..read {\n\n let val = read_buf[i] as char;\n\n if val == '0' || val == '1' {\n\n return if val == '0' {\n\n PressMode::Toggle\n\n } else {\n\n PressMode::Keep\n\n };\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/blinky-button-rtic.rs", "rank": 40, "score": 15454.595184132591 }, { "content": "\n\nstatic LEDS: Mutex<RefCell<Option<Leds>>> = Mutex::new(RefCell::new(None));\n\nstatic BUTTON: Mutex<RefCell<Option<Button>>> = Mutex::new(RefCell::new(None));\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum PressMode {\n\n Toggle,\n\n Keep,\n\n}\n\n\n\n// You can change the press mode here\n\nconst PRESS_MODE: PressMode = PressMode::Keep;\n\n\n\n#[entry]\n", "file_path": "examples/blinky-button-irq.rs", "rank": 41, "score": 15452.632201433727 }, { "content": " for led in leds.iter_mut() {\n\n led.off();\n\n }\n\n // Make both button and LEDs accessible from the IRQ handler as well\n\n 
cortex_m::interrupt::free(|cs| {\n\n LEDS.borrow(cs).replace(Some(leds));\n\n BUTTON.borrow(cs).replace(Some(button));\n\n });\n\n loop {}\n\n}\n\n\n", "file_path": "examples/blinky-button-irq.rs", "rank": 42, "score": 15450.750284869031 }, { "content": " pub enum PressMode {\n\n Toggle,\n\n Keep,\n\n }\n\n\n\n #[derive(Debug, PartialEq)]\n\n pub enum CfgMode {\n\n Prompt,\n\n Fixed,\n\n }\n\n\n\n const CFG_MODE: CfgMode = CfgMode::Fixed;\n\n // You can change the press mode here\n\n const DEFAULT_MODE: PressMode = PressMode::Toggle;\n\n\n\n #[local]\n\n struct Local {\n\n leds: Leds,\n\n button: Button,\n\n mode: PressMode,\n", "file_path": "examples/blinky-button-rtic.rs", "rank": 43, "score": 15449.143738144023 }, { "content": " // `shared` cannot be accessed from this context\n\n #[idle]\n\n fn idle(_cx: idle::Context) -> ! {\n\n loop {}\n\n }\n\n\n\n #[task(binds = OC15, local=[button, leds, mode])]\n\n fn button_task(cx: button_task::Context) {\n\n let leds = cx.local.leds;\n\n let button = cx.local.button;\n\n let mode = cx.local.mode;\n\n if *mode == PressMode::Toggle {\n\n leds[0].toggle();\n\n } else {\n\n if button.released() {\n\n leds[0].off();\n\n } else {\n\n leds[0].on();\n\n }\n\n }\n", "file_path": "examples/blinky-button-rtic.rs", "rank": 44, "score": 15446.738218245904 }, { "content": "[![Crates.io](https://img.shields.io/crates/v/vorago-reb1)](https://crates.io/crates/vorago-reb1)\n\n[![ci](https://github.com/us-irs/vorago-reb1-rs/actions/workflows/ci.yml/badge.svg)](https://github.com/us-irs/vorago-reb1-rs/actions/workflows/ci.yml)\n\n[![docs.rs](https://img.shields.io/docsrs/vorago-reb1)](https://docs.rs/vorago-reb1)\n\n\n\n# Rust BSP for the Vorago REB1 development board\n\n\n\nThis is the Rust **B**oard **S**upport **P**ackage crate for the Vorago REB1 development board.\n\nIts aim is to provide drivers for the board features of the REB1 board\n\n\n\nThe BSP builds on top of the [HAL crate for VA108xx 
devices](https://egit.irs.uni-stuttgart.de/rust/va108xx-hal).\n\n\n\n## Building\n\n\n\nBuilding an application requires the `thumbv6m-none-eabi` cross-compiler toolchain.\n\nIf you have not installed it yet, you can do so with\n\n\n\n```sh\n\nrustup target add thumbv6m-none-eabi\n\n```\n\n\n\nThis repository provides some example applications to show how the BSP is used. For example\n\nyou can build the blinky example with\n\n\n\n```sh\n\ncargo build --example blinky-leds\n\n```\n\n\n\nIf you have not done this yet, it is recommended to read some of the excellent resources\n\navailable to learn Rust:\n\n\n\n- [Rust Embedded Book](https://docs.rust-embedded.org/book/)\n\n- [Rust Discovery Book](https://docs.rust-embedded.org/discovery/)\n\n\n", "file_path": "README.md", "rank": 45, "score": 12518.930358323722 }, { "content": "Change Log\n\n=======\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/)\n\nand this project adheres to [Semantic Versioning](http://semver.org/).\n\n\n\n## [unreleased]\n\n\n\n## [v0.4.0]\n\n\n\n- Update manifest file to have correct links and license\n\n- Update some dependencies\n\n - `cortex-m-rtic` (dev-depencency) to 1.1.2\n\n - Other dependencies: Only revision has changed\n\n\n\n## [v0.3.2]\n\n\n\n- Bump HAL dependency to v0.5.0. Changed API, especially for IRQ handling\n\n\n\n## [v0.3.1]\n\n\n\n- Updated ADC code and dependency\n\n\n\n## [v0.3.0]\n\n\n\n- Completed baseline features to support all sensors on the REB1 sevice\n\n- Relicensed as Apache-2.0 and moved to https://egit.irs.uni-stuttgart.de/rust/vorago-reb1\n\n\n\n## [v0.2.3]\n\n\n\n- Added basic accelerometer example. 
Board in not populated so it is not complete, but\n\n it provides a starting point\n\n- Added ADC base library and example building on the new max116xx-10bit device driver crate\n", "file_path": "CHANGELOG.md", "rank": 46, "score": 12518.084733070982 }, { "content": "## Flashing from the command line\n\n\n\nA `jlink.gdb` file is provided to allow flashing of the board from the command line.\n\n\n\n\n\n1. Ensure that you have a suitable GDB application like `arm-none-eabi-gdb` or `gdb-multiarch`\n\n installed first. On Windows, you can use [xPacks](https://xpack.github.io/arm-none-eabi-gcc/).\n\n On Linux, you can install `gdb-multiarch` from the package manager.\n\n\n\n2. Install the [JLink Tools](https://www.segger.com/downloads/jlink/#J-LinkSoftwareAndDocumentationPack).\n\n\n\n3. Start the JLink GDB server with the GUI or from the command line. The device should be recognized\n\n automatically\n\n\n\n4. Make sure to select an appropriate runner in the `.cargo/config.toml` file depending on which\n\n GDB application you are using\n\n\n\n5. Use\n\n\n\n ```sh\n\n cargo run --example blinky-leds\n\n ```\n\n\n\n to flash the board. The debugger should stop at the start of the main.\n\n\n\n## Debugging with VS Code\n\n\n\nThe REB1 board features an on-board JTAG, so all that is required to flash the board is a\n\nMicro-USB cable and an \n\nYou can debug applications on the REB1 board with a graphical user interface using VS Code with\n\nthe [`Cortex-Debug` plugin](https://marketplace.visualstudio.com/items?itemName=marus25.cortex-debug).\n\n\n\nSome sample configuration files for VS code were provided as well. 
You can simply use `Run and Debug`\n\nto automatically rebuild and flash your application.\n\n\n\nThe `tasks.json` and the `launch.json` files are generic and you can use them immediately by\n\nopening the folder in VS code or adding it to a workspace.\n\n\n\nIf you would like to use a custom GDB application, you can specify the gdb binary in the following\n\nconfiguration variables in your `settings.json`:\n\n\n\n- `\"cortex-debug.gdbPath\"`\n\n- `\"cortex-debug.gdbPath.linux\"`\n\n- `\"cortex-debug.gdbPath.windows\"`\n\n- `\"cortex-debug.gdbPath.osx\"`\n\n\n\n## Flashing the non-volatile memory\n\n\n\nComing Soon\n", "file_path": "README.md", "rank": 47, "score": 12517.655254137182 }, { "content": "//! This module provides a thin REB1 specific layer on top of the `max116xx_10bit` driver crate\n\n//!\n\n//! ## Examples\n\n//!\n\n//! - [ADC example](https://egit.irs.uni-stuttgart.de/rust/vorago-reb1/src/branch/main/examples/max11619-adc.rs)\n\nuse core::convert::Infallible;\n\nuse dummy_pin::DummyPin;\n\nuse embedded_hal::{blocking::spi::Transfer, spi::FullDuplex};\n\nuse max116xx_10bit::{\n\n Error, ExternallyClocked, InternallyClockedInternallyTimedSerialInterface, Max116xx10Bit,\n\n Max116xx10BitEocExt, VoltageRefMode, WithWakeupDelay, WithoutWakeupDelay,\n\n};\n\nuse va108xx_hal::gpio::{Floating, Input, Pin, PA14};\n\n\n\npub type Max11619ExternallyClockedNoWakeup<SPI> =\n\n Max116xx10Bit<SPI, DummyPin, ExternallyClocked, WithoutWakeupDelay>;\n\npub type Max11619ExternallyClockedWithWakeup<SPI> =\n\n Max116xx10Bit<SPI, DummyPin, ExternallyClocked, WithWakeupDelay>;\n\npub type Max11619InternallyClocked<SPI, EOC> =\n\n Max116xx10BitEocExt<SPI, DummyPin, EOC, InternallyClockedInternallyTimedSerialInterface>;\n\npub type EocPin = Pin<PA14, Input<Floating>>;\n\n\n\npub const AN0_CHANNEL: u8 = 0;\n\npub const AN1_CHANNEL: u8 = 1;\n\npub const AN2_CHANNEL: u8 = 2;\n\npub const POTENTIOMETER_CHANNEL: u8 = 3;\n\n\n", "file_path": "src/max11619.rs", "rank": 48, "score": 
14.684003586445634 }, { "content": "//! # API for using the REB1 LEDs\n\n//!\n\n//! ## Examples\n\n//!\n\n//! - [LED example](https://egit.irs.uni-stuttgart.de/rust/vorago-reb1/src/branch/main/examples/blinky-leds.rs)\n\n//! - [Button Blinky using IRQs](https://egit.irs.uni-stuttgart.de/rust/vorago-reb1/src/branch/main/examples/blinky-button-irq.rs)\n\n//! - [Button Blinky using IRQs and RTIC](https://egit.irs.uni-stuttgart.de/rust/vorago-reb1/src/branch/main/examples/blinky-button-rtic.rs)\n\nuse va108xx_hal::{\n\n gpio::dynpins::DynPin,\n\n gpio::pins::{Pin, PushPullOutput, PA10, PA6, PA7},\n\n prelude::*,\n\n};\n\n\n\npub type LD2 = Pin<PA10, PushPullOutput>;\n\npub type LD3 = Pin<PA7, PushPullOutput>;\n\npub type LD4 = Pin<PA6, PushPullOutput>;\n\n\n\npub struct Leds {\n\n leds: [Led; 3],\n\n}\n", "file_path": "src/leds.rs", "rank": 49, "score": 12.437204332384255 }, { "content": "//! # API for the REB1 button\n\n//!\n\n//! ## Examples\n\n//!\n\n//! - [Button Blinky with IRQs](https://egit.irs.uni-stuttgart.de/rust/vorago-reb1/src/branch/main/examples/blinky-button-irq.rs)\n\n//! 
- [Button Blinky with IRQs and RTIC](https://egit.irs.uni-stuttgart.de/rust/vorago-reb1/src/branch/main/examples/blinky-button-rtic.rs)\n\nuse va108xx_hal::{\n\n gpio::{FilterClkSel, FilterType, InputFloating, InterruptEdge, InterruptLevel, Pin, PA11},\n\n pac,\n\n prelude::*,\n\n utility::IrqCfg,\n\n};\n\n\n\npub struct Button {\n\n button: Pin<PA11, InputFloating>,\n\n}\n\n\n\nimpl Button {\n\n pub fn new(pin: Pin<PA11, InputFloating>) -> Button {\n\n Button { button: pin }\n", "file_path": "src/button.rs", "rank": 50, "score": 12.263853312723533 }, { "content": " }\n\n\n\n pub fn read_temperature(&mut self) -> Result<f32, Error> {\n\n if self.current_reg != RegAddresses::Temperature {\n\n self.select_reg(RegAddresses::Temperature)?;\n\n }\n\n let mut reply: [u8; 2] = [0; 2];\n\n self.sensor_if.read(ADT75_I2C_ADDR, &mut reply)?;\n\n let adc_code = (((reply[0] as u16) << 8) | reply[1] as u16) >> 4;\n\n let temp_celcius: f32 = if ((adc_code >> 11) & 0x01) == 0 {\n\n // Sign bit not set, positiv value\n\n // Divide ADC code by 16 according to datasheet\n\n adc_code as f32 / 16.0\n\n } else {\n\n // Calculation for negative values, assuming all 12 bits are used\n\n (adc_code - 4096) as f32 / 16.0\n\n };\n\n Ok(temp_celcius)\n\n }\n\n}\n", "file_path": "src/temp_sensor.rs", "rank": 51, "score": 9.743597238177971 }, { "content": "//! # API for the On-Board Analog Devices ADT75 temperature sensor\n\n//!\n\n//! [Datasheet](https://www.analog.com/media/en/technical-documentation/data-sheets/ADT75.pdf)\n\n//!\n\n//! ## Examples\n\n//!\n\n//! 
- [Temperature Sensor example](https://egit.irs.uni-stuttgart.de/rust/vorago-reb1/src/branch/main/examples/adt75-temp-sensor.rs)\n\nuse cortex_m::prelude::_embedded_hal_blocking_i2c_Write;\n\nuse embedded_hal::blocking::i2c::{Read, SevenBitAddress};\n\nuse va108xx_hal::{\n\n i2c::{Error, I2cMaster, I2cSpeed, MasterConfig},\n\n pac::{I2CA, SYSCONFIG},\n\n time::Hertz,\n\n};\n\n\n\nconst ADT75_I2C_ADDR: u8 = 0b1001000;\n\n\n\npub struct Adt75TempSensor {\n\n sensor_if: I2cMaster<I2CA, SevenBitAddress>,\n\n cmd_buf: [u8; 1],\n", "file_path": "src/temp_sensor.rs", "rank": 52, "score": 9.097222246532517 }, { "content": " }\n\n\n\n pub fn pressed(&self) -> bool {\n\n self.button.is_low().ok().unwrap()\n\n }\n\n\n\n pub fn released(&self) -> bool {\n\n self.button.is_high().ok().unwrap()\n\n }\n\n\n\n /// Configures an IRQ on edge.\n\n pub fn edge_irq(\n\n mut self,\n\n edge_type: InterruptEdge,\n\n irq_cfg: IrqCfg,\n\n syscfg: Option<&mut pac::SYSCONFIG>,\n\n irqsel: Option<&mut pac::IRQSEL>,\n\n ) -> Self {\n\n self.button = self\n\n .button\n", "file_path": "src/button.rs", "rank": 53, "score": 9.00196861967507 }, { "content": " .interrupt_edge(edge_type, irq_cfg, syscfg, irqsel);\n\n self\n\n }\n\n\n\n /// Configures an IRQ on level.\n\n pub fn level_irq(\n\n mut self,\n\n level: InterruptLevel,\n\n irq_cfg: IrqCfg,\n\n syscfg: Option<&mut pac::SYSCONFIG>,\n\n irqsel: Option<&mut pac::IRQSEL>,\n\n ) -> Self {\n\n self.button = self.button.interrupt_level(level, irq_cfg, syscfg, irqsel);\n\n self\n\n }\n\n\n\n /// Configures a filter on the button. 
This can be useful for debouncing the switch.\n\n ///\n\n /// Please note that you still have to set a clock divisor yourself using the\n\n /// [`va108xx_hal::clock::set_clk_div_register`] function in order for this to work.\n\n pub fn filter_type(mut self, filter: FilterType, clksel: FilterClkSel) -> Self {\n\n self.button = self.button.filter_type(filter, clksel);\n\n self\n\n }\n\n}\n", "file_path": "src/button.rs", "rank": 54, "score": 8.451529925337997 }, { "content": " current_reg: RegAddresses,\n\n}\n\n\n\n#[derive(PartialEq, Debug, Copy, Clone)]\n\npub enum RegAddresses {\n\n Temperature = 0x00,\n\n Configuration = 0x01,\n\n THystSetpoint = 0x02,\n\n TOsSetPoint = 0x03,\n\n OneShot = 0x04,\n\n}\n\n\n\nimpl Adt75TempSensor {\n\n pub fn new(\n\n i2ca: I2CA,\n\n sys_clk: impl Into<Hertz> + Copy,\n\n sys_cfg: Option<&mut SYSCONFIG>,\n\n ) -> Result<Self, Error> {\n\n let mut sensor = Adt75TempSensor {\n\n sensor_if: I2cMaster::i2ca(\n", "file_path": "src/temp_sensor.rs", "rank": 55, "score": 8.149440898353816 }, { "content": " self.pin.set_high().ok();\n\n }\n\n\n\n /// Turns the LED on. Setting the pin low actually turns the LED on\n\n pub fn on(&mut self) {\n\n self.pin.set_low().ok();\n\n }\n\n\n\n /// Toggles the LED\n\n pub fn toggle(&mut self) {\n\n self.pin.toggle().ok();\n\n }\n\n}\n", "file_path": "src/leds.rs", "rank": 56, "score": 7.417026449865679 }, { "content": "\n\nmacro_rules! ctor {\n\n\t($($ldx:ident),+) => {\n\n\t\t$(\n\n\t\t\timpl From<$ldx> for Led {\n\n\t\t\t\tfn from(led: $ldx) -> Self {\n\n\t\t\t\t\tLed {\n\n\t\t\t\t\t\tpin: led.into()\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t)+\n\n\t}\n\n}\n\n\n\nctor!(LD2, LD3, LD4);\n\n\n\nimpl Led {\n\n /// Turns the LED off. 
Setting the pin high actually turns the LED off\n\n pub fn off(&mut self) {\n", "file_path": "src/leds.rs", "rank": 57, "score": 5.972913485792705 }, { "content": " i2ca,\n\n MasterConfig::default(),\n\n sys_clk,\n\n I2cSpeed::Regular100khz,\n\n sys_cfg,\n\n ),\n\n cmd_buf: [RegAddresses::Temperature as u8],\n\n current_reg: RegAddresses::Temperature,\n\n };\n\n sensor.select_reg(RegAddresses::Temperature)?;\n\n Ok(sensor)\n\n }\n\n\n\n pub fn select_reg(&mut self, reg: RegAddresses) -> Result<(), Error> {\n\n if reg != self.current_reg {\n\n self.cmd_buf[0] = reg as u8;\n\n self.current_reg = reg;\n\n self.sensor_if.write(ADT75_I2C_ADDR, &self.cmd_buf[0..1])?;\n\n }\n\n Ok(())\n", "file_path": "src/temp_sensor.rs", "rank": 58, "score": 5.3149709600846275 }, { "content": " }\n\n}\n\n\n\nimpl core::ops::Index<usize> for Leds {\n\n type Output = Led;\n\n\n\n fn index(&self, i: usize) -> &Led {\n\n &self.leds[i]\n\n }\n\n}\n\n\n\nimpl core::ops::IndexMut<usize> for Leds {\n\n fn index_mut(&mut self, i: usize) -> &mut Led {\n\n &mut self.leds[i]\n\n }\n\n}\n\n\n\npub struct Led {\n\n pin: DynPin,\n\n}\n", "file_path": "src/leds.rs", "rank": 59, "score": 4.970026046653485 }, { "content": "\n\nimpl Leds {\n\n pub fn new(led_pin1: LD2, led_pin2: LD3, led_pin3: LD4) -> Leds {\n\n Leds {\n\n leds: [led_pin1.into(), led_pin2.into(), led_pin3.into()],\n\n }\n\n }\n\n}\n\n\n\nimpl core::ops::Deref for Leds {\n\n type Target = [Led];\n\n\n\n fn deref(&self) -> &[Led] {\n\n &self.leds\n\n }\n\n}\n\n\n\nimpl core::ops::DerefMut for Leds {\n\n fn deref_mut(&mut self) -> &mut [Led] {\n\n &mut self.leds\n", "file_path": "src/leds.rs", "rank": 60, "score": 4.611938832921907 }, { "content": "#![no_std]\n\n\n\npub mod button;\n\npub mod leds;\n\npub mod max11619;\n\npub mod temp_sensor;\n", "file_path": "src/lib.rs", "rank": 61, "score": 3.283563605272514 } ]
Rust
violetabftstore/src/store/worker/split_check.rs
whtcorpsinc/einsteindb-prod
3b1c63a421ff37f1ca6bef4d67ef7f4c4c78a6d7
use std::cmp::Ordering; use std::collections::BinaryHeap; use std::fmt::{self, Display, Formatter}; use std::mem; use edb::{CfName, IterOptions, Iterable, Iteron, CausetEngine, Causet_WRITE, LARGE_CausetS}; use ekvproto::meta_timeshare::Brane; use ekvproto::meta_timeshare::BraneEpoch; use ekvproto::fidel_timeshare::CheckPolicy; use crate::interlock::Config; use crate::interlock::InterlockHost; use crate::interlock::SplitCheckerHost; use crate::store::{Callback, CasualMessage, CasualRouter}; use crate::Result; use configuration::{ConfigChange, Configuration}; use violetabftstore::interlock::::CausetLearnedKey::CausetLearnedKey; use violetabftstore::interlock::::worker::Runnable; use super::metrics::*; #[derive(PartialEq, Eq)] pub struct KeyEntry { key: Vec<u8>, pos: usize, value_size: usize, causet: CfName, } impl KeyEntry { pub fn new(key: Vec<u8>, pos: usize, value_size: usize, causet: CfName) -> KeyEntry { KeyEntry { key, pos, value_size, causet, } } pub fn key(&self) -> &[u8] { self.key.as_ref() } pub fn is_commit_version(&self) -> bool { self.causet == Causet_WRITE } pub fn entry_size(&self) -> usize { self.value_size + self.key.len() } } impl PartialOrd for KeyEntry { fn partial_cmp(&self, rhs: &KeyEntry) -> Option<Ordering> { Some(self.key.cmp(&rhs.key).reverse()) } } impl Ord for KeyEntry { fn cmp(&self, rhs: &KeyEntry) -> Ordering { self.partial_cmp(rhs).unwrap() } } struct MergedIterator<I> { iters: Vec<(CfName, I)>, heap: BinaryHeap<KeyEntry>, } impl<I> MergedIterator<I> where I: Iteron, { fn new<E: CausetEngine>( db: &E, causets: &[CfName], spacelike_key: &[u8], lightlike_key: &[u8], fill_cache: bool, ) -> Result<MergedIterator<E::Iteron>> { let mut iters = Vec::with_capacity(causets.len()); let mut heap = BinaryHeap::with_capacity(causets.len()); for (pos, causet) in causets.iter().enumerate() { let iter_opt = IterOptions::new( Some(CausetLearnedKey::from_slice(spacelike_key, 0, 0)), Some(CausetLearnedKey::from_slice(lightlike_key, 0, 0)), fill_cache, 
); let mut iter = db.Iteron_causet_opt(causet, iter_opt)?; let found: Result<bool> = iter.seek(spacelike_key.into()).map_err(|e| box_err!(e)); if found? { heap.push(KeyEntry::new( iter.key().to_vec(), pos, iter.value().len(), *causet, )); } iters.push((*causet, iter)); } Ok(MergedIterator { iters, heap }) } fn next(&mut self) -> Option<KeyEntry> { let pos = match self.heap.peek() { None => return None, Some(e) => e.pos, }; let (causet, iter) = &mut self.iters[pos]; if iter.next().unwrap() { let mut e = KeyEntry::new(iter.key().to_vec(), pos, iter.value().len(), causet); let mut front = self.heap.peek_mut().unwrap(); mem::swap(&mut e, &mut front); Some(e) } else { self.heap.pop() } } } pub enum Task { SplitCheckTask { brane: Brane, auto_split: bool, policy: CheckPolicy, }, ChangeConfig(ConfigChange), #[causet(any(test, feature = "testexport"))] Validate(Box<dyn FnOnce(&Config) + lightlike>), } impl Task { pub fn split_check(brane: Brane, auto_split: bool, policy: CheckPolicy) -> Task { Task::SplitCheckTask { brane, auto_split, policy, } } } impl Display for Task { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Task::SplitCheckTask { brane, auto_split, .. 
} => write!( f, "[split check worker] Split Check Task for {}, auto_split: {:?}", brane.get_id(), auto_split ), Task::ChangeConfig(_) => write!(f, "[split check worker] Change Config Task"), #[causet(any(test, feature = "testexport"))] Task::Validate(_) => write!(f, "[split check worker] Validate config"), } } } pub struct Runner<E, S> where E: CausetEngine, { engine: E, router: S, interlock: InterlockHost<E>, causet: Config, } impl<E, S> Runner<E, S> where E: CausetEngine, S: CasualRouter<E>, { pub fn new(engine: E, router: S, interlock: InterlockHost<E>, causet: Config) -> Runner<E, S> { Runner { engine, router, interlock, causet, } } fn check_split(&mut self, brane: &Brane, auto_split: bool, policy: CheckPolicy) { let brane_id = brane.get_id(); let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane); let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane); debug!( "executing task"; "brane_id" => brane_id, "spacelike_key" => log_wrappers::Key(&spacelike_key), "lightlike_key" => log_wrappers::Key(&lightlike_key), ); CHECK_SPILT_COUNTER.all.inc(); let mut host = self.interlock.new_split_checker_host( &self.causet, brane, &self.engine, auto_split, policy, ); if host.skip() { debug!("skip split check"; "brane_id" => brane.get_id()); return; } let split_tuplespaceInstanton = match host.policy() { CheckPolicy::Scan => { match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, &lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } CheckPolicy::Approximate => match host.approximate_split_tuplespaceInstanton(brane, &self.engine) { Ok(tuplespaceInstanton) => tuplespaceInstanton .into_iter() .map(|k| tuplespaceInstanton::origin_key(&k).to_vec()) .collect(), Err(e) => { error!(%e; "failed to get approximate split key, try scan way"; "brane_id" => brane_id, ); match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, 
&lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } }, CheckPolicy::Usekey => vec![], }; if !split_tuplespaceInstanton.is_empty() { let brane_epoch = brane.get_brane_epoch().clone(); let msg = new_split_brane(brane_epoch, split_tuplespaceInstanton); let res = self.router.lightlike(brane_id, msg); if let Err(e) = res { warn!("failed to lightlike check result"; "brane_id" => brane_id, "err" => %e); } CHECK_SPILT_COUNTER.success.inc(); } else { debug!( "no need to lightlike, split key not found"; "brane_id" => brane_id, ); CHECK_SPILT_COUNTER.ignore.inc(); } } fn scan_split_tuplespaceInstanton( &self, host: &mut SplitCheckerHost<'_, E>, brane: &Brane, spacelike_key: &[u8], lightlike_key: &[u8], ) -> Result<Vec<Vec<u8>>> { let timer = CHECK_SPILT_HISTOGRAM.spacelike_coarse_timer(); MergedIterator::<<E as Iterable>::Iteron>::new( &self.engine, LARGE_CausetS, spacelike_key, lightlike_key, false, ) .map(|mut iter| { let mut size = 0; let mut tuplespaceInstanton = 0; while let Some(e) = iter.next() { if host.on_kv(brane, &e) { return; } size += e.entry_size() as u64; tuplespaceInstanton += 1; } info!( "fidelio approximate size and tuplespaceInstanton with accurate value"; "brane_id" => brane.get_id(), "size" => size, "tuplespaceInstanton" => tuplespaceInstanton, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateSize { size }, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateTuplespaceInstanton { tuplespaceInstanton }, ); })?; timer.observe_duration(); Ok(host.split_tuplespaceInstanton()) } fn change_causet(&mut self, change: ConfigChange) { info!( "split check config fideliod"; "change" => ?change ); self.causet.fidelio(change); } } impl<E, S> Runnable for Runner<E, S> where E: CausetEngine, S: CasualRouter<E>, { type Task = Task; fn run(&mut self, task: Task) { match task { Task::SplitCheckTask { brane, 
auto_split, policy, } => self.check_split(&brane, auto_split, policy), Task::ChangeConfig(c) => self.change_causet(c), #[causet(any(test, feature = "testexport"))] Task::Validate(f) => f(&self.causet), } } } fn new_split_brane<E>(brane_epoch: BraneEpoch, split_tuplespaceInstanton: Vec<Vec<u8>>) -> CasualMessage<E> where E: CausetEngine, { CasualMessage::SplitBrane { brane_epoch, split_tuplespaceInstanton, callback: Callback::None, } }
use std::cmp::Ordering; use std::collections::BinaryHeap; use std::fmt::{self, Display, Formatter}; use std::mem; use edb::{CfName, IterOptions, Iterable, Iteron, CausetEngine, Causet_WRITE, LARGE_CausetS}; use ekvproto::meta_timeshare::Brane; use ekvproto::meta_timeshare::BraneEpoch; use ekvproto::fidel_timeshare::CheckPolicy; use crate::interlock::Config; use crate::interlock::InterlockHost; use crate::interlock::SplitCheckerHost; use crate::store::{Callback, CasualMessage, CasualRouter}; use crate::Result; use configuration::{ConfigChange, Configuration}; use violetabftstore::interlock::::CausetLearnedKey::CausetLearnedKey; use violetabftstore::interlock::::worker::Runnable; use super::metrics::*; #[derive(PartialEq, Eq)] pub struct KeyEntry { key: Vec<u8>, pos: usize, value_size: usize, causet: CfName, } impl KeyEntry { pub fn new(key: Vec<u8>, pos: usize, value_size: usize, causet: CfName) -> KeyEntry { KeyEntry { key, pos, value_size, causet, } } pub fn key(&self) -> &[u8] { self.key.as_ref() } pub fn is_commit_version(&self) -> bool { self.causet == Causet_WRITE } pub fn entry_size(&self) -> usize { self.value_size + self.key.len() } } impl PartialOrd for KeyEntry { fn partial_cmp(&self, rhs: &KeyEntry) -> Option<Ordering> { Some(self.key.cmp(&rhs.key).reverse()) } } impl Ord for KeyEntry { fn cmp(&self, rhs: &KeyEntry) -> Ordering { self.partial_cmp(rhs).unwrap() } } struct MergedIterator<I> { iters: Vec<(CfName, I)>, heap: BinaryHeap<KeyEntry>, } impl<I> MergedIterator<I> where I: Iteron, { fn new<E: CausetEngine>( db: &E, causets: &[CfName], spacelike_key: &[u8], lightlike_key: &[u8], fill_cache: bool, ) -> Result<MergedIterator<E::Iteron>> { let mut iters = Vec::with_capacity(causets.len()); let mut heap = BinaryHeap::with_capacity(causets.len()); for (pos, causet) in causets.iter().enumerate() { let iter_opt = IterOptions::new( Some(CausetLearnedKey::from_slice(spacelike_key, 0, 0)), Some(CausetLearnedKey::from_slice(lightlike_key, 0, 0)), fill_cache, 
); let mut iter = db.Iteron_causet_opt(causet, iter_opt)?; let found: Result<bool> = iter.seek(spacelike_key.into()).map_err(|e| box_err!(e)); if found? { heap.push(KeyEntry::new( iter.key().to_vec(), pos, iter.value().len(), *causet, )); } iters.push((*causet, iter)); } Ok(MergedIterator { iters, heap }) } fn next(&mut self) -> Option<KeyEntry> { let pos = match self.heap.peek() { None => return None, Some(e) => e.pos, }; let (causet, iter) = &mut self.iters[pos]; if iter.next().unwrap() { let mut e = KeyEntry::new(iter.key().to_vec(), pos, iter.value().len(), causet); let mut front = self.heap.peek_mut().unwrap(); mem::swap(&mut e, &mut front); Some(e) } else { self.heap.pop() } } } pub enum Task { SplitCheckTask { brane: Brane, auto_split: bool, policy: CheckPolicy, }, ChangeConfig(ConfigChange), #[causet(any(test, feature = "testexport"))] Validate(Box<dyn FnOnce(&Config) + lightlike>), } impl Task { pub fn split_check(brane: Brane, auto_split: bool, policy: CheckPolicy) -> Task { Task::SplitCheckTask { brane, auto_split, policy, } } } impl Display for Task { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Task::SplitCheckTask { brane, auto_split, .. 
} => write!( f, "[split check worker] Split Check Task for {}, auto_split: {:?}", brane.get_id(), auto_split ), Task::ChangeConfig(_) => write!(f, "[split check worker] Change Config Task"), #[causet(any(test, feature = "testexport"))] Task::Validate(_) => write!(f, "[split check worker] Validate config"), } } } pub struct Runner<E, S> where E: CausetEngine, { engine: E, router: S, interlock: InterlockHost<E>, causet: Config, } impl<E, S> Runner<E, S> where E: CausetEngine, S: CasualRouter<E>, { pub fn new(engine: E, router: S, interlock: InterlockHost<E>, causet: Config) -> Runner<E, S> { Runner { engine, router, interlock, causet, } } fn check_split(&mut self, brane: &Brane, auto_split: bool, policy: CheckPolicy) { let brane_id = brane.get_id(); let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane); let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane); debug!( "executing task"; "brane_id" => brane_id, "spacelike_key" => log_wrappers::Key(&spacelike_key), "lightlike_key" => log_wrappers::Key(&lightlike_key), ); CHECK_SPILT_COUNTER.all.inc(); let mut host = self.interlock.new_split_checker_host( &self.causet, brane, &self.engine, auto_split, policy, ); if host.skip() { debug!("skip split check"; "brane_id" => brane.get_id()); return; } let split_tuplespaceInstanton = match host.policy() { CheckPolicy::Scan => { match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, &lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } CheckPolicy::Approximate => match host.approximate_split_tuplespaceInstanton(brane, &self.engine) { Ok(tuplespaceInstanton) => tuplespaceInstanton .into_iter() .map(|k| tuplespaceInstanton::origin_key(&k).to_vec()) .collect(), Err(e) => { error!(%e; "failed to get approximate split key, try scan way"; "brane_id" => brane_id, ); match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, 
&lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } }, CheckPolicy::Usekey => vec![], }; if !split_tuplespaceInstanton.is_empty() { let brane_epoch = brane.get_brane_epoch().clone(); let msg = new_split_brane(brane_epoch, split_tuplespaceInstanton); let res = self.router.lightlike(brane_id, msg); if let Err(e) = res { warn!("failed to lightlike check result"; "brane_id" => brane_id, "err" => %e); } CHECK_SPILT_COUNTER.success.inc(); } else { debug!( "no need to lightlike, split key not found"; "brane_id" => brane_id, ); CHECK_SPILT_COUNTER.ignore.inc(); } } fn scan_split_tuplespaceInstanton( &self, host: &mut SplitCheckerHost<'_, E>, brane: &Brane, spacelike_key: &[u8], lightlike_key: &[u8], ) -> Result<Vec<Vec<u8>>> { let timer = CHECK_SPILT_HISTOGRAM.spacelike_coarse_timer(); MergedIterator::<<E as Iterable>::Iteron>::new( &self.engine, LARGE_CausetS, spacelike_key, lightlike_key, false, ) .map(|mut iter| { let mut size = 0; let mut tuplespaceInstanton = 0; while let Some(e) = iter.next() { if host.on_kv(brane, &e) { return; } size += e.entry_size() as u64; tuplespaceInstanton += 1; } info!( "fidelio approximate size and tuplespaceInstanton with accurate value"; "brane_id" => brane.get_id(), "size" => size, "tuplespaceInstanton" => tuplespaceInstanton, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateSize { size }, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateTuplespaceInstanton { tuplespaceInstanton }, ); })?; timer.observe_duration(); Ok(host.split_tuplespaceInstanton()) }
} impl<E, S> Runnable for Runner<E, S> where E: CausetEngine, S: CasualRouter<E>, { type Task = Task; fn run(&mut self, task: Task) { match task { Task::SplitCheckTask { brane, auto_split, policy, } => self.check_split(&brane, auto_split, policy), Task::ChangeConfig(c) => self.change_causet(c), #[causet(any(test, feature = "testexport"))] Task::Validate(f) => f(&self.causet), } } } fn new_split_brane<E>(brane_epoch: BraneEpoch, split_tuplespaceInstanton: Vec<Vec<u8>>) -> CasualMessage<E> where E: CausetEngine, { CasualMessage::SplitBrane { brane_epoch, split_tuplespaceInstanton, callback: Callback::None, } }
fn change_causet(&mut self, change: ConfigChange) { info!( "split check config fideliod"; "change" => ?change ); self.causet.fidelio(change); }
function_block-full_function
[ { "content": "fn last_key_of_brane(db: &impl CausetEngine, brane: &Brane) -> Result<Option<Vec<u8>>> {\n\n let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane);\n\n let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane);\n\n let mut last_key = None;\n\n\n\n let iter_opt = IterOptions::new(\n\n Some(CausetLearnedKey::from_vec(spacelike_key, 0, 0)),\n\n Some(CausetLearnedKey::from_vec(lightlike_key, 0, 0)),\n\n false,\n\n );\n\n let mut iter = box_try!(db.Iteron_causet_opt(Causet_WRITE, iter_opt));\n\n\n\n // the last key\n\n let found: Result<bool> = iter.seek(SeekKey::End).map_err(|e| box_err!(e));\n\n if found? {\n\n let key = iter.key().to_vec();\n\n last_key = Some(key);\n\n } // else { No data in this Causet }\n\n\n\n match last_key {\n\n Some(lk) => Ok(Some(lk)),\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "violetabftstore/src/interlock/split_check/table.rs", "rank": 0, "score": 824946.0895377096 }, { "content": "fn scan_impl<Iter, F>(mut it: Iter, spacelike_key: &[u8], mut f: F) -> Result<()>\n\nwhere\n\n Iter: Iteron,\n\n F: FnMut(&[u8], &[u8]) -> Result<bool>,\n\n{\n\n let mut remained = it.seek(SeekKey::Key(spacelike_key))?;\n\n while remained {\n\n remained = f(it.key(), it.value())? 
&& it.next()?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl<'a> From<&'a [u8]> for SeekKey<'a> {\n\n fn from(bs: &'a [u8]) -> SeekKey {\n\n SeekKey::Key(bs)\n\n }\n\n}\n\n\n", "file_path": "edb/components/embedded_promises/src/iterable.rs", "rank": 1, "score": 706617.5198747386 }, { "content": "/// Collect all items of `it` into a vector, generally used for tests.\n\n///\n\n/// # Panics\n\n///\n\n/// If any errors occur during Iteron.\n\npub fn collect<I: Iteron>(mut it: I) -> Vec<(Vec<u8>, Vec<u8>)> {\n\n let mut v = Vec::new();\n\n let mut it_valid = it.valid().unwrap();\n\n while it_valid {\n\n let kv = (it.key().to_vec(), it.value().to_vec());\n\n v.push(kv);\n\n it_valid = it.next().unwrap();\n\n }\n\n v\n\n}\n", "file_path": "edb/components/embedded_promises/src/iterable.rs", "rank": 3, "score": 651733.2922400625 }, { "content": "fn decode_write(key: Vec<u8>, value: &[u8], Evcausetidx: &mut EventEvent) -> bool {\n\n let write = WriteRef::parse(value).unwrap().to_owned();\n\n let (op_type, r_type) = match write.write_type {\n\n WriteType::Put => (EventEventOpType::Put, EventLogType::Commit),\n\n WriteType::Delete => (EventEventOpType::Delete, EventLogType::Commit),\n\n WriteType::Rollback => (EventEventOpType::Unknown, EventLogType::Rollback),\n\n other => {\n\n debug!(\"skip write record\"; \"write\" => ?other, \"key\" => hex::encode_upper(key));\n\n return true;\n\n }\n\n };\n\n let key = Key::from_encoded(key);\n\n let commit_ts = if write.write_type == WriteType::Rollback {\n\n 0\n\n } else {\n\n key.decode_ts().unwrap().into_inner()\n\n };\n\n Evcausetidx.spacelike_ts = write.spacelike_ts.into_inner();\n\n Evcausetidx.commit_ts = commit_ts;\n\n Evcausetidx.key = key.truncate_ts().unwrap().into_raw().unwrap();\n\n Evcausetidx.op_type = op_type.into();\n\n set_event_row_type(Evcausetidx, r_type);\n\n if let Some(value) = write.short_value {\n\n Evcausetidx.value = value;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "edb/components/causet_context/src/pushdown.rs", 
"rank": 4, "score": 616959.4318415693 }, { "content": "pub fn must_get_causet_none(engine: &Arc<DB>, causet: &str, key: &[u8]) {\n\n must_get(engine, causet, key, None);\n\n}\n\n\n", "file_path": "edb/components/test_violetabftstore/src/util.rs", "rank": 5, "score": 611582.0128141578 }, { "content": "pub fn get_brane_approximate_size_causet(\n\n db: &impl CausetEngine,\n\n causetname: &str,\n\n brane: &Brane,\n\n large_memory_barrier: u64,\n\n) -> Result<u64> {\n\n let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane);\n\n let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane);\n\n let cone = Cone::new(&spacelike_key, &lightlike_key);\n\n Ok(box_try!(db.get_cone_approximate_size_causet(\n\n causetname,\n\n cone,\n\n brane.get_id(),\n\n large_memory_barrier\n\n )))\n\n}\n\n\n", "file_path": "violetabftstore/src/interlock/split_check/size.rs", "rank": 6, "score": 610949.8902579398 }, { "content": "pub fn must_get_causet_equal(engine: &Arc<DB>, causet: &str, key: &[u8], value: &[u8]) {\n\n must_get(engine, causet, key, Some(value));\n\n}\n\n\n", "file_path": "edb/components/test_violetabftstore/src/util.rs", "rank": 7, "score": 610119.643144043 }, { "content": "pub fn new_split_brane(policy: CheckPolicy, tuplespaceInstanton: Vec<Vec<u8>>) -> BraneHeartbeatResponse {\n\n let mut split_brane = SplitBrane::default();\n\n split_brane.set_policy(policy);\n\n split_brane.set_tuplespaceInstanton(tuplespaceInstanton.into());\n\n let mut resp = BraneHeartbeatResponse::default();\n\n resp.set_split_brane(split_brane);\n\n resp\n\n}\n\n\n", "file_path": "edb/components/test_violetabftstore/src/util.rs", "rank": 8, "score": 601967.5408863034 }, { "content": "/// Decode brane meta key, return the brane id and meta suffix type.\n\npub fn decode_brane_meta_key(key: &[u8]) -> Result<(u64, u8)> {\n\n decode_brane_key(REGION_META_PREFIX_KEY, key, \"meta\")\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 9, "score": 597387.1061718457 }, { 
"content": "/// Decode brane violetabft key, return the brane id and violetabft suffix type.\n\npub fn decode_brane_violetabft_key(key: &[u8]) -> Result<(u64, u8)> {\n\n decode_brane_key(REGION_VIOLETABFT_PREFIX_KEY, key, \"violetabft\")\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 10, "score": 597387.1061718457 }, { "content": "fn setup(causet: EINSTEINDBConfig, engine: Arc<DB>) -> (ConfigController, Worker<Task>) {\n\n let (router, _) = sync_channel(1);\n\n let runner = Runner::new(\n\n engine.c().clone(),\n\n router.clone(),\n\n InterlockHost::new(router),\n\n causet.interlock.clone(),\n\n );\n\n let mut worker: Worker<Task> = Worker::new(\"split-check-config\");\n\n worker.spacelike(runner).unwrap();\n\n\n\n let causet_controller = ConfigController::new(causet);\n\n causet_controller.register(\n\n Module::Interlock,\n\n Box::new(SplitCheckConfigManager(worker.interlock_semaphore())),\n\n );\n\n\n\n (causet_controller, worker)\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/split_check.rs", "rank": 11, "score": 596338.7433520264 }, { "content": "pub fn must_get(engine: &Arc<DB>, causet: &str, key: &[u8], value: Option<&[u8]>) {\n\n for _ in 1..300 {\n\n let res = engine.c().get_value_causet(causet, &tuplespaceInstanton::data_key(key)).unwrap();\n\n if let (Some(value), Some(res)) = (value, res.as_ref()) {\n\n assert_eq!(value, &res[..]);\n\n return;\n\n }\n\n if value.is_none() && res.is_none() {\n\n return;\n\n }\n\n thread::sleep(Duration::from_millis(20));\n\n }\n\n debug!(\"last try to get {}\", hex::encode_upper(key));\n\n let res = engine.c().get_value_causet(causet, &tuplespaceInstanton::data_key(key)).unwrap();\n\n if value.is_none() && res.is_none()\n\n || value.is_some() && res.is_some() && value.unwrap() == &*res.unwrap()\n\n {\n\n return;\n\n }\n\n panic!(\n\n \"can't get value {:?} for key {}\",\n\n value.map(escape),\n\n hex::encode_upper(key)\n\n )\n\n}\n\n\n", "file_path": 
"edb/components/test_violetabftstore/src/util.rs", "rank": 12, "score": 593968.9491190187 }, { "content": "/// Check if key in brane cone [`spacelike_key`, `lightlike_key`).\n\npub fn check_key_in_brane(key: &[u8], brane: &meta_timeshare::Brane) -> Result<()> {\n\n let lightlike_key = brane.get_lightlike_key();\n\n let spacelike_key = brane.get_spacelike_key();\n\n if key >= spacelike_key && (lightlike_key.is_empty() || key < lightlike_key) {\n\n Ok(())\n\n } else {\n\n Err(Error::KeyNotInBrane(key.to_vec(), brane.clone()))\n\n }\n\n}\n\n\n\n/// `is_first_vote_msg` checks `msg` is the first vote (or prevote) message or not. It's used for\n\n/// when the message is received but there is no such brane in `CausetStore::brane_peers` and the\n\n/// brane overlaps with others. In this case we should put `msg` into `plightlikeing_votes` instead of\n\n/// create the peer.\n", "file_path": "violetabftstore/src/store/util.rs", "rank": 13, "score": 587320.3728689658 }, { "content": "pub fn get_brane_approximate_tuplespaceInstanton_causet(\n\n db: &impl CausetEngine,\n\n causetname: &str,\n\n brane: &Brane,\n\n large_memory_barrier: u64,\n\n) -> Result<u64> {\n\n let spacelike = tuplespaceInstanton::enc_spacelike_key(brane);\n\n let lightlike = tuplespaceInstanton::enc_lightlike_key(brane);\n\n let cone = Cone::new(&spacelike, &lightlike);\n\n Ok(box_try!(db.get_cone_approximate_tuplespaceInstanton_causet(\n\n causetname,\n\n cone,\n\n brane.get_id(),\n\n large_memory_barrier\n\n )))\n\n}\n\n\n\n#[causet(test)]\n\nmod tests {\n\n use super::super::size::tests::must_split_at;\n", "file_path": "violetabftstore/src/interlock/split_check/keys.rs", "rank": 14, "score": 584532.0036712079 }, { "content": "/// Check if key in brane cone (`spacelike_key`, `lightlike_key`).\n\npub fn check_key_in_brane_exclusive(key: &[u8], brane: &meta_timeshare::Brane) -> Result<()> {\n\n let lightlike_key = brane.get_lightlike_key();\n\n let spacelike_key = brane.get_spacelike_key();\n\n if 
spacelike_key < key && (key < lightlike_key || lightlike_key.is_empty()) {\n\n Ok(())\n\n } else {\n\n Err(Error::KeyNotInBrane(key.to_vec(), brane.clone()))\n\n }\n\n}\n\n\n", "file_path": "violetabftstore/src/store/util.rs", "rank": 15, "score": 580947.27555271 }, { "content": "/// Check if key in brane cone [`spacelike_key`, `lightlike_key`].\n\npub fn check_key_in_brane_inclusive(key: &[u8], brane: &meta_timeshare::Brane) -> Result<()> {\n\n let lightlike_key = brane.get_lightlike_key();\n\n let spacelike_key = brane.get_spacelike_key();\n\n if key >= spacelike_key && (lightlike_key.is_empty() || key <= lightlike_key) {\n\n Ok(())\n\n } else {\n\n Err(Error::KeyNotInBrane(key.to_vec(), brane.clone()))\n\n }\n\n}\n\n\n", "file_path": "violetabftstore/src/store/util.rs", "rank": 16, "score": 580947.27555271 }, { "content": "/// Get the `lightlike_key` of current brane in encoded form.\n\npub fn enc_lightlike_key(brane: &Brane) -> Vec<u8> {\n\n // only initialized brane's lightlike_key can be encoded, otherwise there must be bugs\n\n // somewhere.\n\n assert!(!brane.get_peers().is_empty());\n\n data_lightlike_key(brane.get_lightlike_key())\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 17, "score": 576196.4664844207 }, { "content": "pub fn get_engine_causet_used_size(engine: &DB, handle: &CausetHandle) -> u64 {\n\n let mut causet_used_size = engine\n\n .get_property_int_causet(handle, LMDB_TOTAL_SST_FILES_SIZE)\n\n .expect(\"lmdb is too old, missing total-sst-files-size property\");\n\n // For memBlock\n\n if let Some(mem_Block) = engine.get_property_int_causet(handle, LMDB_CUR_SIZE_ALL_MEM_BlockS) {\n\n causet_used_size += mem_Block;\n\n }\n\n // For blob files\n\n if let Some(live_blob) = engine.get_property_int_causet(handle, LMDB_TITANDB_LIVE_BLOB_FILE_SIZE)\n\n {\n\n causet_used_size += live_blob;\n\n }\n\n if let Some(obsolete_blob) =\n\n engine.get_property_int_causet(handle, LMDB_TITANDB_OBSOLETE_BLOB_FILE_SIZE)\n\n {\n\n 
causet_used_size += obsolete_blob;\n\n }\n\n\n\n causet_used_size\n\n}\n\n\n", "file_path": "edb/components/embedded_lmdb/src/util.rs", "rank": 18, "score": 574505.837184384 }, { "content": "fn decode_lock(key: Vec<u8>, value: &[u8], Evcausetidx: &mut EventEvent) -> bool {\n\n let dagger = Dagger::parse(value).unwrap();\n\n let op_type = match dagger.lock_type {\n\n LockType::Put => EventEventOpType::Put,\n\n LockType::Delete => EventEventOpType::Delete,\n\n other => {\n\n debug!(\"skip dagger record\";\n\n \"type\" => ?other,\n\n \"spacelike_ts\" => ?dagger.ts,\n\n \"key\" => hex::encode_upper(key),\n\n \"for_fidelio_ts\" => ?dagger.for_fidelio_ts);\n\n return true;\n\n }\n\n };\n\n let key = Key::from_encoded(key);\n\n Evcausetidx.spacelike_ts = dagger.ts.into_inner();\n\n Evcausetidx.key = key.into_raw().unwrap();\n\n Evcausetidx.op_type = op_type.into();\n\n set_event_row_type(Evcausetidx, EventLogType::Prewrite);\n\n if let Some(value) = dagger.short_value {\n\n Evcausetidx.value = value;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "edb/components/causet_context/src/pushdown.rs", "rank": 19, "score": 572071.7299389363 }, { "content": "fn causet_storage_raw_get<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let store = SyncTestStorageBuilder::from_engine(engine).build().unwrap();\n\n b.iter_batched(\n\n || {\n\n let kvs = KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS);\n\n let data: Vec<(Context, Vec<u8>)> = kvs\n\n .iter()\n\n .map(|(k, _)| (Context::default(), k.clone()))\n\n .collect();\n\n (data, &store)\n\n },\n\n |(data, store)| {\n\n for (context, key) in data {\n\n black_box(store.raw_get(context, Causet_DEFAULT.to_owned(), key).unwrap());\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/storage/mod.rs", "rank": 20, "score": 571645.938093781 }, { "content": "/// Get the 
approximate size of the cone.\n\npub fn get_brane_approximate_size(\n\n db: &impl CausetEngine,\n\n brane: &Brane,\n\n large_memory_barrier: u64,\n\n) -> Result<u64> {\n\n let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane);\n\n let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane);\n\n let cone = Cone::new(&spacelike_key, &lightlike_key);\n\n Ok(box_try!(db.get_cone_approximate_size(\n\n cone,\n\n brane.get_id(),\n\n large_memory_barrier\n\n )))\n\n}\n\n\n", "file_path": "violetabftstore/src/interlock/split_check/size.rs", "rank": 21, "score": 570424.0548116311 }, { "content": "#[inline]\n\npub fn build_key_cone(spacelike_key: &[u8], lightlike_key: &[u8], reverse_scan: bool) -> KeyCone {\n\n let mut cone = KeyCone::default();\n\n if reverse_scan {\n\n cone.set_spacelike_key(lightlike_key.to_vec());\n\n cone.set_lightlike_key(spacelike_key.to_vec());\n\n } else {\n\n cone.set_spacelike_key(spacelike_key.to_vec());\n\n cone.set_lightlike_key(lightlike_key.to_vec());\n\n }\n\n cone\n\n}\n\n\n", "file_path": "violetabftstore/src/store/util.rs", "rank": 22, "score": 559579.3744345654 }, { "content": "pub fn apply_sst_causet_file<E>(path: &str, db: &E, causet: &str) -> Result<(), Error>\n\nwhere\n\n E: CausetEngine,\n\n{\n\n let causet_handle = box_try!(db.causet_handle(causet));\n\n let mut ingest_opt = <E as ImportExt>::IngestExternalFileOptions::new();\n\n ingest_opt.move_files(true);\n\n box_try!(db.ingest_external_file_causet(causet_handle, &ingest_opt, &[path]));\n\n Ok(())\n\n}\n\n\n", "file_path": "violetabftstore/src/store/snap/io.rs", "rank": 23, "score": 559339.5913064238 }, { "content": "pub fn load_configs<E: Engine, F: EngineFactory<E>>(engine_factory: F) -> Vec<BenchConfig<F>> {\n\n let key_lengths = DEFAULT_KEY_LENGTHS;\n\n let value_lengths = DEFAULT_VALUE_LENGTHS;\n\n let mut configs = vec![];\n\n\n\n for &kl in &key_lengths {\n\n for &vl in &value_lengths {\n\n configs.push(BenchConfig {\n\n key_length: kl,\n\n value_length: 
vl,\n\n engine_factory,\n\n })\n\n }\n\n }\n\n configs\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mod.rs", "rank": 24, "score": 557934.6795268164 }, { "content": "fn assert_has_causet<E: Engine>(ctx: &Context, engine: &E, causet: CfName, key: &[u8], value: &[u8]) {\n\n let snapshot = engine.snapshot(ctx).unwrap();\n\n assert_eq!(\n\n snapshot.get_causet(causet, &Key::from_raw(key)).unwrap().unwrap(),\n\n value\n\n );\n\n}\n\n\n", "file_path": "tests/integrations/storage/test_violetabftkv.rs", "rank": 25, "score": 557210.1891474454 }, { "content": "pub fn must_get_none(engine: &Arc<DB>, key: &[u8]) {\n\n must_get(engine, \"default\", key, None);\n\n}\n\n\n", "file_path": "edb/components/test_violetabftstore/src/util.rs", "rank": 26, "score": 556063.7719308775 }, { "content": "pub fn must_get_equal(engine: &Arc<DB>, key: &[u8], value: &[u8]) {\n\n must_get(engine, \"default\", key, Some(value));\n\n}\n\n\n", "file_path": "edb/components/test_violetabftstore/src/util.rs", "rank": 27, "score": 554436.1243217788 }, { "content": "fn must_put_causet<E: Engine>(ctx: &Context, engine: &E, causet: CfName, key: &[u8], value: &[u8]) {\n\n engine\n\n .put_causet(ctx, causet, Key::from_raw(key), value.to_vec())\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/storage/test_violetabftkv.rs", "rank": 28, "score": 551831.916622719 }, { "content": "/// Get the brane id and index from violetabft log key generated by `violetabft_log_key`.\n\npub fn decode_violetabft_log_key(key: &[u8]) -> Result<(u64, u64)> {\n\n let suffix_idx = REGION_VIOLETABFT_PREFIX_KEY.len() + mem::size_of::<u64>();\n\n let expect_key_len = suffix_idx + mem::size_of::<u8>() + mem::size_of::<u64>();\n\n if key.len() != expect_key_len\n\n || !key.spacelikes_with(REGION_VIOLETABFT_PREFIX_KEY)\n\n || key[suffix_idx] != VIOLETABFT_LOG_SUFFIX\n\n {\n\n return Err(Error::InvalidVioletaBftLogKey(key.to_owned()));\n\n }\n\n let brane_id = 
BigEndian::read_u64(&key[REGION_VIOLETABFT_PREFIX_KEY.len()..suffix_idx]);\n\n let index = BigEndian::read_u64(&key[suffix_idx + mem::size_of::<u8>()..]);\n\n Ok((brane_id, index))\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 29, "score": 549797.0836273817 }, { "content": "#[inline]\n\npub fn data_lightlike_key(brane_lightlike_key: &[u8]) -> Vec<u8> {\n\n if brane_lightlike_key.is_empty() {\n\n DATA_MAX_KEY.to_vec()\n\n } else {\n\n data_key(brane_lightlike_key)\n\n }\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 30, "score": 547578.2564431791 }, { "content": "fn assert_none_causet<E: Engine>(ctx: &Context, engine: &E, causet: CfName, key: &[u8]) {\n\n let snapshot = engine.snapshot(ctx).unwrap();\n\n assert_eq!(snapshot.get_causet(causet, &Key::from_raw(key)).unwrap(), None);\n\n}\n\n\n", "file_path": "tests/integrations/storage/test_violetabftkv.rs", "rank": 31, "score": 547231.2806564593 }, { "content": "fn validate<F>(interlock_semaphore: &Interlock_Semaphore<Task>, f: F)\n\nwhere\n\n F: FnOnce(&Config) + lightlike + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel();\n\n interlock_semaphore\n\n .schedule(Task::Validate(Box::new(move |causet: &Config| {\n\n f(causet);\n\n tx.lightlike(()).unwrap();\n\n })))\n\n .unwrap();\n\n rx.recv_timeout(Duration::from_secs(1)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/config/dynamic/split_check.rs", "rank": 32, "score": 544923.0955369557 }, { "content": "/// Get the approximate number of tuplespaceInstanton in the cone.\n\npub fn get_brane_approximate_tuplespaceInstanton(\n\n db: &impl CausetEngine,\n\n brane: &Brane,\n\n large_memory_barrier: u64,\n\n) -> Result<u64> {\n\n let spacelike = tuplespaceInstanton::enc_spacelike_key(brane);\n\n let lightlike = tuplespaceInstanton::enc_lightlike_key(brane);\n\n let cone = Cone::new(&spacelike, &lightlike);\n\n Ok(box_try!(db.get_cone_approximate_tuplespaceInstanton(\n\n cone,\n\n brane.get_id(),\n\n 
large_memory_barrier\n\n )))\n\n}\n\n\n", "file_path": "violetabftstore/src/interlock/split_check/keys.rs", "rank": 33, "score": 544822.6954126913 }, { "content": "/// Decodes bytes which are encoded by `encode_bytes` before.\n\n///\n\n/// Please note that, data is a mut reference to slice. After calling this the\n\n/// slice that data point to would change.\n\npub fn decode_bytes(data: &mut BytesSlice<'_>, desc: bool) -> Result<Vec<u8>> {\n\n let mut key = Vec::with_capacity(data.len() / (ENC_GROUP_SIZE + 1) * ENC_GROUP_SIZE);\n\n let mut offset = 0;\n\n let Soliton_len = ENC_GROUP_SIZE + 1;\n\n loop {\n\n // everytime make ENC_GROUP_SIZE + 1 elements as a decode unit\n\n let next_offset = offset + Soliton_len;\n\n let Soliton = if next_offset <= data.len() {\n\n &data[offset..next_offset]\n\n } else {\n\n return Err(Error::unexpected_eof());\n\n };\n\n offset = next_offset;\n\n // the last byte in decode unit is for marker which indicates pad size\n\n let (&marker, bytes) = Soliton.split_last().unwrap();\n\n let pad_size = if desc {\n\n marker as usize\n\n } else {\n\n (ENC_MARKER - marker) as usize\n\n };\n", "file_path": "edb/edbn/einsteindb_util/src/codec/bytes.rs", "rank": 34, "score": 542183.1112837968 }, { "content": "pub fn bench_engine<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n c.bench_function_over_inputs(\n\n \"engine_get(exclude snapshot)\",\n\n bench_engine_get,\n\n configs.to_vec(),\n\n );\n\n c.bench_function_over_inputs(\"engine_put\", bench_engine_put, configs.to_owned());\n\n c.bench_function_over_inputs(\"engine_snapshot\", bench_engine_snapshot, configs.to_owned());\n\n}\n", "file_path": "tests/benches/hierarchy/engine/mod.rs", "rank": 35, "score": 541271.3245418028 }, { "content": "fn check_kv_in_all_causets(db: &DB, i: u8, found: bool) {\n\n for causet_name in &[Causet_DEFAULT, Causet_DAGGER] {\n\n let handle = db.causet_handle(causet_name).unwrap();\n\n let k = 
tuplespaceInstanton::data_key(&[i]);\n\n let v = db.get_causet(handle, &k).unwrap();\n\n if found {\n\n assert_eq!(v.unwrap(), &k);\n\n } else {\n\n assert!(v.is_none());\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/integrations/violetabftstore/test_clear_stale_data.rs", "rank": 36, "score": 539828.1621151777 }, { "content": "/// Decodes bytes which are encoded by `encode_bytes` before just in place without malloc.\n\n/// Please use this instead of `decode_bytes` if possible.\n\npub fn decode_bytes_in_place(data: &mut Vec<u8>, desc: bool) -> Result<()> {\n\n let mut write_offset = 0;\n\n let mut read_offset = 0;\n\n loop {\n\n let marker_offset = read_offset + ENC_GROUP_SIZE;\n\n if marker_offset >= data.len() {\n\n return Err(Error::unexpected_eof());\n\n };\n\n\n\n unsafe {\n\n // it is semantically equivalent to C's memmove()\n\n // and the src and dest may overlap\n\n // if src == dest do nothing\n\n ptr::copy(\n\n data.as_ptr().add(read_offset),\n\n data.as_mut_ptr().add(write_offset),\n\n ENC_GROUP_SIZE,\n\n );\n\n }\n\n write_offset += ENC_GROUP_SIZE;\n", "file_path": "edb/edbn/einsteindb_util/src/codec/bytes.rs", "rank": 37, "score": 536864.6903643704 }, { "content": "/// Get the log index from violetabft log key generated by `violetabft_log_key`.\n\npub fn violetabft_log_index(key: &[u8]) -> Result<u64> {\n\n let expect_key_len = REGION_VIOLETABFT_PREFIX_KEY.len()\n\n + mem::size_of::<u64>()\n\n + mem::size_of::<u8>()\n\n + mem::size_of::<u64>();\n\n if key.len() != expect_key_len {\n\n return Err(Error::InvalidVioletaBftLogKey(key.to_owned()));\n\n }\n\n Ok(BigEndian::read_u64(\n\n &key[expect_key_len - mem::size_of::<u64>()..],\n\n ))\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 38, "score": 535673.9491838898 }, { "content": "fn causet_storage_commit<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let store = 
SyncTestStorageBuilder::from_engine(engine).build().unwrap();\n\n b.iter_batched(\n\n || {\n\n let kvs = KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS);\n\n\n\n for (k, v) in &kvs {\n\n store\n\n .prewrite(\n\n Context::default(),\n\n vec![Mutation::Put((Key::from_raw(&k), v.clone()))],\n\n k.clone(),\n\n 1,\n\n )\n\n .unwrap();\n\n }\n\n\n\n (kvs, &store)\n\n },\n\n |(kvs, store)| {\n\n for (k, _) in &kvs {\n\n black_box(store.commit(Context::default(), vec![Key::from_raw(k)], 1, 2)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/storage/mod.rs", "rank": 39, "score": 534608.904213476 }, { "content": "fn causet_storage_prewrite<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let store = SyncTestStorageBuilder::from_engine(engine).build().unwrap();\n\n b.iter_batched(\n\n || {\n\n let kvs = KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS);\n\n\n\n let data: Vec<(Context, Vec<Mutation>, Vec<u8>)> = kvs\n\n .iter()\n\n .map(|(k, v)| {\n\n (\n\n Context::default(),\n\n vec![Mutation::Put((Key::from_raw(&k), v.clone()))],\n\n k.clone(),\n\n )\n\n })\n\n .collect();\n\n (data, &store)\n\n },\n\n |(data, store)| {\n\n for (context, mutations, primary) in data {\n\n black_box(store.prewrite(context, mutations, primary, 1).unwrap());\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/storage/mod.rs", "rank": 40, "score": 534608.9042134759 }, { "content": "#[causet(not(unix))]\n\npub fn check_max_open_fds(_: u64) -> Result<(), ConfigError> {\n\n Ok(())\n\n}\n\n\n\n#[causet(target_os = \"linux\")]\n\nmod check_kernel {\n\n use std::fs;\n\n\n\n use super::ConfigError;\n\n\n\n // pub for tests.\n\n pub type Checker = dyn Fn(i64, i64) -> bool;\n\n\n\n // pub for tests.\n\n pub fn check_kernel_params(\n\n 
param_path: &str,\n\n expect: i64,\n\n checker: Box<Checker>,\n\n ) -> Result<(), ConfigError> {\n\n let buffer = fs::read_to_string(param_path)\n", "file_path": "edb/edbn/einsteindb_util/src/config.rs", "rank": 41, "score": 532895.5612812105 }, { "content": "fn can_read<E: Engine>(ctx: &Context, engine: &E, key: &[u8], value: &[u8]) -> bool {\n\n if let Ok(s) = engine.snapshot(ctx) {\n\n assert_eq!(s.get(&Key::from_raw(key)).unwrap().unwrap(), value);\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "tests/integrations/storage/test_violetabftkv.rs", "rank": 42, "score": 530184.1923112073 }, { "content": "/// `encode` encodes a datum slice into a buffer.\n\n/// Uses comparable to encode or not to encode a memory comparable buffer.\n\npub fn encode(ctx: &mut EvalContext, values: &[Datum], comparable: bool) -> Result<Vec<u8>> {\n\n let mut buf = vec![];\n\n encode_to(ctx, &mut buf, values, comparable)?;\n\n buf.shrink_to_fit();\n\n Ok(buf)\n\n}\n\n\n", "file_path": "edb/milevadb_query/milevadb_query_datatype/src/codec/datum.rs", "rank": 43, "score": 528518.6894075731 }, { "content": "pub fn bench_txn<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n c.bench_function_over_inputs(\"txn_prewrite\", txn_prewrite, configs.to_owned());\n\n c.bench_function_over_inputs(\"txn_commit\", txn_commit, configs.to_owned());\n\n c.bench_function_over_inputs(\n\n \"txn_rollback_prewrote\",\n\n txn_rollback_prewrote,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"txn_rollback_conflict\",\n\n txn_rollback_conflict,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"txn_rollback_non_prewrote\",\n\n txn_rollback_non_prewrote,\n\n configs.to_owned(),\n\n );\n\n}\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 44, "score": 527493.0544986892 }, { "content": "/// Get the `spacelike_key` of current brane in encoded form.\n\npub fn enc_spacelike_key(brane: &Brane) -> Vec<u8> {\n\n // only 
initialized brane's spacelike_key can be encoded, otherwise there must be bugs\n\n // somewhere.\n\n assert!(!brane.get_peers().is_empty());\n\n data_key(brane.get_spacelike_key())\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 45, "score": 526490.3351091496 }, { "content": "pub fn validate_data_key(key: &[u8]) -> bool {\n\n key.spacelikes_with(DATA_PREFIX_KEY)\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 46, "score": 522431.84589553194 }, { "content": "pub fn bench_tail_pointer<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n c.bench_function_over_inputs(\"tail_pointer_prewrite\", tail_pointer_prewrite, configs.to_owned());\n\n c.bench_function_over_inputs(\"tail_pointer_commit\", tail_pointer_commit, configs.to_owned());\n\n c.bench_function_over_inputs(\n\n \"tail_pointer_rollback_prewrote\",\n\n tail_pointer_rollback_prewrote,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"tail_pointer_rollback_conflict\",\n\n tail_pointer_rollback_conflict,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\n\n \"tail_pointer_rollback_non_prewrote\",\n\n tail_pointer_rollback_non_prewrote,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\"tail_pointer_load_lock\", tail_pointer_reader_load_lock, configs.to_owned());\n\n c.bench_function_over_inputs(\n\n \"tail_pointer_seek_write\",\n\n tail_pointer_reader_seek_write,\n\n configs.to_owned(),\n\n );\n\n}\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 47, "score": 522119.737381283 }, { "content": "/// `encode_key` encodes a datum slice into a memory comparable buffer as the key.\n\npub fn encode_key(ctx: &mut EvalContext, values: &[Datum]) -> Result<Vec<u8>> {\n\n encode(ctx, values, true)\n\n}\n\n\n", "file_path": "edb/milevadb_query/milevadb_query_datatype/src/codec/datum.rs", "rank": 48, "score": 520446.65993560007 }, { "content": "pub fn new_put_causet_cmd(causet: &str, key: 
&[u8], value: &[u8]) -> Request {\n\n let mut cmd = Request::default();\n\n cmd.set_cmd_type(CmdType::Put);\n\n cmd.mut_put().set_key(key.to_vec());\n\n cmd.mut_put().set_value(value.to_vec());\n\n cmd.mut_put().set_causet(causet.to_string());\n\n cmd\n\n}\n\n\n", "file_path": "edb/components/test_violetabftstore/src/util.rs", "rank": 49, "score": 518476.75968593254 }, { "content": "pub fn new_get_causet_cmd(causet: &str, key: &[u8]) -> Request {\n\n let mut cmd = Request::default();\n\n cmd.set_cmd_type(CmdType::Get);\n\n cmd.mut_get().set_key(key.to_vec());\n\n cmd.mut_get().set_causet(causet.to_string());\n\n cmd\n\n}\n\n\n", "file_path": "edb/components/test_violetabftstore/src/util.rs", "rank": 50, "score": 517434.2953121515 }, { "content": "pub fn check_db_cone<E>(db: &E, cone: (u8, u8))\n\nwhere\n\n E: CausetEngine,\n\n{\n\n for i in cone.0..cone.1 {\n\n let k = tuplespaceInstanton::data_key(&[i]);\n\n assert_eq!(db.get_value(&k).unwrap().unwrap(), &[i]);\n\n }\n\n}\n\n\n", "file_path": "edb/components/test_sst_importer/src/lib.rs", "rank": 51, "score": 516601.330238246 }, { "content": "pub fn brane_state_key(brane_id: u64) -> [u8; 11] {\n\n make_brane_meta_key(brane_id, REGION_STATE_SUFFIX)\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 52, "score": 513633.2452681412 }, { "content": "#[inline]\n\npub fn decode_common_handle(mut key: &[u8]) -> Result<&[u8]> {\n\n check_record_key(key)?;\n\n key = &key[PREFIX_LEN..];\n\n Ok(key)\n\n}\n\n\n", "file_path": "edb/milevadb_query/milevadb_query_datatype/src/codec/table.rs", "rank": 53, "score": 509058.43686444045 }, { "content": "#[inline]\n\nfn read_num_bytes<T, F>(size: usize, data: &mut &[u8], f: F) -> Result<T>\n\nwhere\n\n F: Fn(&[u8]) -> T,\n\n{\n\n if data.len() >= size {\n\n let buf = &data[..size];\n\n *data = &data[size..];\n\n return Ok(f(buf));\n\n }\n\n Err(Error::unexpected_eof())\n\n}\n\n\n\n/// Decodes value encoded by `encode_i64` before.\n", "file_path": 
"edb/edbn/einsteindb_util/src/codec/number.rs", "rank": 54, "score": 508817.24577187106 }, { "content": "fn get_entry_size(value: &[u8], entry_type: DBEntryType) -> std::result::Result<u64, ()> {\n\n match entry_type {\n\n DBEntryType::Put => Ok(value.len() as u64),\n\n DBEntryType::BlobIndex => match NoetherBlobIndex::decode(value) {\n\n Ok(index) => Ok(index.blob_size + value.len() as u64),\n\n Err(_) => Err(()),\n\n },\n\n _ => Err(()),\n\n }\n\n}\n\n\n\n// Deprecated. Only for compatible issue from v2.0 or older version.\n\n#[derive(Debug, Default)]\n\npub struct SizeProperties {\n\n pub total_size: u64,\n\n pub index_handles: IndexHandles,\n\n}\n\n\n\nimpl SizeProperties {\n\n pub fn encode(&self) -> UserProperties {\n", "file_path": "causetq/causetq-allegrosql/src/properties.rs", "rank": 55, "score": 508265.95444746595 }, { "content": "/// Get brane approximate middle key based on default and write causet size.\n\npub fn get_brane_approximate_middle(\n\n db: &impl CausetEngine,\n\n brane: &Brane,\n\n) -> Result<Option<Vec<u8>>> {\n\n let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane);\n\n let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane);\n\n let cone = Cone::new(&spacelike_key, &lightlike_key);\n\n Ok(box_try!(\n\n db.get_cone_approximate_middle(cone, brane.get_id())\n\n ))\n\n}\n\n\n\n/// Get the approximate middle key of the brane. If we suppose the brane\n\n/// is stored on disk as a plain file, \"middle key\" means the key whose\n\n/// position is in the middle of the file.\n\n///\n\n/// The returned key maybe is timestamped if transaction KV is used,\n\n/// and must spacelike with \"z\".\n\n///\n\n/// FIXME the causet(test) here probably indicates that the test doesn't belong\n\n/// here. 
It should be a test of the edb or engine_lmdb crates.\n", "file_path": "violetabftstore/src/interlock/split_check/half.rs", "rank": 56, "score": 503202.20457188983 }, { "content": "fn must_delete_causet<E: Engine>(ctx: &Context, engine: &E, causet: CfName, key: &[u8]) {\n\n engine.delete_causet(ctx, causet, Key::from_raw(key)).unwrap();\n\n}\n\n\n", "file_path": "tests/integrations/storage/test_violetabftkv.rs", "rank": 57, "score": 503173.91803981585 }, { "content": "/// Convert the key to the smallest key which is larger than the key given.\n\npub fn convert_to_prefix_next(key: &mut Vec<u8>) {\n\n if key.is_empty() {\n\n key.push(0);\n\n return;\n\n }\n\n let mut i = key.len() - 1;\n\n\n\n // Add 1 to the last byte that is not 255, and set it's following bytes to 0.\n\n loop {\n\n if key[i] == 255 {\n\n key[i] = 0;\n\n } else {\n\n key[i] += 1;\n\n return;\n\n }\n\n if i == 0 {\n\n // All bytes are 255. Applightlike a 0 to the key.\n\n for byte in key.iter_mut() {\n\n *byte = 255;\n\n }\n\n key.push(0);\n\n return;\n\n }\n\n i -= 1;\n\n }\n\n}\n\n\n", "file_path": "edb/milevadb_query/milevadb_query_common/src/util.rs", "rank": 58, "score": 501502.5947934098 }, { "content": "// It will return prefix sum of iter. 
`read` is a function to be used to read data from iter.\n\nfn prefix_sum<F, T>(iter: Iter<T>, read: F) -> Vec<usize>\n\nwhere\n\n F: Fn(&T) -> usize,\n\n{\n\n let mut pre_sum = vec![];\n\n let mut sum = 0;\n\n for item in iter {\n\n sum += read(&item);\n\n pre_sum.push(sum);\n\n }\n\n pre_sum\n\n}\n\n\n", "file_path": "violetabftstore/src/store/worker/split_controller.rs", "rank": 59, "score": 500890.9407086384 }, { "content": "pub fn brane_violetabft_prefix(brane_id: u64) -> [u8; 10] {\n\n let mut key = [0; 10];\n\n key[0..2].copy_from_slice(REGION_VIOLETABFT_PREFIX_KEY);\n\n BigEndian::write_u64(&mut key[2..10], brane_id);\n\n key\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 60, "score": 498006.58164603007 }, { "content": "pub fn brane_meta_prefix(brane_id: u64) -> [u8; 10] {\n\n let mut key = [0; 10];\n\n key[0..2].copy_from_slice(REGION_META_PREFIX_KEY);\n\n BigEndian::write_u64(&mut key[2..10], brane_id);\n\n key\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 61, "score": 498006.58164603007 }, { "content": "pub fn violetabft_state_key(brane_id: u64) -> [u8; 11] {\n\n make_brane_prefix(brane_id, VIOLETABFT_STATE_SUFFIX)\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 62, "score": 497994.539841632 }, { "content": "pub fn apply_state_key(brane_id: u64) -> [u8; 11] {\n\n make_brane_prefix(brane_id, APPLY_STATE_SUFFIX)\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 63, "score": 497994.539841632 }, { "content": "#[inline]\n\npub fn handle_lightlike_error<T>(brane_id: u64, e: TrylightlikeError<T>) -> VioletaBftStoreError {\n\n match e {\n\n TrylightlikeError::Full(_) => VioletaBftStoreError::Transport(DiscardReason::Full),\n\n TrylightlikeError::Disconnected(_) => VioletaBftStoreError::BraneNotFound(brane_id),\n\n }\n\n}\n\n\n\nimpl<EK: CausetEngine, ER: VioletaBftEngine> VioletaBftStoreRouter<EK> for VioletaBftRouter<EK, ER> {\n\n fn lightlike_violetabft_msg(&self, msg: 
VioletaBftMessage) -> VioletaBftStoreResult<()> {\n\n let brane_id = msg.get_brane_id();\n\n self.lightlike_violetabft_message(msg)\n\n .map_err(|e| handle_lightlike_error(brane_id, e))\n\n }\n\n\n\n fn significant_lightlike(\n\n &self,\n\n brane_id: u64,\n\n msg: SignificantMsg<EK::Snapshot>,\n\n ) -> VioletaBftStoreResult<()> {\n\n if let Err(lightlikeError(msg)) = self\n", "file_path": "violetabftstore/src/router.rs", "rank": 64, "score": 497228.2061889336 }, { "content": "/// Decode brane key, return the brane id and meta suffix type.\n\nfn decode_brane_key(prefix: &[u8], key: &[u8], category: &str) -> Result<(u64, u8)> {\n\n if prefix.len() + mem::size_of::<u64>() + mem::size_of::<u8>() != key.len() {\n\n return Err(Error::InvalidBraneKeyLength(\n\n category.to_owned(),\n\n key.to_owned(),\n\n ));\n\n }\n\n\n\n if !key.spacelikes_with(prefix) {\n\n return Err(Error::InvalidBranePrefix(\n\n category.to_owned(),\n\n key.to_owned(),\n\n ));\n\n }\n\n\n\n let brane_id = BigEndian::read_u64(&key[prefix.len()..prefix.len() + mem::size_of::<u64>()]);\n\n\n\n Ok((brane_id, key[key.len() - 1]))\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 65, "score": 496808.4077926151 }, { "content": "fn txn_prewrite<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n let cm = ConcurrencyManager::new(1.into());\n\n b.iter_batched(\n\n || {\n\n let mutations: Vec<(Mutation, Vec<u8>)> =\n\n KvGenerator::new(config.key_length, config.value_length)\n\n .generate(DEFAULT_ITERATIONS)\n\n .iter()\n\n .map(|(k, v)| (Mutation::Put((Key::from_raw(&k), v.clone())), k.clone()))\n\n .collect();\n\n mutations\n\n },\n\n |mutations| {\n\n for (mutation, primary) in mutations {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n let mut txn = tail_pointer::MvccTxn::new(snapshot, 1.into(), true, cm.clone());\n\n txn.prewrite(mutation, &primary, &None, false, 0, 0, 
TimeStamp::default())\n\n .unwrap();\n\n let write_data = WriteData::from_modifies(txn.into_modifies());\n\n black_box(engine.write(&ctx, write_data)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 66, "score": 496297.4162941899 }, { "content": "fn txn_commit<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n let cm = ConcurrencyManager::new(1.into());\n\n b.iter_batched(\n\n || setup_prewrite(&engine, &config, 1),\n\n |tuplespaceInstanton| {\n\n for key in tuplespaceInstanton {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n let mut txn = tail_pointer::MvccTxn::new(snapshot, 1.into(), true, cm.clone());\n\n commit(&mut txn, key, 2.into()).unwrap();\n\n let write_data = WriteData::from_modifies(txn.into_modifies());\n\n black_box(engine.write(&ctx, write_data)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 67, "score": 496297.4162941899 }, { "content": "/// `encode_value` encodes a datum slice into a buffer.\n\npub fn encode_value(ctx: &mut EvalContext, values: &[Datum]) -> Result<Vec<u8>> {\n\n encode(ctx, values, false)\n\n}\n\n\n", "file_path": "edb/milevadb_query/milevadb_query_datatype/src/codec/datum.rs", "rank": 68, "score": 494378.13490237144 }, { "content": "fn is_same_Block(left_key: &[u8], right_key: &[u8]) -> bool {\n\n is_Block_key(left_key)\n\n && is_Block_key(right_key)\n\n && left_key[..ENCODED_Block_Block_PREFIX] == right_key[..ENCODED_Block_Block_PREFIX]\n\n}\n\n\n\n#[causet(test)]\n\nmod tests {\n\n use std::io::Write;\n\n use std::sync::mpsc;\n\n\n\n use ekvproto::meta_timeshare::Peer;\n\n use ekvproto::fidel_timeshare::CheckPolicy;\n\n use tempfile::Builder;\n\n\n\n use crate::store::{CasualMessage, SplitCheckRunner, SplitCheckTask};\n\n use 
engine_lmdb::util::new_engine;\n\n use edb::{SyncMuBlock, ALL_CausetS};\n\n use milevadb_query_datatype::codec::Block::{Block_PREFIX, Block_PREFIX_KEY_LEN};\n\n use violetabftstore::interlock::::codec::number::NumberEncoder;\n", "file_path": "violetabftstore/src/interlock/split_check/table.rs", "rank": 69, "score": 494261.69386041147 }, { "content": "fn is_Block_key(encoded_key: &[u8]) -> bool {\n\n encoded_key.spacelikes_with(Block_codec::Block_PREFIX)\n\n && encoded_key.len() >= ENCODED_Block_Block_PREFIX\n\n}\n\n\n", "file_path": "violetabftstore/src/interlock/split_check/table.rs", "rank": 70, "score": 492905.187303092 }, { "content": "/// Decodes bytes which are encoded by `encode_compact_bytes` before.\n\npub fn decode_compact_bytes(data: &mut BytesSlice<'_>) -> Result<Vec<u8>> {\n\n let vn = number::decode_var_i64(data)? as usize;\n\n if data.len() >= vn {\n\n let bs = data[0..vn].to_vec();\n\n *data = &data[vn..];\n\n return Ok(bs);\n\n }\n\n Err(Error::unexpected_eof())\n\n}\n\n\n", "file_path": "edb/edbn/einsteindb_util/src/codec/bytes.rs", "rank": 71, "score": 492076.7644541081 }, { "content": "pub fn violetabft_log_key(brane_id: u64, log_index: u64) -> [u8; 19] {\n\n make_brane_key(brane_id, VIOLETABFT_LOG_SUFFIX, log_index)\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 72, "score": 492047.21243564144 }, { "content": "pub fn snapshot_violetabft_state_key(brane_id: u64) -> [u8; 11] {\n\n make_brane_prefix(brane_id, SNAPSHOT_VIOLETABFT_STATE_SUFFIX)\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 73, "score": 491882.2857900882 }, { "content": "fn txn_rollback_conflict<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n let cm = ConcurrencyManager::new(1.into());\n\n b.iter_batched(\n\n || setup_prewrite(&engine, &config, 2),\n\n |tuplespaceInstanton| {\n\n for key in tuplespaceInstanton {\n\n let snapshot 
= engine.snapshot(&ctx).unwrap();\n\n let mut txn = tail_pointer::MvccTxn::new(snapshot, 1.into(), true, cm.clone());\n\n txn.rollback(key).unwrap();\n\n let write_data = WriteData::from_modifies(txn.into_modifies());\n\n black_box(engine.write(&ctx, write_data)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 74, "score": 491572.6119602613 }, { "content": "fn tail_pointer_prewrite<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n let cm = ConcurrencyManager::new(1.into());\n\n b.iter_batched(\n\n || {\n\n let mutations: Vec<(Mutation, Vec<u8>)> = KvGenerator::with_seed(\n\n config.key_length,\n\n config.value_length,\n\n DEFAULT_KV_GENERATOR_SEED,\n\n )\n\n .generate(DEFAULT_ITERATIONS)\n\n .iter()\n\n .map(|(k, v)| (Mutation::Put((Key::from_raw(&k), v.clone())), k.clone()))\n\n .collect();\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n (mutations, snapshot)\n\n },\n\n |(mutations, snapshot)| {\n\n for (mutation, primary) in mutations {\n\n let mut txn = tail_pointer::MvccTxn::new(snapshot.clone(), 1.into(), true, cm.clone());\n\n txn.prewrite(mutation, &primary, &None, false, 0, 0, TimeStamp::default())\n\n .unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 75, "score": 491572.61196026136 }, { "content": "fn tail_pointer_commit<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let cm = ConcurrencyManager::new(1.into());\n\n b.iter_batched(\n\n || setup_prewrite(&engine, &config, 1),\n\n |(snapshot, tuplespaceInstanton)| {\n\n for key in tuplespaceInstanton {\n\n let mut txn = tail_pointer::MvccTxn::new(snapshot.clone(), 1.into(), true, cm.clone());\n\n black_box(commit(&mut txn, key, 1.into())).unwrap();\n\n 
}\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 76, "score": 491572.6119602613 }, { "content": "fn txn_rollback_prewrote<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n let cm = ConcurrencyManager::new(1.into());\n\n b.iter_batched(\n\n || setup_prewrite(&engine, &config, 1),\n\n |tuplespaceInstanton| {\n\n for key in tuplespaceInstanton {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n let mut txn = tail_pointer::MvccTxn::new(snapshot, 1.into(), true, cm.clone());\n\n txn.rollback(key).unwrap();\n\n let write_data = WriteData::from_modifies(txn.into_modifies());\n\n black_box(engine.write(&ctx, write_data)).unwrap();\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n )\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 77, "score": 491572.6119602613 }, { "content": "#[inline]\n\npub fn check_index_key(key: &[u8]) -> Result<()> {\n\n check_key_type(key, INDEX_PREFIX_SEP)\n\n}\n\n\n\n/// `check_key_type` checks if the key is the type we want, `wanted_type` should be\n\n/// `Block::RECORD_PREFIX_SEP` or `Block::INDEX_PREFIX_SEP` .\n", "file_path": "edb/milevadb_query/milevadb_query_datatype/src/codec/table.rs", "rank": 78, "score": 490096.58938242495 }, { "content": "#[inline]\n\npub fn check_record_key(key: &[u8]) -> Result<()> {\n\n check_key_type(key, RECORD_PREFIX_SEP)\n\n}\n\n\n", "file_path": "edb/milevadb_query/milevadb_query_datatype/src/codec/table.rs", "rank": 79, "score": 490096.589382425 }, { "content": "#[allow(dead_code)]\n\npub fn validate_and_persist_config(config: &mut EINSTEINDBConfig, persist: bool) {\n\n config.compatible_adjust();\n\n if let Err(e) = config.validate() {\n\n fatal!(\"invalid configuration: {}\", e);\n\n }\n\n\n\n if let Err(e) = check_critical_config(config) {\n\n fatal!(\"critical config check failed: {}\", e);\n\n }\n\n\n\n if 
persist {\n\n if let Err(e) = persist_config(&config) {\n\n fatal!(\"persist critical config failed: {}\", e);\n\n }\n\n }\n\n}\n\n\n", "file_path": "causetq/cmd/src/setup.rs", "rank": 80, "score": 489700.5486883536 }, { "content": "fn check_cluster(cluster: &mut Cluster<impl Simulator>, k: &[u8], v: &[u8], all_committed: bool) {\n\n let brane = cluster.fidel_client.get_brane(k).unwrap();\n\n let mut tried_cnt = 0;\n\n let leader = loop {\n\n match cluster.leader_of_brane(brane.get_id()) {\n\n None => {\n\n tried_cnt += 1;\n\n if tried_cnt >= 3 {\n\n panic!(\"leader should be elected\");\n\n }\n\n continue;\n\n }\n\n Some(l) => break l,\n\n }\n\n };\n\n let mut missing_count = 0;\n\n for i in 1..=brane.get_peers().len() as u64 {\n\n let engine = cluster.get_engine(i);\n\n if all_committed || i == leader.get_store_id() {\n\n must_get_equal(&engine, k, v);\n", "file_path": "tests/integrations/violetabftstore/test_split_region.rs", "rank": 81, "score": 489535.8697644933 }, { "content": "/// Computes the next key of the given key.\n\n///\n\n/// If the key has no successor key (e.g. 
the input is \"\\xff\\xff\"), the result\n\n/// would be an empty vector.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tuplespaceInstanton::next_key;\n\n/// assert_eq!(next_key(b\"123\"), b\"124\");\n\n/// assert_eq!(next_key(b\"12\\xff\"), b\"13\");\n\n/// assert_eq!(next_key(b\"\\xff\\xff\"), b\"\");\n\n/// assert_eq!(next_key(b\"\\xff\\xfe\"), b\"\\xff\\xff\");\n\n/// assert_eq!(next_key(b\"T\"), b\"U\");\n\n/// assert_eq!(next_key(b\"\"), b\"\");\n\n/// ```\n\npub fn next_key(key: &[u8]) -> Vec<u8> {\n\n if let Some((s, e)) = next_key_no_alloc(key) {\n\n let mut res = Vec::with_capacity(s.len() + 1);\n\n res.extlightlike_from_slice(s);\n\n res.push(e);\n\n res\n\n } else {\n\n Vec::new()\n\n }\n\n}\n\n\n\n#[derive(Debug, Display, Fail)]\n\npub enum Error {\n\n #[display(fmt = \"{} is not a valid violetabft log key\", \"hex::encode_upper(_0)\")]\n\n InvalidVioletaBftLogKey(Vec<u8>),\n\n #[display(\n\n fmt = \"invalid brane {} key length for key {}\",\n\n \"_0\",\n\n \"hex::encode_upper(_1)\"\n\n )]\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 82, "score": 489257.0004146056 }, { "content": "pub fn data_key(key: &[u8]) -> Vec<u8> {\n\n let mut v = Vec::with_capacity(DATA_PREFIX_KEY.len() + key.len());\n\n v.extlightlike_from_slice(DATA_PREFIX_KEY);\n\n v.extlightlike_from_slice(key);\n\n v\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 83, "score": 489231.16324660624 }, { "content": "#[causet(unix)]\n\npub fn check_max_open_fds(expect: u64) -> Result<(), ConfigError> {\n\n use std::mem;\n\n\n\n unsafe {\n\n let mut fd_limit = mem::zeroed();\n\n let mut err = libc::getrlimit(libc::RLIMIT_NOFILE, &mut fd_limit);\n\n if err != 0 {\n\n return Err(ConfigError::Limit(\"check_max_open_fds failed\".to_owned()));\n\n }\n\n if fd_limit.rlim_cur >= expect {\n\n return Ok(());\n\n }\n\n\n\n let prev_limit = fd_limit.rlim_cur;\n\n fd_limit.rlim_cur = expect;\n\n if fd_limit.rlim_max < expect {\n\n // If the process is not spacelikeed 
by privileged user, this will fail.\n\n fd_limit.rlim_max = expect;\n\n }\n\n err = libc::setrlimit(libc::RLIMIT_NOFILE, &fd_limit);\n", "file_path": "edb/edbn/einsteindb_util/src/config.rs", "rank": 84, "score": 487717.24778138055 }, { "content": "pub fn make_rpc_error<E: Debug>(err: E) -> RpcStatus {\n\n // FIXME: Just spewing debug error formatting here seems pretty unfrilightlikely\n\n RpcStatus::new(RpcStatusCode::UNKNOWN, Some(format!(\"{:?}\", err)))\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! lightlike_rpc_response {\n\n ($res:ident, $sink:ident, $label:ident, $timer:ident) => {{\n\n let res = match $res {\n\n Ok(resp) => {\n\n IMPORT_RPC_DURATION\n\n .with_label_values(&[$label, \"ok\"])\n\n .observe($timer.elapsed_secs());\n\n $sink.success(resp)\n\n }\n\n Err(e) => {\n\n IMPORT_RPC_DURATION\n\n .with_label_values(&[$label, \"error\"])\n\n .observe($timer.elapsed_secs());\n\n error_inc(&e);\n\n $sink.fail(make_rpc_error(e))\n\n }\n\n };\n\n let _ = res.map_err(|e| warn!(\"lightlike rpc response\"; \"err\" => %e)).await;\n\n }};\n\n}\n", "file_path": "edb/components/sst_importer/src/service.rs", "rank": 85, "score": 485035.43368955085 }, { "content": "/// Checks whether the given PrimaryCausets info are supported.\n\npub fn check_PrimaryCausets_info_supported(PrimaryCausets_info: &[PrimaryCausetInfo]) -> Result<()> {\n\n use std::convert::TryFrom;\n\n use milevadb_query_datatype::EvalType;\n\n use milevadb_query_datatype::FieldTypeAccessor;\n\n\n\n for PrimaryCauset in PrimaryCausets_info {\n\n if PrimaryCauset.get_pk_handle() {\n\n box_try!(EvalType::try_from(PrimaryCauset.as_accessor().tp()));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl<S: causet_storage, I: ScanFreeDaemonImpl> BatchFreeDaemon for ScanFreeDaemon<S, I> {\n\n type StorageStats = S::Statistics;\n\n\n\n #[inline]\n\n fn schemaReplicant(&self) -> &[FieldType] {\n\n self.imp.schemaReplicant()\n\n }\n", "file_path": "edb/milevadb_query/milevadb_query_vec_executors/src/util/scan_executor.rs", 
"rank": 86, "score": 484117.5459345705 }, { "content": "fn tail_pointer_reader_load_lock<E: Engine, F: EngineFactory<E>>(b: &mut Bencher, config: &BenchConfig<F>) {\n\n let engine = config.engine_factory.build();\n\n let ctx = Context::default();\n\n let test_tuplespaceInstanton: Vec<Key> = KvGenerator::with_seed(\n\n config.key_length,\n\n config.value_length,\n\n DEFAULT_KV_GENERATOR_SEED,\n\n )\n\n .generate(DEFAULT_ITERATIONS)\n\n .iter()\n\n .map(|(k, _)| Key::from_raw(&k))\n\n .collect();\n\n\n\n b.iter_batched(\n\n || {\n\n let snapshot = engine.snapshot(&ctx).unwrap();\n\n (snapshot, &test_tuplespaceInstanton)\n\n },\n\n |(snapshot, test_kvs)| {\n\n for key in test_kvs {\n\n let mut reader =\n\n MvccReader::new(snapshot.clone(), None, true, ctx.get_isolation_level());\n\n black_box(reader.load_lock(&key).unwrap());\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 87, "score": 482531.87792976544 }, { "content": "pub fn violetabft_log_prefix(brane_id: u64) -> [u8; 11] {\n\n make_brane_prefix(brane_id, VIOLETABFT_LOG_SUFFIX)\n\n}\n\n\n", "file_path": "edb/components/keys/src/lib.rs", "rank": 88, "score": 482367.8762195209 }, { "content": "pub fn sha256(input: &[u8]) -> Result<Vec<u8>, ErrorStack> {\n\n hash::hash(MessageDigest::sha256(), input).map(|digest| digest.to_vec())\n\n}\n\n\n\n/// Wrapper of a reader which computes its SHA-256 hash while reading.\n\npub struct Sha256Reader<R> {\n\n reader: R,\n\n hasher: Arc<Mutex<Hasher>>,\n\n}\n\n\n\nimpl<R> Sha256Reader<R> {\n\n /// Creates a new `Sha256Reader`, wrapping the given reader.\n\n pub fn new(reader: R) -> Result<(Self, Arc<Mutex<Hasher>>), ErrorStack> {\n\n let hasher = Arc::new(Mutex::new(Hasher::new(MessageDigest::sha256())?));\n\n Ok((\n\n Sha256Reader {\n\n reader,\n\n hasher: hasher.clone(),\n\n },\n\n hasher,\n", "file_path": "edb/edbn/einsteindb_util/src/file.rs", "rank": 89, "score": 480159.19285184226 }, { "content": 
"#[inline]\n\npub fn decode_int_handle(mut key: &[u8]) -> Result<i64> {\n\n check_record_key(key)?;\n\n key = &key[PREFIX_LEN..];\n\n key.read_i64().map_err(Error::from)\n\n}\n\n\n\n/// `decode_common_handle` decodes key key and gets the common handle.\n", "file_path": "edb/milevadb_query/milevadb_query_datatype/src/codec/table.rs", "rank": 90, "score": 479255.830595161 }, { "content": "// When we bootstrap the brane or handling split new brane, we must\n\n// call this to initialize brane apply state first.\n\npub fn write_initial_apply_state<T: MuBlock>(kv_wb: &mut T, brane_id: u64) -> Result<()> {\n\n let mut apply_state = VioletaBftApplyState::default();\n\n apply_state.set_applied_index(VIOLETABFT_INIT_LOG_INDEX);\n\n apply_state\n\n .mut_truncated_state()\n\n .set_index(VIOLETABFT_INIT_LOG_INDEX);\n\n apply_state\n\n .mut_truncated_state()\n\n .set_term(VIOLETABFT_INIT_LOG_TERM);\n\n\n\n kv_wb.put_msg_causet(Causet_VIOLETABFT, &tuplespaceInstanton::apply_state_key(brane_id), &apply_state)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "violetabftstore/src/store/peer_storage.rs", "rank": 91, "score": 478993.36947130633 }, { "content": "/// Gets the number of files at given level of given PrimaryCauset family.\n\npub fn get_causet_num_files_at_level(engine: &DB, handle: &CausetHandle, level: usize) -> Option<u64> {\n\n let prop = format!(\"{}{}\", LMDB_NUM_FILES_AT_LEVEL, level);\n\n engine.get_property_int_causet(handle, &prop)\n\n}\n\n\n", "file_path": "edb/components/embedded_lmdb/src/util.rs", "rank": 92, "score": 475325.46953614976 }, { "content": "fn half_split_bucket_size(brane_max_size: u64) -> u64 {\n\n let mut half_split_bucket_size = brane_max_size / BUCKET_NUMBER_LIMIT as u64;\n\n let bucket_size_limit = ReadableSize::mb(BUCKET_SIZE_LIMIT_MB).0;\n\n if half_split_bucket_size == 0 {\n\n half_split_bucket_size = 1;\n\n } else if half_split_bucket_size > bucket_size_limit {\n\n half_split_bucket_size = bucket_size_limit;\n\n }\n\n 
half_split_bucket_size\n\n}\n\n\n", "file_path": "violetabftstore/src/interlock/split_check/half.rs", "rank": 93, "score": 473289.67014091025 }, { "content": "fn do_compare<T, E, F>(mut e: E, op: CmpOp, get_order: F) -> Result<Option<i64>>\n\nwhere\n\n E: FnMut(usize) -> Result<Option<T>>,\n\n F: Fn(T, T) -> Result<Ordering>,\n\n{\n\n let lhs = e(0)?;\n\n if lhs.is_none() && op != CmpOp::NullEQ {\n\n return Ok(None);\n\n }\n\n let rhs = e(1)?;\n\n match (lhs, rhs) {\n\n (None, None) => Ok(Some(1)),\n\n (Some(lhs), Some(rhs)) => {\n\n let ordering = get_order(lhs, rhs)?;\n\n let r = match op {\n\n CmpOp::LT => ordering == Ordering::Less,\n\n CmpOp::LE => ordering != Ordering::Greater,\n\n CmpOp::GT => ordering == Ordering::Greater,\n\n CmpOp::GE => ordering != Ordering::Less,\n\n CmpOp::NE => ordering != Ordering::Equal,\n", "file_path": "edb/milevadb_query/milevadb_query_normal_expr/src/builtin_compare.rs", "rank": 94, "score": 473227.8250797299 }, { "content": "pub fn error_stream(e: io::Error) -> impl Stream<Item = io::Result<Bytes>> + Unpin + lightlike + Sync {\n\n stream::iter(iter::once(Err(e)))\n\n}\n\n\n", "file_path": "edb/components/causet_storage/src/util.rs", "rank": 95, "score": 473102.7856900828 }, { "content": "pub fn get_causet_handle<'a>(db: &'a DB, causet: &str) -> Result<&'a CausetHandle> {\n\n let handle = db\n\n .causet_handle(causet)\n\n .ok_or_else(|| Error::Engine(format!(\"causet {} not found\", causet)))?;\n\n Ok(handle)\n\n}\n\n\n", "file_path": "edb/components/embedded_lmdb/src/util.rs", "rank": 96, "score": 473016.8236436081 }, { "content": "fn assert_has<E: Engine>(ctx: &Context, engine: &E, key: &[u8], value: &[u8]) {\n\n let snapshot = engine.snapshot(ctx).unwrap();\n\n assert_eq!(snapshot.get(&Key::from_raw(key)).unwrap().unwrap(), value);\n\n}\n\n\n", "file_path": "tests/integrations/storage/test_violetabftkv.rs", "rank": 97, "score": 471561.2297175684 }, { "content": "fn to_encoded_Block_prefix(encoded_key: &[u8]) -> 
Option<Vec<u8>> {\n\n if let Ok(raw_key) = Key::from_encoded_slice(encoded_key).to_raw() {\n\n Block_codec::extract_Block_prefix(&raw_key)\n\n .map(|k| Key::from_raw(k).into_encoded())\n\n .ok()\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n// Encode a key like `t{i64}` will applightlike some unnecessary bytes to the output,\n\n// The first 10 bytes are enough to find out which Block this key belongs to.\n\nconst ENCODED_Block_Block_PREFIX: usize = Block_codec::Block_PREFIX_KEY_LEN + 1;\n\n\n", "file_path": "violetabftstore/src/interlock/split_check/table.rs", "rank": 98, "score": 470875.4777623569 }, { "content": "pub fn bench_causet_storage<E: Engine, F: EngineFactory<E>>(\n\n c: &mut Criterion,\n\n configs: &[BenchConfig<F>],\n\n) {\n\n c.bench_function_over_inputs(\n\n \"causet_storage_async_prewrite\",\n\n causet_storage_prewrite,\n\n configs.to_owned(),\n\n );\n\n c.bench_function_over_inputs(\"causet_storage_async_commit\", causet_storage_commit, configs.to_owned());\n\n c.bench_function_over_inputs(\"causet_storage_async_raw_get\", causet_storage_raw_get, configs.to_owned());\n\n}\n", "file_path": "tests/benches/hierarchy/storage/mod.rs", "rank": 99, "score": 469605.5441961742 } ]
Rust
src/senet/renderer.rs
raybritton/rust_games_proto
5268bf7d37836d7e7bc1038ebbf49abdee1c649e
use crate::boards::idx_coord::BoardCoord; use crate::boards::{board_cols, board_rows}; use crate::constants::colors::{ BROWN, CREAM, LIGHT_BLUE, LIGHT_GRAY, PIECE_COMPUTER, PIECE_HUMAN, RED, WHITE, }; use crate::constants::Direction; use crate::senet::rules::{HOUSE_BEAUTY, HOUSE_HAPPINESS, HOUSE_REBIRTH, HOUSE_WATER}; use crate::senet::{Move, Square, State}; use crate::system::letter_mesh::make_letter_mesh; use crate::system::math::{pt, Offset, OffsetTuple, Point}; use crate::system::mesh_helper::MeshHelper; use crate::system::TurnState::{SelectingMove, SelectingPiece}; use ggez::graphics::DrawMode; use ggez::{Context, GameResult}; pub(super) fn render( ctx: &mut Context, mesh_helper: &mut MeshHelper, state: &State, ) -> GameResult<()> { let cell_size = mesh_helper.calc_width(0.09); let stick_width = mesh_helper.calc_height(0.015); let stick_height = mesh_helper.calc_height(0.1); let msg_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.45)); let stick_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.5)); let board_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_width(0.05)); let human = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.3, DrawMode::fill())?; let computer = mesh_helper.make_triangle(ctx, cell_size * 0.6, cell_size * 0.6, Direction::Up)?; let grid = mesh_helper.make_grid( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, board_cols(), board_rows(), 2., LIGHT_GRAY, None, )?; let rect = mesh_helper.make_rect( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, DrawMode::stroke(2.), )?; mesh_helper.draw_mesh(ctx, grid.as_ref(), board_start); mesh_helper.draw_mesh(ctx, rect.as_ref(), board_start); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_WATER, "WATER", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_HAPPINESS, "HAPPINESS", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_BEAUTY, "BEAUTY", ); 
draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_REBIRTH, "REBIRTH", ); mesh_helper.draw_white_text( ctx, "WATER", Point::from(BoardCoord::from(HOUSE_WATER)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, ); state.board.iter().enumerate().for_each(|(idx, square)| { let result = match square { Square::Empty => None, Square::Human => Some((human.as_ref(), PIECE_HUMAN, board_start)), Square::Computer => Some(( computer.as_ref(), PIECE_COMPUTER, board_start.offset(cell_size * 0.16, cell_size * 0.15), )), }; if let Some((mesh, colour, offset)) = result { let pos = Point::from(BoardCoord::from(idx)) .multiply(cell_size, cell_size) .offset_point(offset); mesh_helper.draw_coloured_mesh(ctx, mesh, pos, colour); } }); if state.play_state.is_either(SelectingPiece) && state.roll.is_some() { state .cursor .render(ctx, mesh_helper, board_start, cell_size)?; for mov in state.get_moves_for_selected_piece() { draw_move(ctx, mesh_helper, cell_size, board_start, &mov, false)?; } } else if state.play_state.is_either(SelectingMove) { state .cursor .render_dark(ctx, mesh_helper, board_start, cell_size)?; draw_move( ctx, mesh_helper, cell_size, board_start, &state.get_selected_move(), true, )?; } if let Some(roll) = state.roll { let stick = mesh_helper.make_rect(ctx, stick_width, stick_height, DrawMode::fill())?; for i in 0..4 { let colour = if i < roll && roll != 5 { CREAM } else { BROWN }; mesh_helper.draw_coloured_mesh( ctx, stick.as_ref(), stick_start.offset(stick_width * 1.5 * i as f32, 0.), colour, ); } } if let Some(msg) = &state.msg { mesh_helper.draw_white_text(ctx, msg, msg_start, 20., false); } Ok(()) } fn draw_cell_text( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, index: usize, text: &str, ) { mesh_helper.draw_white_text( ctx, text, Point::from(BoardCoord::from(index)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, 
); } fn draw_move( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, mov: &Move, highlight: bool, ) -> GameResult<()> { let move_mesh = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.1, DrawMode::stroke(1.))?; let capture_mesh = make_letter_mesh(ctx, mesh_helper, cell_size * 0.3, 'x')?; let point = Point::from(BoardCoord::from(mov.dest)) .multiply(cell_size, cell_size) .offset_point(board_start); let (mesh, colour, pt) = if mov.exchange { ( capture_mesh.as_ref(), RED, point.offset(cell_size * 0.35, cell_size * 0.35), ) } else { (move_mesh.as_ref(), WHITE, point) }; mesh_helper.draw_coloured_mesh(ctx, mesh, pt, if highlight { LIGHT_BLUE } else { colour }); Ok(()) }
use crate::boards::idx_coord::BoardCoord; use crate::boards::{board_cols, board_rows}; use crate::constants::colors::{ BROWN, CREAM, LIGHT_BLUE, LIGHT_GRAY, PIECE_COMPUTER, PIECE_HUMAN, RED, WHITE, }; use crate::constants::Direction; use crate::senet::rules::{HOUSE_BEAUTY, HOUSE_HAPPINESS, HOUSE_REBIRTH, HOUSE_WATER}; use crate::senet::{Move, Square, State}; use crate::system::letter_mesh::make_letter_mesh; use crate::system::math::{pt, Offset, OffsetTuple, Point}; use crate::system::mesh_helper::MeshHelper; use crate::system::TurnState::{SelectingMove, SelectingPiece}; use ggez::graphics::DrawMode; use ggez::{Context, GameResult}; pub(super) fn render( ctx: &mut Context, mesh_helper: &mut MeshHelper, state: &State, ) -> GameResult<()> { let cell_size = mesh_helper.calc_width(0.09); let stick_width = mesh_helper.calc_height(0.015); let stick_height = mesh_helper.calc_height(0.1); let msg_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.45)); let stick_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.5)); let board_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_width(0.05)); let human = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.3, DrawMode::fill())?; let computer = mesh_helper.make_triangle(ctx, cell_size * 0.6, cell_size * 0.6, Direction::Up)?; let grid = mesh_helper.make_grid( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, board_cols(), board_rows(), 2., LIGHT_GRAY, None, )?; let rect = mesh_helper.make_rect( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, DrawMode::stroke(2.), )?; mesh_helper.draw_mesh(ctx, grid.as_ref(), board_start); mesh_helper.draw_mesh(ctx, rect.as_ref(), board_start); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_WATER, "WATER", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_HAPPINESS, "HAPPINESS", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_BEAUTY, "BEAUTY", ); 
draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_REBIRTH, "REBIRTH", ); mesh_helper.draw_white_text( ctx, "WATER", Point::from(BoardCoord::from(HOUSE_WATER)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, ); state.board.iter().enumerate().for_each(|(idx, square)| { let result =
; if let Some((mesh, colour, offset)) = result { let pos = Point::from(BoardCoord::from(idx)) .multiply(cell_size, cell_size) .offset_point(offset); mesh_helper.draw_coloured_mesh(ctx, mesh, pos, colour); } }); if state.play_state.is_either(SelectingPiece) && state.roll.is_some() { state .cursor .render(ctx, mesh_helper, board_start, cell_size)?; for mov in state.get_moves_for_selected_piece() { draw_move(ctx, mesh_helper, cell_size, board_start, &mov, false)?; } } else if state.play_state.is_either(SelectingMove) { state .cursor .render_dark(ctx, mesh_helper, board_start, cell_size)?; draw_move( ctx, mesh_helper, cell_size, board_start, &state.get_selected_move(), true, )?; } if let Some(roll) = state.roll { let stick = mesh_helper.make_rect(ctx, stick_width, stick_height, DrawMode::fill())?; for i in 0..4 { let colour = if i < roll && roll != 5 { CREAM } else { BROWN }; mesh_helper.draw_coloured_mesh( ctx, stick.as_ref(), stick_start.offset(stick_width * 1.5 * i as f32, 0.), colour, ); } } if let Some(msg) = &state.msg { mesh_helper.draw_white_text(ctx, msg, msg_start, 20., false); } Ok(()) } fn draw_cell_text( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, index: usize, text: &str, ) { mesh_helper.draw_white_text( ctx, text, Point::from(BoardCoord::from(index)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, ); } fn draw_move( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, mov: &Move, highlight: bool, ) -> GameResult<()> { let move_mesh = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.1, DrawMode::stroke(1.))?; let capture_mesh = make_letter_mesh(ctx, mesh_helper, cell_size * 0.3, 'x')?; let point = Point::from(BoardCoord::from(mov.dest)) .multiply(cell_size, cell_size) .offset_point(board_start); let (mesh, colour, pt) = if mov.exchange { ( capture_mesh.as_ref(), RED, point.offset(cell_size * 0.35, cell_size * 0.35), ) } else { 
(move_mesh.as_ref(), WHITE, point) }; mesh_helper.draw_coloured_mesh(ctx, mesh, pt, if highlight { LIGHT_BLUE } else { colour }); Ok(()) }
match square { Square::Empty => None, Square::Human => Some((human.as_ref(), PIECE_HUMAN, board_start)), Square::Computer => Some(( computer.as_ref(), PIECE_COMPUTER, board_start.offset(cell_size * 0.16, cell_size * 0.15), )), }
if_condition
[ { "content": "fn render(ctx: &mut Context, mesh_helper: &mut MeshHelper, state: &State) -> GameResult<()> {\n\n let menu_start = pt(34., 100.);\n\n let cursor_start = pt(16., 100.);\n\n let cursor = mesh_helper.make_triangle(ctx, 12., 12., Direction::Right)?;\n\n\n\n mesh_helper.draw_mesh(\n\n ctx,\n\n cursor.as_ref(),\n\n cursor_start.offsety(state.cursor.value * 16),\n\n );\n\n\n\n ITEMS.iter().enumerate().for_each(|(idx, item)| {\n\n mesh_helper.draw_text(ctx, item.1, menu_start.offsety(idx * 16), WHITE, 14., false);\n\n });\n\n\n\n Ok(())\n\n}\n", "file_path": "src/graphics_testing/mod.rs", "rank": 0, "score": 305038.52349913685 }, { "content": "fn render_game(ctx: &mut Context, mesh_helper: &mut MeshHelper, state: &State) -> GameResult<()> {\n\n let cell_size = mesh_helper.calc_height(0.09);\n\n let board_start = pt(cell_size * board_cols() as f32 * 0.5, cell_size);\n\n\n\n let grid = mesh_helper.make_grid(\n\n ctx,\n\n cell_size * board_cols() as f32,\n\n cell_size * board_rows() as f32,\n\n board_cols(),\n\n board_rows(),\n\n 2.,\n\n LIGHT_GRAY,\n\n None,\n\n )?;\n\n let rect = mesh_helper.make_rect(\n\n ctx,\n\n cell_size * board_cols() as f32,\n\n cell_size * board_rows() as f32,\n\n DrawMode::stroke(2.),\n\n )?;\n", "file_path": "src/tablut/renderer.rs", "rank": 1, "score": 298426.88391686295 }, { "content": "fn draw_drop_moves(ctx: &mut Context, mesh_helper: &mut MeshHelper, pos: Point, count: usize) {\n\n mesh_helper.draw_white_text(ctx, &format!(\"▼ {} ▼\", count), pos, 18., true);\n\n}\n\n\n", "file_path": "src/mancala/render.rs", "rank": 2, "score": 223527.84418584514 }, { "content": "fn draw_menu_text(ctx: &mut Context, mesh_helper: &mut MeshHelper, start: Point, color: Color) {\n\n ITEMS.iter().enumerate().for_each(|(idx, item)| {\n\n mesh_helper.draw_text(ctx, item.0.name, start.offsety(idx * 24), color, 24., false);\n\n });\n\n}\n\n\n", "file_path": "src/menu/renderer.rs", "rank": 3, "score": 223527.84418584514 }, { "content": "fn 
square_pos(start: Point, width: f32, height: f32, spacing: f32, square: &Square) -> Point {\n\n match square.player {\n\n Player::Human => match square.hole {\n\n Hole::Home(idx) => start.offset(\n\n (width + spacing) * (idx + 1) as f32,\n\n height + spacing + spacing,\n\n ),\n\n Hole::End => start.offset((width + spacing) * 7., 0.),\n\n },\n\n Player::Computer => match square.hole {\n\n Hole::Home(idx) => start.offset((width + spacing) * (5 - idx + 1) as f32, 0.),\n\n Hole::End => start,\n\n },\n\n }\n\n}\n\n\n\npub(super) fn render(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n", "file_path": "src/mancala/render.rs", "rank": 4, "score": 220190.81482666003 }, { "content": "fn letter_mesh_n(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let left = width * PADDING;\n\n let right = width - width * PADDING;\n\n let top = height * PADDING;\n\n let bottom = height * PADDING;\n\n\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(left, height - bottom),\n\n pt(left, top * 1.3),\n\n pt(right, height - bottom * 1.3),\n\n pt(right, top),\n\n ],\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/system/letter_mesh.rs", "rank": 5, "score": 175657.74291899533 }, { "content": "fn letter_mesh_c(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let left = width * PADDING;\n\n let bar_top = height * 0.35;\n\n let bar_bottom = height * 0.65;\n\n let step_w = width - left * 2.0;\n\n let step_h = height - (height * PADDING) - bar_bottom;\n\n\n\n builder.line(&[pt(left, bar_top), pt(left, bar_bottom)], THICKNESS, WHITE)?;\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(left + step_w * 0.0, bar_bottom + step_h * 0.0),\n\n pt(left + step_w * 0.2, bar_bottom + step_h * 0.6),\n\n pt(left + step_w * 0.5, bar_bottom + step_h * 1.0),\n\n pt(left + step_w * 0.8, bar_bottom + step_h * 0.6),\n\n pt(left + step_w * 1.0, bar_bottom + step_h * 0.0),\n\n ],\n\n 
WHITE,\n\n )?;\n\n builder.polyline(\n", "file_path": "src/system/letter_mesh.rs", "rank": 6, "score": 175657.74291899533 }, { "content": "fn letter_mesh_x(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let start_x = width * PADDING;\n\n let start_y = height * PADDING;\n\n let end_x = width - start_x;\n\n let end_y = height - start_y;\n\n builder.line(&[pt(start_x, start_y), pt(end_x, end_y)], THICKNESS, WHITE)?;\n\n builder.line(&[pt(end_x, start_y), pt(start_x, end_y)], THICKNESS, WHITE)?;\n\n\n\n Ok(())\n\n}\n\n\n\n//all other methods are (MeshBuilder, f32, f32) -> GameResult so this should be as well for compatibility\n", "file_path": "src/system/letter_mesh.rs", "rank": 7, "score": 175657.74291899533 }, { "content": "fn letter_mesh_k(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let start_x = width * PADDING;\n\n let start_y = height * PADDING;\n\n let end_x = width - start_x;\n\n let end_y = height - start_y;\n\n let upper_leg_y = (end_y - start_y) * 0.6 + start_y;\n\n let lower_leg_y = (end_y - start_y) * 0.5 + start_y;\n\n builder.line(\n\n &[pt(start_x, start_y), pt(start_x, end_y)],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n builder.line(\n\n &[pt(start_x, upper_leg_y), pt(end_x, start_y)],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n builder.line(\n\n &[pt(start_x + (width * 0.1), lower_leg_y), pt(end_x, end_y)],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 8, "score": 175657.74291899533 }, { "content": "fn letter_mesh_q(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let radius = (height * 0.5) - (height * PADDING);\n\n\n\n builder.circle(\n\n DrawMode::stroke(THICKNESS),\n\n pt(width * 0.5, height * 0.5),\n\n radius,\n\n TOLERANCE,\n\n WHITE,\n\n )?;\n\n\n\n builder.line(\n\n &[\n\n pt(width * 0.5, height * 0.6),\n\n pt(width - width * PADDING, height - height * PADDING),\n\n ],\n\n THICKNESS,\n\n WHITE,\n\n 
)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 9, "score": 175657.74291899533 }, { "content": "fn letter_mesh_t(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let top = height * PADDING;\n\n let left = width * PADDING;\n\n let right = width - width * PADDING;\n\n let mid_x = width * 0.5;\n\n let bottom = height - height * PADDING;\n\n\n\n builder.line(&[pt(left, top), pt(right, top)], THICKNESS, WHITE)?;\n\n builder.line(&[pt(mid_x, top), pt(mid_x, bottom)], THICKNESS, WHITE)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 10, "score": 175657.74291899533 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn letter_mesh_o(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let radius = (height * 0.5) - (height * PADDING);\n\n\n\n builder.circle(\n\n DrawMode::stroke(THICKNESS),\n\n pt(width * 0.5, height * 0.5),\n\n radius,\n\n TOLERANCE,\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 11, "score": 175657.74291899533 }, { "content": "fn letter_mesh_a(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let start_x = width * PADDING;\n\n let start_y = height - height * PADDING;\n\n let top_x = width * 0.5;\n\n let top_y = height * PADDING;\n\n let end_x = width - width * PADDING;\n\n let end_y = height - height * PADDING;\n\n let bar_start_x = width * 0.3;\n\n let bar_start_y = height * 0.6;\n\n let bar_end_x = width * 0.7;\n\n let bar_end_y = height * 0.6;\n\n builder.line(&[pt(start_x, start_y), pt(top_x, top_y)], THICKNESS, WHITE)?;\n\n builder.line(&[pt(end_x, end_y), pt(top_x, top_y)], THICKNESS, WHITE)?;\n\n builder.line(\n\n &[pt(bar_start_x, bar_start_y), pt(bar_end_x, bar_end_y)],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 12, "score": 175657.74291899533 }, { "content": "fn letter_mesh_r(builder: 
&mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let start_x = width * PADDING;\n\n let start_y = height * PADDING;\n\n let bottom_x = width * PADDING;\n\n let bottom_y = height * 0.5;\n\n builder.line(\n\n &[\n\n pt(start_x, start_y),\n\n pt(bottom_x, height - height * PADDING),\n\n ],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(start_x, start_y),\n\n pt(start_x + width * 0.3, start_y),\n\n pt(start_x + width * 0.45, start_y + (bottom_y - start_y) * 0.2),\n\n pt(start_x + width * 0.5, start_y + (bottom_y - start_y) * 0.4),\n", "file_path": "src/system/letter_mesh.rs", "rank": 13, "score": 175657.74291899533 }, { "content": "fn letter_mesh_j(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let top = height * PADDING;\n\n let left = width * PADDING;\n\n let right = width - width * PADDING;\n\n let mid_x = width * 0.5;\n\n let bar_bottom = height * 0.6;\n\n let step_w = mid_x - left;\n\n let step_h = height * 0.08;\n\n\n\n builder.line(&[pt(left, top), pt(right, top)], THICKNESS, WHITE)?;\n\n builder.line(&[pt(mid_x, top), pt(mid_x, bar_bottom)], THICKNESS, WHITE)?;\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(left, bar_bottom),\n\n pt(left + step_w * 0.1, bar_bottom + step_h),\n\n pt(left + step_w * 0.32, bar_bottom + step_h * 2.0),\n\n pt(left + step_w * 0.68, bar_bottom + step_h * 2.0),\n\n pt(left + step_w * 0.9, bar_bottom + step_h),\n\n pt(left + step_w * 1.0, bar_bottom),\n\n ],\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 14, "score": 175657.74291899533 }, { "content": "fn letter_mesh_m(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let left = width * PADDING;\n\n let right = width - width * PADDING;\n\n let top = height * PADDING;\n\n let bottom = height - height * PADDING;\n\n let mid_x = width * 0.5;\n\n let mid_y = height * 0.5;\n\n\n\n 
builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(left, bottom),\n\n pt(left, top),\n\n pt(mid_x, mid_y),\n\n pt(right, top),\n\n pt(right, bottom),\n\n ],\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 15, "score": 175657.74291899533 }, { "content": "fn letter_mesh_w(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let left = width * PADDING;\n\n let left_inner = width * PADDING * 1.5;\n\n let right = width - width * PADDING;\n\n let right_inner = width - (width * PADDING * 1.5);\n\n let top = height * PADDING;\n\n let bottom = height - (height * PADDING * 1.5);\n\n let mid_x = width * 0.5;\n\n let mid_y = height * 0.4;\n\n\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(left, top),\n\n pt(left_inner, bottom),\n\n pt(mid_x, mid_y),\n\n pt(right_inner, bottom),\n\n pt(right, top),\n\n ],\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 16, "score": 175657.74291899533 }, { "content": "fn letter_mesh_d(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let start_x = width * PADDING;\n\n let start_y = height * PADDING;\n\n let bottom_x = width * PADDING;\n\n let bottom_y = height - height * PADDING;\n\n builder.line(\n\n &[pt(start_x, start_y), pt(bottom_x, bottom_y)],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(start_x, start_y),\n\n pt(start_x + width * 0.3, start_y),\n\n pt(start_x + width * 0.5, start_y + height * 0.1),\n\n pt(start_x + width * 0.55, start_y + height * 0.2),\n\n pt(bottom_x + width * 0.55, bottom_y - height * 0.2),\n\n pt(bottom_x + width * 0.5, bottom_y - height * 0.1),\n\n pt(bottom_x + width * 0.3, bottom_y),\n\n pt(bottom_x, bottom_y),\n\n ],\n\n WHITE,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 17, "score": 175657.74291899533 }, { "content": "fn 
letter_mesh_p(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let start_x = width * PADDING;\n\n let start_y = height * PADDING;\n\n let bottom_x = width * PADDING;\n\n let bottom_y = height * 0.5;\n\n builder.line(\n\n &[\n\n pt(start_x, start_y),\n\n pt(bottom_x, height - height * PADDING),\n\n ],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(start_x, start_y),\n\n pt(start_x + width * 0.3, start_y),\n\n pt(start_x + width * 0.45, start_y + (bottom_y - start_y) * 0.2),\n\n pt(start_x + width * 0.5, start_y + (bottom_y - start_y) * 0.4),\n", "file_path": "src/system/letter_mesh.rs", "rank": 18, "score": 175657.74291899533 }, { "content": "fn letter_mesh_b(builder: &mut MeshBuilder, width: f32, height: f32) -> GameResult<()> {\n\n let start_x = width * PADDING;\n\n let start_y = height * PADDING;\n\n let bottom_x = width * PADDING;\n\n let mid_y = height * 0.5;\n\n let bottom_y = height - height * PADDING;\n\n builder.line(\n\n &[pt(start_x, start_y), pt(bottom_x, bottom_y)],\n\n THICKNESS,\n\n WHITE,\n\n )?;\n\n builder.polyline(\n\n DrawMode::stroke(THICKNESS),\n\n &[\n\n pt(start_x, start_y),\n\n pt(start_x + width * 0.3, start_y),\n\n pt(start_x + width * 0.45, start_y + (mid_y - start_y) * 0.2),\n\n pt(start_x + width * 0.5, start_y + (mid_y - start_y) * 0.4),\n\n pt(bottom_x + width * 0.5, start_y + (mid_y - start_y) * 0.6),\n\n pt(bottom_x + width * 0.45, start_y + (mid_y - start_y) * 0.8),\n", "file_path": "src/system/letter_mesh.rs", "rank": 19, "score": 175657.74291899533 }, { "content": "pub fn pt(x: f32, y: f32) -> Point {\n\n Point2{x, y}\n\n}\n", "file_path": "src/system/math.rs", "rank": 20, "score": 162630.27708680392 }, { "content": "pub fn pt_usize(x: usize, y: usize) -> Point {\n\n Point2{x:x as f32,y: y as f32}\n\n}\n\n\n", "file_path": "src/system/math.rs", "rank": 22, "score": 92770.99553999398 }, { "content": "fn draw_piece(\n\n ctx: &mut Context,\n\n 
mesh_helper: &mut MeshHelper,\n\n idx: usize,\n\n cell_size: f32,\n\n color: Color,\n\n mesh: Rc<Mesh>,\n\n board_start: Point,\n\n) {\n\n let xy = Point::from(BoardCoord::from(idx))\n\n .multiply(cell_size, cell_size)\n\n .offset_point(board_start);\n\n mesh_helper.draw_coloured_mesh(ctx, mesh.as_ref(), xy, color);\n\n}\n", "file_path": "src/draughts/renderer.rs", "rank": 24, "score": 90829.68675875913 }, { "content": "fn draw_stones(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n start: Point,\n\n count: usize,\n\n max: usize,\n\n) -> GameResult<()> {\n\n let stone_size = mesh_helper.calc_width(0.02);\n\n let padding = mesh_helper.calc_width(0.004);\n\n let stone = mesh_helper.make_circle(ctx, stone_size, stone_size * 0.5, DrawMode::fill())?;\n\n let start = start.offset(padding, padding);\n\n\n\n if count > max {\n\n let y = (max as f32 / 7.) * stone_size;\n\n let stone_x = 1. * stone_size;\n\n let text_x = 2.2 * stone_size;\n\n mesh_helper.draw_mesh(ctx, stone.as_ref(), start.offset(stone_x, y));\n\n mesh_helper.draw_white_text(\n\n ctx,\n\n &format!(\"x {}\", count),\n", "file_path": "src/mancala/render.rs", "rank": 25, "score": 90829.68675875913 }, { "content": "fn draw_move(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n cell_size: f32,\n\n board_start: Point,\n\n mov: &Move,\n\n highlight: bool,\n\n) -> GameResult<()> {\n\n let move_mesh =\n\n mesh_helper.make_circle(ctx, cell_size, cell_size * 0.1, DrawMode::stroke(1.))?;\n\n let capture_mesh = make_letter_mesh(ctx, mesh_helper, cell_size * 0.3, 'x')?;\n\n\n\n mesh_helper.draw_coloured_mesh(\n\n ctx,\n\n move_mesh.as_ref(),\n\n Point::from(BoardCoord::from(mov.dest))\n\n .multiply(cell_size, cell_size)\n\n .offset_point(board_start),\n\n if highlight { LIGHT_BLUE } else { WHITE },\n\n );\n", "file_path": "src/tablut/renderer.rs", "rank": 26, "score": 90829.68675875913 }, { "content": "fn draw_piece(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n idx: usize,\n\n 
cell_size: f32,\n\n color: Color,\n\n mesh: Rc<Mesh>,\n\n board_start: Point,\n\n) {\n\n let xy = Point::from(BoardCoord::from(idx))\n\n .multiply(cell_size, cell_size)\n\n .offset_point(board_start);\n\n mesh_helper.draw_coloured_mesh(ctx, mesh.as_ref(), xy, color);\n\n}\n", "file_path": "src/chess/renderer.rs", "rank": 27, "score": 90829.68675875913 }, { "content": "fn draw_submenu_text(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n start: Point,\n\n color: Color,\n\n) {\n\n ITEMS[state.cursor.value]\n\n .1\n\n .as_ref()\n\n .unwrap()\n\n .iter()\n\n .enumerate()\n\n .for_each(|(idx, item)| {\n\n mesh_helper.draw_text(ctx, item.name, start.offsety(idx * 22), color, 22., false);\n\n });\n\n}\n", "file_path": "src/menu/renderer.rs", "rank": 28, "score": 88128.57295839107 }, { "content": "fn setup_ggez() -> ContextBuilder {\n\n let mut cb = ContextBuilder::new(\"games\", \"Ray Britton\")\n\n .window_mode(WindowMode {\n\n width: SCREEN_WIDTH,\n\n height: SCREEN_HEIGHT,\n\n resizable: false,\n\n ..WindowMode::default()\n\n })\n\n .window_setup(WindowSetup {\n\n title: String::from(\"Games\"),\n\n ..WindowSetup::default()\n\n });\n\n\n\n if let Ok(manifest_dir) = env::var(\"CARGO_MANIFEST_DIR\") {\n\n let mut path = PathBuf::from(manifest_dir);\n\n path.push(\"resources\");\n\n // println!(\"Adding path {:?} from manifest\", path);\n\n cb = cb.add_resource_path(path);\n\n } else {\n\n cb = cb.add_resource_path(PathBuf::from(\"./resources\"));\n\n }\n\n\n\n cb\n\n}\n", "file_path": "src/main.rs", "rank": 29, "score": 86791.88346800965 }, { "content": "fn is_capturable(from: Square, target: Square) -> bool {\n\n if let Some(origin_player) = from.get_player() {\n\n target.get_player() == None || target.get_player() == Some(origin_player.opposite())\n\n } else {\n\n panic!(\"Checked capturable from empty square\");\n\n }\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 30, "score": 86517.36015797709 }, { "content": "fn offset(coord: 
&BoardCoord, x: isize, y: isize) -> (isize, isize) {\n\n let mut xy: (isize, isize) = (*coord).into();\n\n xy.0 += x;\n\n xy.1 += y;\n\n xy\n\n}\n", "file_path": "src/system/neighbours.rs", "rank": 31, "score": 85206.88235190848 }, { "content": "fn check_computer_win(board: &Board) -> bool {\n\n check_board(board, COMPUTER_PIECE)\n\n}\n\n\n", "file_path": "src/tictactoe/controller.rs", "rank": 32, "score": 76536.86807739484 }, { "content": "fn get_random_empty_square(board: &Board) -> usize {\n\n let empties: Vec<usize> = board\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(idx, square)| if square == &Empty { Some(idx) } else { None })\n\n .collect();\n\n empties[thread_rng().gen_range(0.. empties.len())]\n\n}\n\n\n\npub(super) fn process(state: &mut State) {\n\n match state.player_mode {\n\n Mode::Order => {\n\n let mut neighbours = get_neighbours(state.cursor.idx, true, true);\n\n neighbours = neighbours\n\n .iter()\n\n .filter(|&idx| state.board[*idx] == Empty)\n\n .cloned()\n\n .collect();\n\n let (idx, square) = if neighbours.is_empty() {\n\n (\n", "file_path": "src/orderchaos/ai.rs", "rank": 33, "score": 74347.0953466061 }, { "content": "fn check_board(board: &Board, piece: Square) -> bool {\n\n check_all_horz(board, 0, piece)\n\n || check_all_horz(board, 3, piece)\n\n || check_all_horz(board, 6, piece)\n\n || check_all_vert(board, 0, piece)\n\n || check_all_vert(board, 1, piece)\n\n || check_all_vert(board, 2, piece)\n\n || check_all(board, 0, 4, 8, piece)\n\n || check_all(board, 6, 4, 2, piece)\n\n}\n\n\n", "file_path": "src/tictactoe/controller.rs", "rank": 34, "score": 71870.29658050634 }, { "content": "fn get_count(board: &Board, piece: Square) -> usize {\n\n board.iter().filter(|&square| square == &piece).count()\n\n}\n", "file_path": "src/draughts/rules/common.rs", "rank": 35, "score": 70008.60637507396 }, { "content": "fn moves_for_square(board: &Board, origin: usize) -> Vec<Move> {\n\n let mut moves = vec![];\n\n debug_log_start!(\"Finding moves for 
{}\", origin);\n\n let allies = &ALLY[&board[origin]];\n\n get_neighbours(origin, true, false)\n\n .iter()\n\n .for_each(|neighbour| {\n\n let mut current = *neighbour;\n\n let mut next = next_step(origin, current);\n\n loop {\n\n let square = board[current];\n\n if square == Square::Empty {\n\n let captures: Vec<usize> = get_neighbours(current, true, false)\n\n .iter()\n\n .filter_map(|neighbour| {\n\n let next = next_step(current, *neighbour);\n\n if let Some(next) = next {\n\n if (allies.contains(&board[next])\n\n || next == CASTLE\n\n || CORNERS.contains(&next))\n", "file_path": "src/tablut/rules.rs", "rank": 36, "score": 68988.67233881366 }, { "content": "fn check_all_vert(board: &Board, start: usize, piece: Square) -> bool {\n\n check_all(board, start, start + 3, start + 6, piece)\n\n}\n\n\n", "file_path": "src/tictactoe/controller.rs", "rank": 37, "score": 66343.26905273489 }, { "content": "fn check_all_horz(board: &Board, start: usize, piece: Square) -> bool {\n\n check_all(board, start, start + 1, start + 2, piece)\n\n}\n", "file_path": "src/tictactoe/controller.rs", "rank": 38, "score": 66343.26905273489 }, { "content": "struct State {\n\n play_state: PlayState,\n\n msg: Option<String>,\n\n cursor: Cursor,\n\n board: Board,\n\n move_cursor: usize,\n\n roll: Option<usize>,\n\n next_move_time: f64,\n\n last_human_cursor_pos: usize,\n\n valid_moves: HashMap<usize, Vec<Move>>,\n\n}\n\n\n\nimpl State {\n\n //Safe, will return empty vec for empty/other players squares\n\n fn get_moves_for_selected_piece(&self) -> Vec<Move> {\n\n self.valid_moves\n\n .get(&self.cursor.idx)\n\n .unwrap_or(&vec![])\n\n .clone()\n\n }\n", "file_path": "src/senet/mod.rs", "rank": 39, "score": 65299.32587776398 }, { "content": "struct State {\n\n cursor: WrappedUsize,\n\n subcursor: Option<WrappedUsize>,\n\n}\n", "file_path": "src/menu/mod.rs", "rank": 40, "score": 65299.32587776398 }, { "content": "struct State {\n\n play_state: PlayState,\n\n cursor: WrappedUsize,\n\n 
computer_cursor: usize,\n\n board: Board,\n\n drop_move: Option<DropMove>,\n\n next_move_time: f64,\n\n animation_time: f64,\n\n message: Option<(String, bool)>,\n\n}\n", "file_path": "src/mancala/mod.rs", "rank": 41, "score": 65299.32587776398 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n board: Board,\n\n board_calc: ChessBoard,\n\n play_state: PlayState,\n\n piece_cursor: Cursor,\n\n all_possible_moves: HashMap<usize, Vec<Move>>,\n\n move_cursor: usize,\n\n move_history: Vec<PastMove>,\n\n next_move_time: f64,\n\n last_human_cursor_pos: usize,\n\n}\n\n\n\nimpl State {\n\n //Safe, will return empty vec for empty/other players squares\n\n fn get_moves_for_selected_piece(&self) -> Vec<Move> {\n\n self.all_possible_moves\n\n .get(&self.piece_cursor.idx)\n\n .unwrap_or(&vec![])\n\n .clone()\n\n }\n", "file_path": "src/draughts/mod.rs", "rank": 42, "score": 65299.32587776398 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n play_state: PlayState,\n\n piece_cursor: Cursor,\n\n move_cursor: usize,\n\n board: Board,\n\n move_history: Vec<PastMove>,\n\n game_type: GameType,\n\n next_move_time: f64,\n\n last_human_cursor_pos: usize,\n\n all_possible_moves: HashMap<usize, Vec<Move>>,\n\n captured: HashMap<Player, Vec<ChessPiece>>,\n\n moves_left_this_turn: usize,\n\n}\n\n\n", "file_path": "src/chess/mod.rs", "rank": 43, "score": 65299.32587776398 }, { "content": "struct State {\n\n board: Board,\n\n play_state: PlayState,\n\n cursor: Cursor,\n\n last_human_cursor_pos: usize,\n\n last_human_placed: Square,\n\n move_cursor: Mode,\n\n player_mode: Mode,\n\n next_move_time: f64,\n\n}\n", "file_path": "src/orderchaos/mod.rs", "rank": 44, "score": 65299.32587776398 }, { "content": "struct State {\n\n board: Board,\n\n cursor: Cursor,\n\n play_state: PlayState,\n\n last_human_cursor_pos: usize,\n\n valid_moves: Vec<Move>,\n\n player_mode: Mode,\n\n next_move_time: f64,\n\n move_cursor: usize,\n\n}\n\n\n\nimpl State {\n\n fn get_moves_for_selected_piece(&self) -> 
Vec<Move> {\n\n self.valid_moves\n\n .iter()\n\n .filter(|mov| mov.origin == self.cursor.idx)\n\n .cloned()\n\n .collect()\n\n }\n\n\n", "file_path": "src/tablut/mod.rs", "rank": 45, "score": 65299.32587776398 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n board: Board,\n\n cursor: Cursor,\n\n next_move_time: f64,\n\n play_state: PlayState,\n\n}\n\n\n\nimpl Display for State {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"State: {:?}\\nSelected: {}\\n{}\",\n\n self.play_state,\n\n self.cursor.idx,\n\n board_to_string(&self.board)\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/tictactoe/mod.rs", "rank": 46, "score": 65299.32587776398 }, { "content": "struct State {\n\n cursor: WrappedUsize,\n\n}\n\n\n\npub struct TestMenu {\n\n state: State,\n\n change_scene: Option<&'static str>,\n\n}\n\n\n\nimpl TestMenu {\n\n pub fn new() -> Self {\n\n TestMenu {\n\n state: State {\n\n cursor: WrappedUsize::new_zero_based(ITEMS.len()),\n\n },\n\n change_scene: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/graphics_testing/mod.rs", "rank": 47, "score": 63793.90651587557 }, { "content": "fn main() {\n\n debug_log!(\"Games starting...\");\n\n let matches = args_matches();\n\n\n\n if let Some(true) = matches.get_one(ARG_RULES) {\n\n debug_log!(\"Rules only\");\n\n let game: &String = matches.get_one(ARG_GAME).unwrap();\n\n print_rules(&game);\n\n } else {\n\n let (mut ctx, event_loop) = setup_ggez()\n\n .build()\n\n .expect(\"Could not create ggez context!\");\n\n\n\n let mut system = GameSystem::new(&mut ctx);\n\n\n\n debug_log!(\"Games started\");\n\n\n\n if let Some(true) = matches.get_one(ARG_TEST) {\n\n system.start_game(TEST_MENU);\n\n } else {\n", "file_path": "src/main.rs", "rank": 48, "score": 61102.3409009679 }, { "content": "fn does_move_result_in_self_check(game_type: &GameType, board: &Board, mov: &Move) -> bool {\n\n let player = board[mov.from]\n\n .get_player()\n\n .expect(\"No player on square for move\");\n\n let board = 
clone_board_with_move(game_type, board, mov);\n\n game_type.is_king_in_check(&board, player)\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 49, "score": 60431.59104875032 }, { "content": "fn check_all(board: &Board, s1: usize, s2: usize, s3: usize, piece: Square) -> bool {\n\n board[s1] == board[s2] && board[s2] == board[s3] && board[s3] == piece\n\n}\n\n\n", "file_path": "src/tictactoe/controller.rs", "rank": 50, "score": 58951.21933403788 }, { "content": "pub trait Offset<T> {\n\n fn offsetx(&self, amount: T) -> Self;\n\n fn offsety(&self, amount: T) -> Self;\n\n fn offset(&self, amount_x: T, amount_y: T) -> Self;\n\n fn multiply(&self, amount_x: T, amount_y: T) -> Self;\n\n}\n\n\n\nimpl Offset<i32> for Point {\n\n fn offsetx(&self, amount: i32) -> Self {\n\n pt(self.x + amount as f32, self.y)\n\n }\n\n\n\n fn offsety(&self, amount: i32) -> Self {\n\n pt(self.x, self.y + amount as f32)\n\n }\n\n\n\n fn offset(&self, amount_x: i32, amount_y: i32) -> Self {\n\n pt(self.x + amount_x as f32, self.y + amount_y as f32)\n\n }\n\n\n", "file_path": "src/system/math.rs", "rank": 51, "score": 58228.57238131788 }, { "content": "pub fn padding() -> String {\n\n format!(\"{0:<1$}\", \" \", get_padding())\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! debug_log {\n\n ($msg:expr) => {\n\n if $crate::LOGGING_ENABLED {\n\n println!(\"{}{}\", $crate::macros::padding(), $msg);\n\n }\n\n };\n\n ($($x:tt)*)=> {\n\n if $crate::LOGGING_ENABLED {\n\n println!(\"{}{}\", $crate::macros::padding(), format!($($x)*));\n\n }\n\n };\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
debug_log_start {\n", "file_path": "src/macros.rs", "rank": 52, "score": 54142.178760236624 }, { "content": "pub fn get_padding() -> usize {\n\n PADDING.load(Ordering::SeqCst)\n\n}\n\n\n", "file_path": "src/macros.rs", "rank": 53, "score": 52791.87104804833 }, { "content": "pub fn make_letter_mesh(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n size: f32,\n\n letter: char,\n\n) -> GameResult<Rc<Mesh>> {\n\n mesh_helper.make_mesh(\n\n ctx,\n\n format!(\"letter_{}_{}\", letter, size),\n\n &(|builder| match letter {\n\n 'a' => letter_mesh_a(builder, size, size),\n\n 'b' => letter_mesh_b(builder, size, size),\n\n 'c' => letter_mesh_c(builder, size, size),\n\n 'd' => letter_mesh_d(builder, size, size),\n\n 'j' => letter_mesh_j(builder, size, size),\n\n 'k' => letter_mesh_k(builder, size, size),\n\n 'm' => letter_mesh_m(builder, size, size),\n\n 'n' => letter_mesh_n(builder, size, size),\n\n 'o' => letter_mesh_o(builder, size, size),\n\n 'p' => letter_mesh_p(builder, size, size),\n\n 'q' => letter_mesh_q(builder, size, size),\n\n 'r' => letter_mesh_r(builder, size, size),\n\n 't' => letter_mesh_t(builder, size, size),\n\n 'w' => letter_mesh_w(builder, size, size),\n\n 'x' => letter_mesh_x(builder, size, size),\n\n _ => panic!(\"Not implemented for {}\", letter),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "src/system/letter_mesh.rs", "rank": 54, "score": 52177.38860041429 }, { "content": "#[inline]\n\npub fn board_cols() -> usize {\n\n BOARD_COLS.load(Ordering::SeqCst)\n\n}\n\n\n", "file_path": "src/boards/mod.rs", "rank": 55, "score": 51548.35717040414 }, { "content": "#[inline]\n\npub fn board_rows() -> usize {\n\n BOARD_ROWS.load(Ordering::SeqCst)\n\n}\n\n\n", "file_path": "src/boards/mod.rs", "rank": 56, "score": 51548.35717040414 }, { "content": "pub fn set_padding(value: usize) {\n\n PADDING.store(value, Ordering::SeqCst)\n\n}\n\n\n", "file_path": "src/macros.rs", "rank": 57, "score": 50024.30278760088 }, { "content": "pub fn sub_padding(amount: usize) 
{\n\n let padding = get_padding();\n\n if padding >= amount {\n\n set_padding(padding - amount);\n\n }\n\n}\n\n\n", "file_path": "src/macros.rs", "rank": 58, "score": 50024.30278760088 }, { "content": "pub fn print_rules(name: &str) {\n\n let game = ITEMS\n\n .iter()\n\n .map(|(item, list)| {\n\n let mut full_list = vec![];\n\n if let Some(list) = list {\n\n list.iter().for_each(|item| full_list.push(item.clone()));\n\n }\n\n full_list.push(item.clone());\n\n full_list\n\n })\n\n .flatten()\n\n .find(|item| item.code == name)\n\n .expect(\"Invalid game\");\n\n println!(\"{}\\n{}\", game.name, game.desc);\n\n}\n\n\n", "file_path": "src/menu/mod.rs", "rank": 59, "score": 48875.395433662095 }, { "content": "fn board_to_string(board: &Board) -> String {\n\n format!(\n\n r\" {}|{}|{}\n\n -----\n\n {}|{}|{}\n\n -----\n\n {}|{}|{}\",\n\n board[0], board[1], board[2], board[3], board[4], board[5], board[6], board[7], board[8]\n\n )\n\n}\n\n\n\nimpl Display for Square {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Square::X => write!(f, \"X\"),\n\n Square::O => write!(f, \"O\"),\n\n Square::E => write!(f, \" \"),\n\n }\n\n }\n\n}\n", "file_path": "src/tictactoe/mod.rs", "rank": 60, "score": 48875.395433662095 }, { "content": "fn check_full(board: &Board) -> bool {\n\n board.iter().all(|s| s != &E)\n\n}\n\n\n", "file_path": "src/tictactoe/controller.rs", "rank": 61, "score": 48875.395433662095 }, { "content": "//Return the index of the nearest item in the list to 'from' in the direction of 'search'\n\npub fn find_nearest<T: Debug>(\n\n list: &[T],\n\n from: usize,\n\n search: Direction,\n\n transform: &dyn Fn(&T) -> BoardCoord,\n\n) -> Option<usize> {\n\n debug_log!(\n\n \"Finding nearest {:?} from {:?} out of {:?}\",\n\n search,\n\n from,\n\n list\n\n );\n\n let from = transform(&list[from]);\n\n let nearest = list\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, mov)| {\n\n let pt = transform(mov);\n\n match search {\n\n Direction::Up => pt.1 
< from.1,\n", "file_path": "src/system/find_nearest.rs", "rank": 62, "score": 47810.693756491826 }, { "content": "fn check_player_win(board: &Board) -> bool {\n\n check_board(board, PLAYER_PIECE)\n\n}\n\n\n", "file_path": "src/tictactoe/controller.rs", "rank": 63, "score": 47810.693756491826 }, { "content": "pub fn set_board_size(size: (usize, usize)) {\n\n BOARD_ROWS.store(size.0, Ordering::SeqCst);\n\n BOARD_COLS.store(size.1, Ordering::SeqCst);\n\n}\n\n\n", "file_path": "src/boards/mod.rs", "rank": 64, "score": 45500.340687155796 }, { "content": "fn default_piece_letter_conversion(piece: &ChessPiece) -> char {\n\n match piece {\n\n ChessPiece::Pawn => 'p',\n\n ChessPiece::Rook => 'r',\n\n ChessPiece::Knight => 'n',\n\n ChessPiece::Bishop => 'b',\n\n ChessPiece::Queen => 'q',\n\n ChessPiece::King => 'k',\n\n ChessPiece::KnightBishop => 'a',\n\n ChessPiece::KnightRook => 'c',\n\n }\n\n}\n\n\n\nmod init {\n\n use crate::chess::rules::ChessPiece::Bishop as B;\n\n use crate::chess::rules::ChessPiece::King as K;\n\n use crate::chess::rules::ChessPiece::Knight as N;\n\n use crate::chess::rules::ChessPiece::KnightBishop as KB;\n\n use crate::chess::rules::ChessPiece::KnightRook as KR;\n\n use crate::chess::rules::ChessPiece::Pawn as P;\n", "file_path": "src/chess/game_types.rs", "rank": 65, "score": 45038.41813028739 }, { "content": "fn default_piece_value_conversion(piece: &ChessPiece) -> usize {\n\n match piece {\n\n ChessPiece::Pawn => 1,\n\n ChessPiece::Knight => 10,\n\n ChessPiece::Bishop | ChessPiece::Rook => 20,\n\n ChessPiece::KnightRook | ChessPiece::KnightBishop => 50,\n\n ChessPiece::Queen => 80,\n\n ChessPiece::King => 100,\n\n }\n\n}\n\n\n", "file_path": "src/chess/game_types.rs", "rank": 66, "score": 45038.41813028739 }, { "content": "pub fn keycode_to_direction(key: KeyCode) -> Option<Direction> {\n\n match key {\n\n KeyCode::Up => Some(Direction::Up),\n\n KeyCode::Down => Some(Direction::Down),\n\n KeyCode::Left => Some(Direction::Left),\n\n 
KeyCode::Right => Some(Direction::Right),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/system/ggez_ext.rs", "rank": 67, "score": 42561.619364187645 }, { "content": "fn calc_kirin(board: &Board, origin: usize) -> Vec<Move> {\n\n vec![\n\n can_jump(board, origin, 0, 2),\n\n can_jump(board, origin, 1, 1),\n\n can_jump(board, origin, 2, 0),\n\n can_jump(board, origin, 1, -1),\n\n can_jump(board, origin, 0, -2),\n\n can_jump(board, origin, -1, -1),\n\n can_jump(board, origin, -2, 0),\n\n can_jump(board, origin, -1, 1),\n\n ]\n\n .into_iter()\n\n .filter_map(|item| item.map(|to| Move::new(origin, to)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 68, "score": 42402.67367605539 }, { "content": "fn calc_rose(board: &Board, origin: usize) -> Vec<Move> {\n\n todo!(\"Not implemented as it requires another move type\")\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 69, "score": 42402.67367605539 }, { "content": "fn calc_knight(board: &Board, origin: usize) -> Vec<Move> {\n\n check_jump_pattern(board, origin, 2, 1)\n\n .into_iter()\n\n .map(|to| Move::new(origin, to))\n\n .collect()\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 70, "score": 42402.67367605539 }, { "content": "fn calc_rook(board: &Board, origin: usize) -> Vec<Move> {\n\n let mut result = vec![];\n\n\n\n result.append(&mut check_line(board, origin, -1, 0));\n\n result.append(&mut check_line(board, origin, 1, 0));\n\n result.append(&mut check_line(board, origin, 0, -1));\n\n result.append(&mut check_line(board, origin, 0, 1));\n\n\n\n result\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 71, "score": 42402.67367605539 }, { "content": "fn calc_bishop(board: &Board, origin: usize) -> Vec<Move> {\n\n let mut result = vec![];\n\n\n\n result.append(&mut check_line(board, origin, -1, -1));\n\n result.append(&mut check_line(board, origin, 1, 1));\n\n result.append(&mut check_line(board, origin, 1, -1));\n\n result.append(&mut check_line(board, origin, -1, 
1));\n\n\n\n result\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 72, "score": 42402.67367605539 }, { "content": "fn calc_superknight(board: &Board, origin: usize) -> Vec<Move> {\n\n check_jump_pattern(board, origin, 2, 1)\n\n .into_iter()\n\n .chain(check_jump_pattern(board, origin, 3, 1).into_iter())\n\n .chain(check_jump_pattern(board, origin, 3, 2).into_iter())\n\n .map(|to| Move::new(origin, to))\n\n .collect()\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 73, "score": 42402.67367605539 }, { "content": "fn get_char(str: &str, idx: usize) -> u8 {\n\n str.chars().nth(idx).expect(\"No char\").to_ascii_lowercase() as u8\n\n}\n\n\n\n#[allow(non_snake_case)]\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n lazy_static! {\n\n static ref CONVERTER_8: SingleCharBoardConverter = SingleCharBoardConverter::new(8, 8);\n\n static ref CONVERTER_4: SingleCharBoardConverter = SingleCharBoardConverter::new(4, 4);\n\n static ref CONVERTER_35: SingleCharBoardConverter = SingleCharBoardConverter::new(3, 5);\n\n }\n\n\n\n //\n\n //CONVERTER_8\n\n //\n\n\n\n #[test]\n", "file_path": "src/boards/single_char_board_converter.rs", "rank": 74, "score": 41755.66713308465 }, { "content": "fn to_coord(idx: usize, cols: u8) -> (u8, u8) {\n\n let (x, y) = idx_to_coord(idx, cols as usize);\n\n (x as u8, y as u8)\n\n}\n\n\n", "file_path": "src/boards/single_char_board_converter.rs", "rank": 75, "score": 40735.733096824355 }, { "content": "pub fn idx_to_coord(idx: usize, cols: usize) -> (usize, usize) {\n\n let x = idx % cols;\n\n let y = idx / cols;\n\n (x, y)\n\n}\n\n\n", "file_path": "src/system/math.rs", "rank": 76, "score": 40635.040210845895 }, { "content": "//Adds the difference between origin and mid to mid and returns it\n\n//If origin = idx(3,4) and mid = idx(3,3) then returns idx(3,2)\n\n//If origin = idx(1,3) and mid = idx(3,4) then returns idx(5,5)\n\npub fn next_step(origin: usize, mid: usize) -> Option<usize> {\n\n let origin: (isize, isize) = 
BoardCoord::from(origin).into();\n\n let mid: (isize, isize) = BoardCoord::from(mid).into();\n\n let diff = (mid.0 - origin.0, mid.1 - origin.1);\n\n let dest = (origin.0 + diff.0 * 2, origin.1 + diff.1 * 2);\n\n if dest.is_in_board() {\n\n Some(BoardCoord::from(dest).idx())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/system/math.rs", "rank": 77, "score": 40635.040210845895 }, { "content": "fn clone_board_with_move(game_type: &GameType, board: &Board, mov: &Move) -> Board {\n\n let mut board = board.clone();\n\n game_type.process_move(&mut board, mov);\n\n board\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 78, "score": 38362.451956301265 }, { "content": "fn calc_elephant(game_type: &GameType, board: &Board, origin: usize) -> Vec<Move> {\n\n vec![\n\n can_jump(board, origin, 2, 2),\n\n can_jump(board, origin, -2, 2),\n\n can_jump(board, origin, 2, -2),\n\n can_jump(board, origin, -2, -2),\n\n ]\n\n .into_iter()\n\n .filter_map(|item| item.map(|to| Move::new(origin, to)))\n\n .chain(calc_king(game_type, board, origin).into_iter())\n\n .collect()\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 79, "score": 37551.21357991627 }, { "content": "fn calc_pawn(game_type: &GameType, board: &Board, origin: usize) -> Vec<Move> {\n\n //TODO En passant\n\n let mut results = vec![];\n\n let origin_pos: (isize, isize) = BoardCoord::from(origin).into();\n\n match board[origin].get_player().expect(\"No player for calc pawn\") {\n\n Player::Human => {\n\n let step_idx = BoardCoord::from((origin_pos.0, 5)).idx();\n\n let long_idx = BoardCoord::from((origin_pos.0, 4)).idx();\n\n if is_capturable(board[origin], board[step_idx]) {\n\n results.push(Move::new(origin, step_idx));\n\n if board[step_idx] == Square::Empty\n\n && origin_pos.1 == 6\n\n && is_capturable(board[origin], board[long_idx])\n\n {\n\n results.push(Move::new(origin, long_idx));\n\n }\n\n }\n\n }\n\n Player::Computer => {\n\n let step_idx = BoardCoord::from((origin_pos.0, 2)).idx();\n", 
"file_path": "src/chess/rules.rs", "rank": 80, "score": 37551.21357991627 }, { "content": "fn can_jump(board: &Board, current: usize, target: usize, player: Player) -> bool {\n\n if board[target] != player.into() {\n\n let mut consecutive_count = 0;\n\n for slot in board.iter().take(target).skip(current + 1) {\n\n //TODO maybe also reset count after changing row\n\n if slot == &Square::Empty || slot == &player.into() {\n\n consecutive_count = 0\n\n } else {\n\n consecutive_count += 1;\n\n if consecutive_count >= 2 {\n\n break;\n\n }\n\n }\n\n }\n\n if consecutive_count < 2 {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n", "file_path": "src/senet/rules.rs", "rank": 81, "score": 37531.68146318672 }, { "content": "pub fn get_neighbours(origin: usize, plus: bool, cross: bool) -> Vec<usize> {\n\n let coord = BoardCoord::from(origin);\n\n let mut neighbours = vec![];\n\n if cross {\n\n neighbours.push(offset(&coord, -1, -1));\n\n neighbours.push(offset(&coord, 1, -1));\n\n neighbours.push(offset(&coord, -1, 1));\n\n neighbours.push(offset(&coord, 1, 1));\n\n }\n\n if plus {\n\n neighbours.push(offset(&coord, 0, 1));\n\n neighbours.push(offset(&coord, 0, -1));\n\n neighbours.push(offset(&coord, 1, 0));\n\n neighbours.push(offset(&coord, -1, 0));\n\n }\n\n neighbours\n\n .iter()\n\n .filter_map(|pair| {\n\n if pair.is_in_board() {\n\n Some(BoardCoord::from(*pair).idx())\n\n } else {\n\n None\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/system/neighbours.rs", "rank": 82, "score": 37531.68146318672 }, { "content": "fn calc_king(game_type: &GameType, board: &Board, origin_idx: usize) -> Vec<Move> {\n\n debug_log_start!(\"Checking king moves for {}\", origin_idx);\n\n let mut results = vec![];\n\n let origin: (isize, isize) = BoardCoord::from(origin_idx).into();\n\n for x in origin.0 - 1..=origin.0 + 1 {\n\n for y in origin.1 - 1..=origin.1 + 1 {\n\n if (x, y) != origin && (x, y).is_in_board() {\n\n let idx = BoardCoord::from((x, y)).idx();\n\n if 
is_capturable(board[origin_idx], board[idx]) {\n\n let mov = Move::new(origin_idx, idx);\n\n debug_log_start!(\"Found {}\", mov);\n\n if !does_move_result_in_self_check(game_type, board, &mov) {\n\n results.push(mov);\n\n } else {\n\n debug_log!(\"But would self check\");\n\n }\n\n debug_log_end!();\n\n } else {\n\n debug_log!(\"{} blocked\", idx);\n\n }\n\n }\n\n }\n\n }\n\n debug_log_end!();\n\n results\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 83, "score": 36882.056390146834 }, { "content": "fn calc_value_of_move(board: &Board, origin: usize, current: usize, captures: &[usize]) -> usize {\n\n debug_log_start!(\"Calculating value\");\n\n let king_coord = board\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, square)| {\n\n if square == &Square::King {\n\n Some(BoardCoord::from(i))\n\n } else {\n\n None\n\n }\n\n })\n\n .next();\n\n let origin_coord = BoardCoord::from(origin);\n\n let dest_coord = BoardCoord::from(current);\n\n let castle_coord = BoardCoord::from(CASTLE);\n\n let mut value = 0;\n\n if CORNERS.contains(&current) {\n\n debug_log!(\"Corner in one move +10000\");\n\n value += 10000;\n", "file_path": "src/tablut/rules.rs", "rank": 84, "score": 36821.07514808847 }, { "content": "fn check_line(board: &Board, origin: usize, x_diff: isize, y_diff: isize) -> Vec<Move> {\n\n let mut result = vec![];\n\n let mut target: (isize, isize) = BoardCoord::from(origin).into();\n\n loop {\n\n target.0 += x_diff;\n\n target.1 += y_diff;\n\n if target.is_in_board() {\n\n let idx = BoardCoord::from(target).idx();\n\n if is_capturable(board[origin], board[idx]) {\n\n result.push(Move::new(origin, idx));\n\n if board[idx] != Square::Empty {\n\n break;\n\n }\n\n } else {\n\n break;\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/chess/rules.rs", "rank": 85, "score": 36160.454204300055 }, { "content": "fn can_jump(board: &Board, origin: usize, vert: isize, horz: isize) -> Option<usize> {\n\n let mut pos: (isize, isize) = 
BoardCoord::from(origin).into();\n\n pos.0 += horz;\n\n pos.1 += vert;\n\n\n\n if pos.is_in_board() {\n\n let idx = BoardCoord::from(pos).idx();\n\n if is_capturable(board[origin], board[idx]) {\n\n return Some(idx);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "src/chess/rules.rs", "rank": 86, "score": 36160.454204300055 }, { "content": "use crate::boards::idx_coord::BoardCoord;\n\nuse crate::constants::colors::{DARK_GRAY, GRAY, WHITE};\n\nuse crate::system::letter_mesh::make_letter_mesh;\n\nuse crate::system::math::{pt, Point, Offset, OffsetTuple};\n\nuse crate::system::mesh_helper::MeshHelper;\n\nuse crate::system::PlayState::*;\n\nuse crate::system::Turn::{Computer, Human};\n\nuse crate::system::TurnState::SelectingPiece;\n\nuse crate::tictactoe::{Square, State};\n\nuse ggez::{Context, GameResult};\n\n\n\npub(super) fn render(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n) -> GameResult<()> {\n\n let board_size = mesh_helper.calc_height(0.8);\n\n let cell_size = board_size / 3.;\n\n let grid = mesh_helper.make_grid(ctx, board_size, board_size, 3, 3, 2., GRAY, None)?;\n\n\n", "file_path": "src/tictactoe/renderer.rs", "rank": 89, "score": 35925.531076754974 }, { "content": " render_game(ctx, mesh_helper, state)\n\n }\n\n}\n\n\n\npub(super) fn square_to_color(square: &Square) -> Color {\n\n match square {\n\n Square::Red => RED,\n\n Square::White => WHITE,\n\n Square::Empty => TRANSPARENT,\n\n }\n\n}\n\n\n\npub(super) fn render_game(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n) -> GameResult<()> {\n\n let cell_size = mesh_helper.calc_height(0.13);\n\n let board_start = pt(\n\n (mesh_helper.width * 0.5) - (cell_size * (board_cols() as f32 / 2.)),\n", "file_path": "src/orderchaos/renderer.rs", "rank": 90, "score": 35922.1933433494 }, { "content": "use crate::boards::idx_coord::BoardCoord;\n\nuse crate::boards::{board_cols, board_rows};\n\nuse crate::constants::colors::{\n\n DARK_GRAY, DARK_GREEN, 
FAINT_BLUE, FAINT_RED, LIGHT_BLUE, LIGHT_GRAY, RED, WHITE,\n\n};\n\nuse crate::system::letter_mesh::make_letter_mesh;\n\nuse crate::system::math::{pt, Point, Offset, OffsetTuple};\n\nuse crate::system::mesh_helper::MeshHelper;\n\nuse crate::system::PlayState::ModeSelection;\n\nuse crate::system::TurnState::{SelectingMove, SelectingPiece};\n\nuse crate::tablut::render_mode_selection::render_mode_selection;\n\nuse crate::tablut::{Move, Square, State};\n\nuse ggez::graphics::DrawMode;\n\nuse ggez::{Context, GameResult};\n\n\n\npub(super) fn render(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n) -> GameResult<()> {\n\n if state.play_state == ModeSelection {\n\n render_mode_selection(ctx, mesh_helper, state)\n\n } else {\n\n render_game(ctx, mesh_helper, state)\n\n }\n\n}\n\n\n", "file_path": "src/tablut/renderer.rs", "rank": 91, "score": 35921.22474838594 }, { "content": "use crate::boards::idx_coord::BoardCoord;\n\nuse crate::boards::{board_cols, board_rows};\n\nuse crate::constants::colors::{FILTER_BLACK, LIGHT_BLUE, LIGHT_GRAY, RED, TRANSPARENT, WHITE};\n\nuse crate::orderchaos::render_mode_selection::render_mode_selection;\n\nuse crate::orderchaos::{Mode, Square, State};\n\nuse crate::system::math::{Offset, pt};\n\nuse crate::system::mesh_helper::MeshHelper;\n\nuse crate::system::PlayState;\n\nuse crate::system::TurnState::SelectingMove;\n\nuse ggez::graphics::{Color, DrawMode};\n\nuse ggez::{Context, GameResult};\n\n\n\npub(super) fn render(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n) -> GameResult<()> {\n\n if state.play_state == PlayState::ModeSelection {\n\n render_mode_selection(ctx, mesh_helper, state)\n\n } else {\n", "file_path": "src/orderchaos/renderer.rs", "rank": 92, "score": 35919.372321336705 }, { "content": "};\n\npub const COMPUTER_MOVE_PIECE: Color = Color {\n\n r: 0.45,\n\n g: 0.5,\n\n b: 0.25,\n\n a: 1.,\n\n};\n\n\n\npub(super) fn render(\n\n ctx: &mut Context,\n\n 
mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n) -> GameResult<()> {\n\n let board_size = mesh_helper.calc_height(0.9);\n\n let cell_size = board_size / (state.board_calc.rows as f32);\n\n let grid = mesh_helper.make_grid(\n\n ctx,\n\n board_size,\n\n board_size,\n\n state.board_calc.cols,\n", "file_path": "src/draughts/renderer.rs", "rank": 93, "score": 35917.31699421772 }, { "content": "use crate::constants::colors::{LIGHT_GRAY, WHITE};\n\nuse crate::constants::Direction;\n\nuse crate::ext::NewLines;\n\nuse crate::menu::menu_items::ITEMS;\n\nuse crate::menu::State;\n\nuse crate::system::math::{pt, Point, Offset};\n\nuse crate::system::mesh_helper::MeshHelper;\n\nuse ggez::graphics::Color;\n\nuse ggez::{Context, GameResult};\n\n\n\npub(super) fn render(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n) -> GameResult<()> {\n\n let title_start = pt(32., 32.);\n\n let menu_start = pt(34., 100.);\n\n let submenu_start = pt(240., 100.);\n\n let cursor_start = pt(16., 105.);\n\n let subcursor_start = pt(225., 105.);\n", "file_path": "src/menu/renderer.rs", "rank": 94, "score": 35916.84987467724 }, { "content": "pub(super) fn render(\n\n ctx: &mut Context,\n\n mesh_helper: &mut MeshHelper,\n\n state: &State,\n\n) -> GameResult<()> {\n\n let cell_size = (mesh_helper.height * 0.9) / board_cols().max(board_rows()) as f32;\n\n let board_width = cell_size * board_cols() as f32;\n\n let board_height = cell_size * board_rows() as f32;\n\n let grid = mesh_helper.make_grid(\n\n ctx,\n\n board_width,\n\n board_height,\n\n board_cols(),\n\n board_rows(),\n\n 0.,\n\n TRANSPARENT,\n\n Some([APRICOT, COPPER]),\n\n )?;\n\n let grid_box = mesh_helper.make_rect(ctx, board_width, board_height, DrawMode::stroke(3.))?;\n\n\n", "file_path": "src/chess/renderer.rs", "rank": 95, "score": 35915.78973441759 }, { "content": " ctx,\n\n &format!(\"{}\", board_index_to_pdn_num(x + y * board_cols())),\n\n board_start\n\n .offset(x as f32 * cell_size, y as f32 * 
cell_size)\n\n .offset(3, 3),\n\n CREAM,\n\n 14.,\n\n false,\n\n );\n\n }\n\n }\n\n\n\n state.board.iter().enumerate().for_each(|(i, item)| {\n\n let xy = Point::from(BoardCoord::from(i))\n\n .multiply(cell_size, cell_size)\n\n .offset_point(board_start);\n\n\n\n match item {\n\n Square::ComputerMan => {\n\n mesh_helper.draw_coloured_mesh(ctx, piece.as_ref(), xy, PIECE_COMPUTER)\n", "file_path": "src/draughts/renderer.rs", "rank": 96, "score": 35914.8566689503 }, { "content": "use crate::boards::idx_coord::BoardCoord;\n\nuse crate::boards::{board_cols, board_rows};\n\nuse crate::constants::colors::{\n\n alpha, APRICOT, BLACK, COPPER, CREAM, DARK_GREEN, LIGHT_BLUE, PIECE_COMPUTER, PIECE_HUMAN,\n\n TRANSPARENT, WHITE,\n\n};\n\n\n\nuse crate::chess::State;\n\nuse crate::system::letter_mesh::make_letter_mesh;\n\nuse crate::system::math::{pt, pt_usize, Point, Offset, OffsetTuple};\n\nuse crate::system::mesh_helper::MeshHelper;\n\nuse crate::system::Player;\n\nuse crate::system::TurnState::{SelectingMove, SelectingPiece};\n\nuse ggez::graphics::{Color, DrawMode, Mesh};\n\nuse ggez::{Context, GameResult};\n\nuse std::rc::Rc;\n\n\n\npub const HUMAN_PIECE: Color = WHITE;\n\npub const COMPUTER_PIECE: Color = BLACK;\n\n\n", "file_path": "src/chess/renderer.rs", "rank": 97, "score": 35914.74229367583 }, { "content": " .multiply(cell_size, cell_size)\n\n .offset_point(board_start);\n\n let colour = match square.get_player().unwrap() {\n\n Player::Human => HUMAN_PIECE,\n\n Player::Computer => COMPUTER_PIECE,\n\n };\n\n mesh_helper.draw_coloured_mesh(ctx, mesh.as_ref(), pt, colour);\n\n }\n\n }\n\n }\n\n\n\n //TODO purpose?\n\n // state.board.iter().enumerate().for_each(|(i, item)| {\n\n // let xy = Point::from(BoardCoord::from(i))\n\n // .multiply(cell_size, cell_size)\n\n // .offset_point(board_start);\n\n // });\n\n\n\n if state.play_state.is_human(SelectingPiece) {\n\n state\n", "file_path": "src/chess/renderer.rs", "rank": 98, "score": 35912.91708373167 }, { "content": "use 
crate::boards::idx_coord::BoardCoord;\n\nuse crate::boards::{board_cols, board_rows};\n\nuse crate::constants::colors::{\n\n alpha, CREAM, DARK_GREEN, LIGHT_BLUE, PIECE_COMPUTER, PIECE_HUMAN, TRANSPARENT,\n\n};\n\nuse crate::draughts::moves::Move::*;\n\nuse crate::draughts::{board_index_to_pdn_num, Square, State};\n\nuse crate::system::letter_mesh::make_letter_mesh;\n\nuse crate::system::math::{pt, Point, Offset, OffsetTuple};\n\nuse crate::system::mesh_helper::MeshHelper;\n\nuse crate::system::TurnState::{SelectingMove, SelectingPiece};\n\nuse ggez::graphics::{Color, DrawMode, Mesh};\n\nuse ggez::{Context, GameResult};\n\nuse std::rc::Rc;\n\n\n\npub const PLAYER_MOVE_PIECE: Color = Color {\n\n r: 0.65,\n\n g: 0.8,\n\n b: 0.6,\n\n a: 1.,\n", "file_path": "src/draughts/renderer.rs", "rank": 99, "score": 35912.37384422704 } ]
Rust
mmids-core/src/endpoints/rtmp_server/actor/tests/rtmp_client.rs
AircastDev/mmids
c304d67d1498f7526e5186d315f07986aade1984
use crate::net::tcp::{OutboundPacket, RequestFailureReason, TcpSocketRequest, TcpSocketResponse}; use crate::net::ConnectionId; use crate::test_utils; use bytes::Bytes; use rml_rtmp::handshake::{Handshake, HandshakeProcessResult, PeerType}; use rml_rtmp::sessions::{ ClientSession, ClientSessionConfig, ClientSessionError, ClientSessionEvent, ClientSessionResult, PublishRequestType, StreamMetadata, }; use rml_rtmp::time::RtmpTimestamp; use std::net::{SocketAddr, SocketAddrV4}; use std::time::Duration; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio::time::timeout; pub const CONNECTION_ID: &'static str = "test-1234"; pub struct RtmpTestClient { socket_manager_receiver: UnboundedReceiver<TcpSocketRequest>, socket_manager_response_sender: Option<UnboundedSender<TcpSocketResponse>>, port: Option<u16>, connection: Option<Connection>, } struct Connection { incoming_bytes: UnboundedSender<Bytes>, outgoing_bytes: UnboundedReceiver<OutboundPacket>, session: ClientSession, } impl RtmpTestClient { pub fn new() -> (Self, UnboundedSender<TcpSocketRequest>) { let (sender, receiver) = unbounded_channel(); let client = RtmpTestClient { socket_manager_receiver: receiver, socket_manager_response_sender: None, port: None, connection: None, }; (client, sender) } pub async fn accept_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestAccepted {}); 
self.socket_manager_response_sender = Some(response_channel); self.port = Some(port); } } } pub async fn deny_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestDenied { reason: RequestFailureReason::PortInUse, }); } } } pub async fn expect_empty_request_channel(&mut self) { test_utils::expect_mpsc_timeout(&mut self.socket_manager_receiver).await; } pub async fn assert_connection_sender_closed(&mut self) { let connection = self .connection .as_mut() .expect("Connection not established yet"); match timeout( Duration::from_millis(10), connection.incoming_bytes.closed(), ) .await { Ok(()) => return, Err(_) => panic!("Response sender not closed as expected (not disconnected"), } } pub async fn perform_handshake(&mut self) { if self.connection.is_some() { panic!("Only one connection is supported at a time"); } let connection_id = ConnectionId(CONNECTION_ID.to_string()); let (incoming_sender, incoming_receiver) = unbounded_channel(); let (outgoing_sender, mut outgoing_receiver) = unbounded_channel(); self.socket_manager_response_sender .as_ref() .unwrap() .send(TcpSocketResponse::NewConnection { port: self.port.unwrap(), connection_id: connection_id.clone(), incoming_bytes: incoming_receiver, outgoing_bytes: outgoing_sender, socket_address: SocketAddr::V4(SocketAddrV4::new([127, 0, 0, 1].into(), 1234)), }) .expect("Failed to send new connection signal"); let mut handshake = 
Handshake::new(PeerType::Client); let p0_and_p1 = handshake .generate_outbound_p0_and_p1() .expect("Failed to generate p0 and p1"); incoming_sender .send(Bytes::from(p0_and_p1)) .expect("incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process received p0 and p1 packet"); let response_bytes = match result { HandshakeProcessResult::InProgress { response_bytes } => response_bytes, HandshakeProcessResult::Completed { .. } => { panic!("Did not expect to be completed after first packet") } }; incoming_sender .send(Bytes::from(response_bytes)) .expect("Incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process p2 packet"); match result { HandshakeProcessResult::InProgress { .. } => { panic!("Did not expect to still be in progress after 2nd packet") } HandshakeProcessResult::Completed { remaining_bytes, .. 
} => { if remaining_bytes.len() > 0 { panic!("Expected no leftover bytes after handshake completed"); } } } let (mut session, client_results) = ClientSession::new(ClientSessionConfig::new()) .expect("Failed to generate client session"); for result in client_results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } x => panic!("Unexpected session result of {:?}", x), } } loop { let packet = match timeout(Duration::from_millis(10), outgoing_receiver.recv()).await { Ok(Some(packet)) => packet, Ok(None) => panic!("outgoing receiver sender closed"), Err(_) => break, }; let results = session .handle_input(&packet.bytes) .expect("Error processing bytes"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } _ => (), } } } self.connection = Some(Connection { session, incoming_bytes: incoming_sender, outgoing_bytes: outgoing_receiver, }) } pub async fn connect_to_app(&mut self, app: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_connection(app)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let response = test_utils::expect_mpsc_response(&mut connection.outgoing_bytes).await; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); let mut event_raised = false; for result in results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::ConnectionRequestAccepted, ) => event_raised = true, _ => (), } } if !event_raised { panic!("No connection request accepted event raised"); } } } pub async fn publish_to_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| { session.request_publishing(stream_key, PublishRequestType::Live) }); let 
receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } assert_eq!(all_results.len(), 1, "Only one result expected"); match all_results.remove(0) { ClientSessionResult::RaisedEvent(ClientSessionEvent::PublishRequestAccepted) => (), result => panic!("Unexpected result seen: {:?}", result), } } } pub async fn watch_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_playback(stream_key)); let receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } let mut accepted_event_received = false; for result in all_results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::PlaybackRequestAccepted, ) => accepted_event_received = true, _ => (), } } assert!( 
accepted_event_received, "PlaybackRequestAccepted event not raised" ); } } pub async fn stop_watching(&mut self) { self.execute_session_method_vec_result(|session| session.stop_playback()); } pub fn disconnect(&mut self) { self.connection = None; } pub async fn stop_publishing(&mut self) { self.execute_session_method_vec_result(|session| session.stop_publishing()); } pub fn publish_metadata(&mut self, metadata: StreamMetadata) { self.execute_session_method_single_result(|session| session.publish_metadata(&metadata)); } pub fn publish_video(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_video_data(data, timestamp, false) }); } pub fn publish_audio(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_audio_data(data, timestamp, false) }); } pub fn execute_session_method_single_result( &mut self, function: impl FnOnce(&mut ClientSession) -> Result<ClientSessionResult, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let result = function(&mut connection.session).expect("Client session returned error"); match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send stop publishing command"), x => panic!("Unexpected session result: {:?}", x), } } fn execute_session_method_vec_result( &mut self, function: impl FnOnce( &mut ClientSession, ) -> Result<Vec<ClientSessionResult>, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let results = function(&mut connection.session).expect("Client session returned error"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send packet"), x => panic!("Unexpected session 
result: {:?}", x), } } } pub async fn get_next_event(&mut self) -> Option<ClientSessionEvent> { let connection = self .connection .as_mut() .expect("Connection not established yet"); loop { let packet = match timeout(Duration::from_millis(10), connection.outgoing_bytes.recv()).await { Ok(Some(packet)) => packet, _ => break, }; let results = connection .session .handle_input(&packet.bytes) .expect("Failed to handle packet"); for result in results { match result { ClientSessionResult::RaisedEvent(event) => return Some(event), _ => (), } } } return None; } }
use crate::net::tcp::{OutboundPacket, RequestFailureReason, TcpSocketRequest, TcpSocketResponse}; use crate::net::ConnectionId; use crate::test_utils; use bytes::Bytes; use rml_rtmp::handshake::{Handshake, HandshakeProcessResult, PeerType}; use rml_rtmp::sessions::{ ClientSession, ClientSessionConfig, ClientSessionError, ClientSessionEvent, ClientSessionResult, PublishRequestType, StreamMetadata, }; use rml_rtmp::time::RtmpTimestamp; use std::net::{SocketAddr, SocketAddrV4}; use std::time::Duration; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio::time::timeout; pub const CONNECTION_ID: &'static str = "test-1234"; pub struct RtmpTestClient { socket_manager_receiver: UnboundedReceiver<TcpSocketRequest>, socket_manager_response_sender: Option<UnboundedSender<TcpSocketResponse>>, port: Option<u16>, connection: Option<Connection>, } struct Connection { incoming_bytes: UnboundedSender<Bytes>, outgoing_bytes: UnboundedReceiver<OutboundPacket>, session: ClientSession, } impl RtmpTestClient { pub fn new() -> (Self, UnboundedSender<TcpSocketRequest>) { le
pub async fn accept_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestAccepted {}); self.socket_manager_response_sender = Some(response_channel); self.port = Some(port); } } } pub async fn deny_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestDenied { reason: RequestFailureReason::PortInUse, }); } } } pub async fn expect_empty_request_channel(&mut self) { test_utils::expect_mpsc_timeout(&mut self.socket_manager_receiver).await; } pub async fn assert_connection_sender_closed(&mut self) { let connection = self .connection .as_mut() .expect("Connection not established yet"); match timeout( Duration::from_millis(10), connection.incoming_bytes.closed(), ) .await { Ok(()) => return, Err(_) => panic!("Response sender not closed as expected (not disconnected"), } } pub async fn perform_handshake(&mut self) { if 
self.connection.is_some() { panic!("Only one connection is supported at a time"); } let connection_id = ConnectionId(CONNECTION_ID.to_string()); let (incoming_sender, incoming_receiver) = unbounded_channel(); let (outgoing_sender, mut outgoing_receiver) = unbounded_channel(); self.socket_manager_response_sender .as_ref() .unwrap() .send(TcpSocketResponse::NewConnection { port: self.port.unwrap(), connection_id: connection_id.clone(), incoming_bytes: incoming_receiver, outgoing_bytes: outgoing_sender, socket_address: SocketAddr::V4(SocketAddrV4::new([127, 0, 0, 1].into(), 1234)), }) .expect("Failed to send new connection signal"); let mut handshake = Handshake::new(PeerType::Client); let p0_and_p1 = handshake .generate_outbound_p0_and_p1() .expect("Failed to generate p0 and p1"); incoming_sender .send(Bytes::from(p0_and_p1)) .expect("incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process received p0 and p1 packet"); let response_bytes = match result { HandshakeProcessResult::InProgress { response_bytes } => response_bytes, HandshakeProcessResult::Completed { .. } => { panic!("Did not expect to be completed after first packet") } }; incoming_sender .send(Bytes::from(response_bytes)) .expect("Incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process p2 packet"); match result { HandshakeProcessResult::InProgress { .. } => { panic!("Did not expect to still be in progress after 2nd packet") } HandshakeProcessResult::Completed { remaining_bytes, .. 
} => { if remaining_bytes.len() > 0 { panic!("Expected no leftover bytes after handshake completed"); } } } let (mut session, client_results) = ClientSession::new(ClientSessionConfig::new()) .expect("Failed to generate client session"); for result in client_results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } x => panic!("Unexpected session result of {:?}", x), } } loop { let packet = match timeout(Duration::from_millis(10), outgoing_receiver.recv()).await { Ok(Some(packet)) => packet, Ok(None) => panic!("outgoing receiver sender closed"), Err(_) => break, }; let results = session .handle_input(&packet.bytes) .expect("Error processing bytes"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } _ => (), } } } self.connection = Some(Connection { session, incoming_bytes: incoming_sender, outgoing_bytes: outgoing_receiver, }) } pub async fn connect_to_app(&mut self, app: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_connection(app)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let response = test_utils::expect_mpsc_response(&mut connection.outgoing_bytes).await; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); let mut event_raised = false; for result in results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::ConnectionRequestAccepted, ) => event_raised = true, _ => (), } } if !event_raised { panic!("No connection request accepted event raised"); } } } pub async fn publish_to_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| { session.request_publishing(stream_key, PublishRequestType::Live) }); let 
receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } assert_eq!(all_results.len(), 1, "Only one result expected"); match all_results.remove(0) { ClientSessionResult::RaisedEvent(ClientSessionEvent::PublishRequestAccepted) => (), result => panic!("Unexpected result seen: {:?}", result), } } } pub async fn watch_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_playback(stream_key)); let receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } let mut accepted_event_received = false; for result in all_results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::PlaybackRequestAccepted, ) => accepted_event_received = true, _ => (), } } assert!( 
accepted_event_received, "PlaybackRequestAccepted event not raised" ); } } pub async fn stop_watching(&mut self) { self.execute_session_method_vec_result(|session| session.stop_playback()); } pub fn disconnect(&mut self) { self.connection = None; } pub async fn stop_publishing(&mut self) { self.execute_session_method_vec_result(|session| session.stop_publishing()); } pub fn publish_metadata(&mut self, metadata: StreamMetadata) { self.execute_session_method_single_result(|session| session.publish_metadata(&metadata)); } pub fn publish_video(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_video_data(data, timestamp, false) }); } pub fn publish_audio(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_audio_data(data, timestamp, false) }); } pub fn execute_session_method_single_result( &mut self, function: impl FnOnce(&mut ClientSession) -> Result<ClientSessionResult, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let result = function(&mut connection.session).expect("Client session returned error"); match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send stop publishing command"), x => panic!("Unexpected session result: {:?}", x), } } fn execute_session_method_vec_result( &mut self, function: impl FnOnce( &mut ClientSession, ) -> Result<Vec<ClientSessionResult>, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let results = function(&mut connection.session).expect("Client session returned error"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send packet"), x => panic!("Unexpected session 
result: {:?}", x), } } } pub async fn get_next_event(&mut self) -> Option<ClientSessionEvent> { let connection = self .connection .as_mut() .expect("Connection not established yet"); loop { let packet = match timeout(Duration::from_millis(10), connection.outgoing_bytes.recv()).await { Ok(Some(packet)) => packet, _ => break, }; let results = connection .session .handle_input(&packet.bytes) .expect("Failed to handle packet"); for result in results { match result { ClientSessionResult::RaisedEvent(event) => return Some(event), _ => (), } } } return None; } }
t (sender, receiver) = unbounded_channel(); let client = RtmpTestClient { socket_manager_receiver: receiver, socket_manager_response_sender: None, port: None, connection: None, }; (client, sender) }
function_block-function_prefixed
[ { "content": "/// Quick function to create an un-named gstreamer element, while providing a consumable error\n\n/// if that fails.\n\npub fn create_gst_element(name: &str) -> Result<Element> {\n\n ElementFactory::make(name, None).with_context(|| format!(\"Failed to create element '{}'\", name))\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/utils.rs", "rank": 0, "score": 171462.12872548203 }, { "content": "#[instrument(skip(port_map))]\n\nfn clean_disconnected_connection(connection_id: ConnectionId, port_map: &mut PortMapping) {\n\n let connection = match port_map.connections.remove(&connection_id) {\n\n Some(x) => x,\n\n None => return,\n\n };\n\n\n\n info!(\"Connection {} disconnected. Cleaning it up\", connection_id);\n\n match connection.state {\n\n ConnectionState::None => (),\n\n ConnectionState::WaitingForPublishValidation { .. } => (),\n\n ConnectionState::WaitingForWatchValidation { .. } => (),\n\n ConnectionState::Publishing {\n\n rtmp_app,\n\n stream_key,\n\n } => match port_map.rtmp_applications.get_mut(rtmp_app.as_str()) {\n\n None => (),\n\n Some(app_map) => match app_map.active_stream_keys.get_mut(stream_key.as_str()) {\n\n None => (),\n\n Some(active_key) => {\n\n match &active_key.publisher {\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/mod.rs", "rank": 1, "score": 167776.77066663414 }, { "content": "fn handle_connection_stop_publish(connection_id: ConnectionId, port_map: &mut PortMapping) {\n\n let connection = match port_map.connections.get_mut(&connection_id) {\n\n Some(connection) => connection,\n\n None => {\n\n warn!(\n\n \"Connection handler for connection {:?} a sent publish finished notification, but \\\n\n that connection isn't being tracked\",\n\n connection_id\n\n );\n\n\n\n return;\n\n }\n\n };\n\n\n\n match &connection.state {\n\n ConnectionState::Publishing {\n\n rtmp_app,\n\n stream_key,\n\n } => {\n\n let rtmp_app = rtmp_app.clone();\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/mod.rs", "rank": 2, 
"score": 166184.3083725336 }, { "content": "fn handle_connection_stop_watch(connection_id: ConnectionId, port_map: &mut PortMapping) {\n\n let connection = match port_map.connections.get_mut(&connection_id) {\n\n Some(connection) => connection,\n\n None => {\n\n warn!(\"Connection handler for connection {:?} a sent playback finished notification, but \\\n\n that connection isn't being tracked\", connection_id);\n\n\n\n return;\n\n }\n\n };\n\n\n\n match &connection.state {\n\n ConnectionState::Watching {\n\n rtmp_app,\n\n stream_key,\n\n } => {\n\n let rtmp_app = rtmp_app.clone();\n\n let stream_key = stream_key.clone();\n\n connection.state = ConnectionState::None;\n\n match port_map.rtmp_applications.get_mut(rtmp_app.as_str()) {\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/mod.rs", "rank": 3, "score": 166184.3083725336 }, { "content": "/// Parses configuration from a text block.\n\npub fn parse(content: &str) -> Result<MmidsConfig, ConfigParseError> {\n\n let mut config = MmidsConfig {\n\n settings: HashMap::new(),\n\n reactors: HashMap::new(),\n\n workflows: HashMap::new(),\n\n };\n\n\n\n let pairs = RawConfigParser::parse(Rule::content, content)?;\n\n for pair in pairs {\n\n let rule = pair.as_rule();\n\n match &rule {\n\n Rule::node_block => handle_node_block(&mut config, pair)?,\n\n Rule::EOI => (),\n\n x => {\n\n return Err(ConfigParseError::UnexpectedRule {\n\n rule: x.clone(),\n\n section: \"root\".to_string(),\n\n })\n\n }\n\n }\n\n }\n\n\n\n Ok(config)\n\n}\n\n\n", "file_path": "mmids-core/src/config.rs", "rank": 4, "score": 162792.86252774208 }, { "content": "pub fn start_reactor(\n\n name: String,\n\n executor: Box<dyn ReactorExecutor>,\n\n event_hub_subscriber: UnboundedSender<SubscriptionRequest>,\n\n update_interval: Duration,\n\n) -> UnboundedSender<ReactorRequest> {\n\n let (sender, receiver) = unbounded_channel();\n\n let actor = Actor::new(\n\n name,\n\n receiver,\n\n executor,\n\n event_hub_subscriber,\n\n update_interval,\n\n 
);\n\n tokio::spawn(actor.run());\n\n\n\n sender\n\n}\n\n\n", "file_path": "mmids-core/src/reactors/reactor.rs", "rank": 5, "score": 137984.99004480877 }, { "content": "pub fn start_workflow_manager(\n\n step_factory: Arc<WorkflowStepFactory>,\n\n event_hub_publisher: UnboundedSender<PublishEventRequest>,\n\n) -> UnboundedSender<WorkflowManagerRequest> {\n\n let (sender, receiver) = unbounded_channel();\n\n let actor = Actor::new(step_factory, event_hub_publisher);\n\n tokio::spawn(actor.run(receiver, sender.clone()));\n\n\n\n sender\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/manager.rs", "rank": 6, "score": 136130.53450683272 }, { "content": "pub fn start_event_hub() -> (\n\n UnboundedSender<PublishEventRequest>,\n\n UnboundedSender<SubscriptionRequest>,\n\n) {\n\n let (publish_sender, publish_receiver) = unbounded_channel();\n\n let (sub_sender, sub_receiver) = unbounded_channel();\n\n let actor = Actor::new(publish_receiver, sub_receiver);\n\n tokio::spawn(actor.run());\n\n\n\n (publish_sender, sub_sender)\n\n}\n\n\n", "file_path": "mmids-core/src/event_hub.rs", "rank": 7, "score": 136130.53450683272 }, { "content": "pub fn start_reactor_manager(\n\n executor_factory: ReactorExecutorFactory,\n\n event_hub_subscriber: UnboundedSender<SubscriptionRequest>,\n\n) -> UnboundedSender<ReactorManagerRequest> {\n\n let (sender, receiver) = unbounded_channel();\n\n let actor = Actor::new(executor_factory, receiver, event_hub_subscriber);\n\n tokio::spawn(actor.run());\n\n\n\n sender\n\n}\n\n\n", "file_path": "mmids-core/src/reactors/manager.rs", "rank": 8, "score": 136130.53450683272 }, { "content": "/// Starts the execution of a workflow with the specified definition\n\npub fn start_workflow(\n\n definition: WorkflowDefinition,\n\n step_factory: Arc<WorkflowStepFactory>,\n\n) -> UnboundedSender<WorkflowRequest> {\n\n let (sender, receiver) = unbounded_channel();\n\n let actor = Actor::new(&definition, step_factory, receiver);\n\n 
tokio::spawn(actor.run(definition));\n\n\n\n sender\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/runner/mod.rs", "rank": 9, "score": 136130.53450683272 }, { "content": "/// Used to update the gstreamer default log level to info, which can be used for getting more\n\n/// information about failures. Should not be used in production due to the amount of logs that\n\n/// will be raised, and it will not be easy to correlate gstreamer info logs to specific mmids\n\n/// streams.\n\n///\n\n/// **Must** be called after the first invocation of `GSTREAMER_INIT_RESULT`, otherwise the default\n\n/// log level will be overridden to warning.\n\npub fn set_gstreamer_log_level_to_info() {\n\n gstreamer::debug_set_default_threshold(DebugLevel::Info);\n\n}\n", "file_path": "mmids-gstreamer/src/lib.rs", "rank": 10, "score": 134359.7255536886 }, { "content": "/// Starts a new ffmpeg endpoint, and returns the channel in which the newly created endpoint\n\n/// can be communicated with\n\npub fn start_ffmpeg_endpoint(\n\n ffmpeg_exe_path: String,\n\n log_root: String,\n\n) -> Result<UnboundedSender<FfmpegEndpointRequest>, FfmpegEndpointStartError> {\n\n let actor = Actor::new(ffmpeg_exe_path, log_root)?;\n\n let (sender, receiver) = unbounded_channel();\n\n\n\n tokio::spawn(actor.run(receiver));\n\n\n\n Ok(sender)\n\n}\n\n\n", "file_path": "mmids-core/src/endpoints/ffmpeg/mod.rs", "rank": 11, "score": 134358.82868062996 }, { "content": "pub fn start_http_api(\n\n bind_address: SocketAddr,\n\n routes: RoutingTable,\n\n) -> Sender<HttpApiShutdownSignal> {\n\n let routes = Arc::new(routes);\n\n let service = make_service_fn(move |socket: &AddrStream| {\n\n let remote_address = socket.remote_addr();\n\n let routes_clone = routes.clone();\n\n async move {\n\n Ok::<_, hyper::Error>(service_fn(move |request: Request<Body>| {\n\n execute_request(\n\n request,\n\n remote_address,\n\n routes_clone.clone(),\n\n Uuid::new_v4().to_string(),\n\n )\n\n }))\n\n }\n\n });\n\n\n", "file_path": 
"mmids-core/src/http_api/mod.rs", "rank": 12, "score": 134354.2105334852 }, { "content": "/// Sets up an video encoder's `appsrc`'s caps based on the specified codec. Since sequence headers\n\n/// are not valid packets for the codec, we can't just push the sequence header into the appsrc's\n\n/// buffer. Instead, different codecs have different mechanisms to pass the sequence header in\n\n/// so it can be utilized, and this provides a central function for that logic.\n\npub fn set_source_video_sequence_header(\n\n source: &AppSrc,\n\n codec: VideoCodec,\n\n buffer: Buffer,\n\n) -> Result<()> {\n\n match codec {\n\n VideoCodec::H264 => {\n\n let caps = Caps::builder(\"video/x-h264\")\n\n .field(\"codec_data\", buffer)\n\n .build();\n\n\n\n source.set_caps(Some(&caps));\n\n\n\n Ok(())\n\n }\n\n\n\n VideoCodec::Unknown => Err(anyhow!(\n\n \"Video codec is not known, and thus we can't prepare the gstreamer pipeline to \\\n\n accept it.\"\n\n )),\n\n }\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/utils.rs", "rank": 13, "score": 134354.2105334852 }, { "content": "pub fn set_source_audio_sequence_header(\n\n source: &AppSrc,\n\n codec: AudioCodec,\n\n buffer: Buffer,\n\n) -> Result<()> {\n\n match codec {\n\n AudioCodec::Aac => {\n\n let caps = Caps::builder(\"audio/mpeg\")\n\n .field(\"mpegversion\", 4) // I think this is correct? 
Unsure 2 vs 4\n\n .field(\"codec_data\", buffer)\n\n .build();\n\n\n\n source.set_caps(Some(&caps));\n\n\n\n Ok(())\n\n }\n\n\n\n AudioCodec::Unknown => Err(anyhow!(\n\n \"audio codec is not known, and thus we can't prepare the gstreamer pipeline to accept it.\"\n\n ))\n\n }\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/utils.rs", "rank": 14, "score": 134354.2105334852 }, { "content": "/// Starts the gstreamer transcode process, and returns a channel in which communication with the\n\n/// endpoint can be made.\n\npub fn start_gst_transcoder(\n\n encoder_factory: Arc<EncoderFactory>,\n\n) -> Result<UnboundedSender<GstTranscoderRequest>, EndpointStartError> {\n\n let (sender, receiver) = unbounded_channel();\n\n let actor = EndpointActor::new(receiver, encoder_factory)?;\n\n tokio::spawn(actor.run());\n\n\n\n Ok(sender)\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/endpoints/gst_transcoder/mod.rs", "rank": 15, "score": 132651.18226050166 }, { "content": "/// Starts a new RTMP server endpoint, returning a channel that can be used to send notifications\n\n/// and requests to it.\n\npub fn start_rtmp_server_endpoint(\n\n socket_request_sender: UnboundedSender<TcpSocketRequest>,\n\n) -> UnboundedSender<RtmpEndpointRequest> {\n\n let (endpoint_sender, endpoint_receiver) = unbounded_channel();\n\n\n\n let endpoint = RtmpServerEndpointActor {\n\n futures: FuturesUnordered::new(),\n\n ports: HashMap::new(),\n\n };\n\n\n\n tokio::spawn(endpoint.run(endpoint_receiver, socket_request_sender));\n\n\n\n endpoint_sender\n\n}\n\n\n\n/// Specifies how a stream key should be registered for playback or publishing\n\n#[derive(Clone, Hash, Eq, PartialEq, Debug)]\n\npub enum StreamKeyRegistration {\n\n /// All stream keys for the the rtmp application should be registered\n\n Any,\n", "file_path": "mmids-core/src/endpoints/rtmp_server/mod.rs", "rank": 16, "score": 131026.42303915553 }, { "content": "pub fn start_transcode_manager(\n\n parameters: TranscoderParams,\n\n) -> 
UnboundedSender<TranscodeManagerRequest> {\n\n let (sender, receiver) = unbounded_channel();\n\n let actor = TranscodeManager::new(parameters, receiver);\n\n tokio::spawn(actor.run());\n\n\n\n sender\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/endpoints/gst_transcoder/transcoding_manager.rs", "rank": 17, "score": 131017.00483692909 }, { "content": "/// Takes items from an RTMP stream metadata message and maps them to standardized key/value\n\n/// entries in a hash map.\n\npub fn stream_metadata_to_hash_map(metadata: StreamMetadata) -> HashMap<String, String> {\n\n let mut map = HashMap::new();\n\n\n\n if let Some(codec) = metadata.video_codec {\n\n map.insert(\"videocodecid\".to_string(), codec);\n\n }\n\n\n\n if let Some(x) = metadata.audio_bitrate_kbps {\n\n map.insert(\"audiodatarate\".to_string(), x.to_string());\n\n }\n\n\n\n if let Some(x) = metadata.audio_channels {\n\n map.insert(\"audiochannels\".to_string(), x.to_string());\n\n }\n\n\n\n if let Some(codec) = metadata.audio_codec {\n\n map.insert(\"audiocodecid\".to_string(), codec);\n\n }\n\n\n\n if let Some(x) = metadata.audio_is_stereo {\n", "file_path": "mmids-core/src/utils.rs", "rank": 18, "score": 121691.28504681983 }, { "content": "/// Attempts to extract RTMP stream metadata values from a hash map\n\npub fn hash_map_to_stream_metadata(properties: &HashMap<String, String>) -> StreamMetadata {\n\n let mut metadata = StreamMetadata::new();\n\n if let Some(video_codec_id) = properties.get(\"videocodecid\") {\n\n metadata.video_codec = Some(video_codec_id.clone());\n\n }\n\n\n\n if let Some(audio_data_rate) = properties.get(\"audiodatarate\") {\n\n if let Ok(num) = audio_data_rate.parse() {\n\n metadata.audio_bitrate_kbps = Some(num);\n\n }\n\n }\n\n\n\n if let Some(count) = properties.get(\"audiochannels\") {\n\n if let Ok(num) = count.parse() {\n\n metadata.audio_channels = Some(num);\n\n }\n\n }\n\n\n\n if let Some(codec) = properties.get(\"audiocodecid\") {\n\n metadata.audio_codec = 
Some(codec.clone());\n", "file_path": "mmids-core/src/utils.rs", "rank": 19, "score": 121691.28504681983 }, { "content": "struct Connection {\n\n sender: UnboundedSender<OutboundPacket>,\n\n}\n\n\n", "file_path": "validators/echo-server/src/main.rs", "rank": 20, "score": 119672.07409690482 }, { "content": "#[instrument(skip(port_map))]\n\nfn handle_connection_request_connect_to_app(\n\n connection_id: &ConnectionId,\n\n port_map: &mut PortMapping,\n\n port: u16,\n\n rtmp_app: String,\n\n) {\n\n let connection = match port_map.connections.get_mut(&connection_id) {\n\n Some(x) => x,\n\n None => {\n\n warn!(\"Connection handler for connection {} sent a request to connect to an rtmp app on port {}, \\\n\n but that connection isn't being tracked.\", connection_id, port);\n\n\n\n return;\n\n }\n\n };\n\n let response = if !port_map.rtmp_applications.contains_key(rtmp_app.as_str()) {\n\n info!(\n\n \"Connection {} requested connection to RTMP app '{}' which isn't registered yet\",\n\n connection_id, rtmp_app\n\n );\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/mod.rs", "rank": 21, "score": 118141.17023151685 }, { "content": "/// Starts a new instance of a socket manager task. A socket manager can be requested to open\n\n/// ports on behalf of another system. If the port is successfully opened it will begin listening\n\n/// for TCP connections on that port, and then manage the reading and writing of network traffic\n\n/// for that connection.\n\npub fn start(tls_options: Option<TlsOptions>) -> UnboundedSender<TcpSocketRequest> {\n\n let (request_sender, request_receiver) = unbounded_channel();\n\n\n\n let manager = SocketManager::new();\n\n tokio::spawn(manager.run(request_receiver, tls_options));\n\n\n\n request_sender\n\n}\n\n\n", "file_path": "mmids-core/src/net/tcp/socket_manager.rs", "rank": 22, "score": 117825.0466055148 }, { "content": "/// Starts listening for TCP connections on the specified port. 
It returns a channel which\n\n/// callers can use to know if the listener has shut down unexpectedly.\n\npub fn start(params: ListenerParams) -> UnboundedSender<()> {\n\n let (self_disconnect_sender, self_disconnect_receiver) = unbounded_channel();\n\n tokio::spawn(listen(params, self_disconnect_receiver));\n\n\n\n self_disconnect_sender\n\n}\n\n\n\n#[instrument(skip(params, _self_disconnection_signal), fields(port = params.port, use_tls = params.use_tls))]\n\nasync fn listen(params: ListenerParams, _self_disconnection_signal: UnboundedReceiver<()>) {\n\n info!(\"Socket listener for port started\");\n\n\n\n let ListenerParams {\n\n port,\n\n response_channel,\n\n use_tls,\n\n tls_options,\n\n } = params;\n\n\n\n let tls = if let Some(tls) = tls_options.as_ref() {\n\n let identity = tls.certificate.clone();\n", "file_path": "mmids-core/src/net/tcp/listener.rs", "rank": 23, "score": 115287.06239250544 }, { "content": "struct OpenPort {\n\n response_channel: UnboundedSender<TcpSocketResponse>,\n\n}\n\n\n", "file_path": "mmids-core/src/net/tcp/socket_manager.rs", "rank": 24, "score": 114148.27435068518 }, { "content": "/// Reads the `codec_data` caps from the provided element. 
This is usually where sequence header\n\n/// data is contained.\n\npub fn get_codec_data_from_element(element: &Element) -> Result<Bytes> {\n\n let pad = element\n\n .static_pad(\"src\")\n\n .with_context(|| format!(\"Failed to get src pad of the {} element\", element.name()))?;\n\n\n\n let caps = pad\n\n .caps()\n\n .with_context(|| format!(\"No caps on src pad of the {} element\", element.name()))?;\n\n\n\n let structure = caps\n\n .structure(0)\n\n .with_context(|| format!(\"No structure on the pad of the {} element\", element.name()))?;\n\n\n\n let codec_data = structure.get::<Buffer>(\"codec_data\").with_context(|| {\n\n format!(\n\n \"The src pad of the {} element did not have a 'codec_data' field\",\n\n element.name()\n\n )\n\n })?;\n\n\n", "file_path": "mmids-gstreamer/src/utils.rs", "rank": 25, "score": 112127.56758819043 }, { "content": "struct ConnectionDetails {\n\n stream_id: StreamId,\n\n\n\n // Used to cancel the reactor update future. When a stream disconnects, this cancellation\n\n // channel will be dropped causing the future waiting for reactor updates to be closed. This\n\n // will inform the reactor that this step is no longer interested in whatever workflow it was\n\n // managing for it. 
Not using a one shot, as the channel needs to live across multiple futures\n\n // if updates come in.\n\n _cancellation_channel: Option<UnboundedSender<()>>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/rtmp_receive/mod.rs", "rank": 26, "score": 112082.33831080998 }, { "content": "struct UnwrappedAudio {\n\n codec: AudioCodec,\n\n is_sequence_header: bool,\n\n data: Bytes,\n\n}\n\n\n\nimpl RtmpServerConnectionHandler {\n\n pub fn new(\n\n id: ConnectionId,\n\n outgoing_bytes: UnboundedSender<OutboundPacket>,\n\n request_sender: UnboundedSender<ConnectionRequest>,\n\n ) -> Self {\n\n RtmpServerConnectionHandler {\n\n id,\n\n state: ConnectionState::Handshaking,\n\n handshake: Handshake::new(PeerType::Server),\n\n rtmp_session: None,\n\n outgoing_byte_channel: outgoing_bytes,\n\n futures: FuturesUnordered::new(),\n\n request_sender,\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/connection_handler.rs", "rank": 28, "score": 110380.63688843673 }, { "content": "struct UnwrappedVideo {\n\n codec: VideoCodec,\n\n is_keyframe: bool,\n\n is_sequence_header: bool,\n\n data: Bytes,\n\n composition_time_in_ms: i32,\n\n}\n\n\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/connection_handler.rs", "rank": 29, "score": 110380.63688843673 }, { "content": "#[test]\n\nfn new_step_is_in_created_status() {\n\n let definition = DefinitionBuilder::new().build();\n\n let context = TestContext::new(definition).unwrap();\n\n\n\n let status = context.step_context.step.get_status();\n\n assert_eq!(status, &StepStatus::Created, \"Unexpected step status\");\n\n}\n\n\n\n#[tokio::test]\n\nasync fn registration_failure_changes_status_to_error() {\n\n let definition = DefinitionBuilder::new().build();\n\n let mut context = TestContext::new(definition).unwrap();\n\n\n\n let response = test_utils::expect_mpsc_response(&mut context.rtmp_endpoint).await;\n\n let _channel = match response {\n\n RtmpEndpointRequest::ListenForWatchers {\n\n notification_channel,\n\n 
..\n\n } => {\n\n notification_channel\n", "file_path": "mmids-core/src/workflows/steps/rtmp_watch/tests.rs", "rank": 30, "score": 108429.14961439701 }, { "content": "#[instrument(skip(port_map))]\n\nfn handle_connection_request_watch(\n\n connection_id: ConnectionId,\n\n port_map: &mut PortMapping,\n\n port: u16,\n\n rtmp_app: String,\n\n stream_key: &String,\n\n reactor_update_channel: Option<UnboundedReceiver<ReactorWorkflowUpdate>>,\n\n) -> Option<BoxFuture<'static, FutureResult>> {\n\n let connection = match port_map.connections.get_mut(&connection_id) {\n\n Some(x) => x,\n\n None => {\n\n warn!(\"Connection handler for connection {:?} sent request to watch on port {}, but that \\\n\n connection isn't being tracked.\", connection_id, port);\n\n\n\n return None;\n\n }\n\n };\n\n\n\n // Has this app been registered yet?\n\n let application = match port_map.rtmp_applications.get_mut(rtmp_app.as_str()) {\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/mod.rs", "rank": 31, "score": 108148.00763500584 }, { "content": "#[instrument(skip(port_map))]\n\nfn handle_connection_request_publish(\n\n connection_id: &ConnectionId,\n\n port_map: &mut PortMapping,\n\n port: u16,\n\n rtmp_app: String,\n\n stream_key: &String,\n\n reactor_response_channel: Option<UnboundedReceiver<ReactorWorkflowUpdate>>,\n\n) -> Option<BoxFuture<'static, FutureResult>> {\n\n let connection = match port_map.connections.get_mut(&connection_id) {\n\n Some(x) => x,\n\n None => {\n\n warn!(\"Connection handler for connection {:?} sent a request to publish on port {}, but that \\\n\n connection isn't being tracked.\", connection_id, port);\n\n\n\n return None;\n\n }\n\n };\n\n\n\n // Has this RTMP application been registered yet?\n\n let application = match port_map.rtmp_applications.get_mut(rtmp_app.as_str()) {\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/mod.rs", "rank": 32, "score": 108148.00763500584 }, { "content": "fn wrap_audio_into_flv(\n\n data: Bytes,\n\n codec: 
AudioCodec,\n\n is_sequence_header: bool,\n\n) -> Result<Bytes, ()> {\n\n match codec {\n\n AudioCodec::Aac => {\n\n let flv_tag = 0xaf;\n\n let packet_type = if is_sequence_header { 0 } else { 1 };\n\n let mut wrapped = BytesMut::new();\n\n wrapped.put_u8(flv_tag);\n\n wrapped.put_u8(packet_type);\n\n wrapped.extend(data);\n\n\n\n Ok(wrapped.freeze())\n\n }\n\n\n\n AudioCodec::Unknown => {\n\n // Need to know the codec to wrap it into flv\n\n Err(())\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/connection_handler.rs", "rank": 33, "score": 108142.81614280808 }, { "content": "fn wrap_video_into_flv(\n\n data: Bytes,\n\n codec: VideoCodec,\n\n is_keyframe: bool,\n\n is_sequence_header: bool,\n\n composition_time_offset: i32,\n\n) -> Result<Bytes, ()> {\n\n match codec {\n\n VideoCodec::H264 => {\n\n let flv_tag = if is_keyframe { 0x17 } else { 0x27 };\n\n let avc_type = if is_sequence_header { 0 } else { 1 };\n\n\n\n let mut header = vec![flv_tag, avc_type];\n\n if let Err(error) = header.write_i24::<BigEndian>(composition_time_offset) {\n\n error!(\"Failed to write composition time offset: {error:?}\");\n\n return Err(());\n\n }\n\n\n\n let mut wrapped = BytesMut::new();\n\n wrapped.extend(header);\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/connection_handler.rs", "rank": 34, "score": 108142.81614280808 }, { "content": "fn get_number(parameters: &HashMap<String, Option<String>>, key: &str) -> Option<u32> {\n\n if let Some(outer) = parameters.get(key) {\n\n if let Some(inner) = outer {\n\n match inner.parse() {\n\n Ok(num) => return Some(num),\n\n Err(_) => warn!(\"Parameter {key} had a value of '{inner}', which is not a number\"),\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/encoders/video_x264.rs", "rank": 35, "score": 96887.2861356501 }, { "content": "fn get_number(parameters: &HashMap<String, Option<String>>, key: &str) -> Option<i32> {\n\n if let Some(outer) = parameters.get(key) {\n\n if let 
Some(inner) = outer {\n\n match inner.parse() {\n\n Ok(num) => return Some(num),\n\n Err(_) => warn!(\"Parameter {key} had a value of '{inner}', which is not a number\"),\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/encoders/audio_avenc_aac.rs", "rank": 36, "score": 95634.33655220989 }, { "content": "/// Function that makes it easy to create a gstreamer `Buffer` based on a set of bytes, an optional\n\n/// decoding timestamp, and an optional presentation timestamp.\n\npub fn set_gst_buffer(data: Bytes, dts: Option<Duration>, pts: Option<Duration>) -> Result<Buffer> {\n\n let mut buffer = Buffer::with_size(data.len())\n\n .with_context(|| format!(\"Could not create a buffer with size {}\", data.len()))?;\n\n\n\n {\n\n let buffer = buffer\n\n .get_mut()\n\n .with_context(|| \"Could not get mutable buffer\")?;\n\n\n\n if let Some(dts) = dts {\n\n buffer.set_dts(ClockTime::from_mseconds(dts.as_millis() as u64));\n\n }\n\n\n\n if let Some(pts) = pts {\n\n buffer.set_pts(ClockTime::from_mseconds(pts.as_millis() as u64));\n\n }\n\n\n\n let mut sample = buffer\n\n .map_writable()\n\n .with_context(|| \"Failed to map buffer to writable buffer map\")?;\n\n\n\n {\n\n let sample = sample.as_mut_slice();\n\n sample.copy_from_slice(&data);\n\n }\n\n }\n\n\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/utils.rs", "rank": 37, "score": 93610.74186368185 }, { "content": "fn unwrap_audio_from_flv(mut data: Bytes) -> UnwrappedAudio {\n\n if data.len() < 2 {\n\n return UnwrappedAudio {\n\n codec: AudioCodec::Unknown,\n\n is_sequence_header: false,\n\n data,\n\n };\n\n }\n\n\n\n let flv_tag = data.split_to(1);\n\n let packet_type = data.split_to(1);\n\n let is_sequence_header = packet_type[0] == 0;\n\n let codec = if flv_tag[0] & 0xa0 == 0xa0 {\n\n AudioCodec::Aac\n\n } else {\n\n AudioCodec::Unknown\n\n };\n\n\n\n UnwrappedAudio {\n\n codec,\n\n is_sequence_header,\n\n data,\n\n }\n\n}\n\n\n", "file_path": 
"mmids-core/src/endpoints/rtmp_server/actor/connection_handler.rs", "rank": 38, "score": 90276.86480802303 }, { "content": "fn unwrap_video_from_flv(mut data: Bytes) -> UnwrappedVideo {\n\n if data.len() < 2 {\n\n return UnwrappedVideo {\n\n codec: VideoCodec::Unknown,\n\n is_keyframe: false,\n\n is_sequence_header: false,\n\n data,\n\n composition_time_in_ms: 0,\n\n };\n\n }\n\n\n\n let flv_tag = data.split_to(1);\n\n let avc_header = data.split_to(4);\n\n\n\n let is_sequence_header;\n\n let codec = if flv_tag[0] & 0x07 == 0x07 {\n\n is_sequence_header = avc_header[0] == 0x00;\n\n VideoCodec::H264\n\n } else {\n\n is_sequence_header = false;\n", "file_path": "mmids-core/src/endpoints/rtmp_server/actor/connection_handler.rs", "rank": 39, "score": 90276.86480802303 }, { "content": "struct Endpoints {\n\n rtmp: UnboundedSender<RtmpEndpointRequest>,\n\n ffmpeg: UnboundedSender<FfmpegEndpointRequest>,\n\n gst_transcoder: UnboundedSender<GstTranscoderRequest>,\n\n}\n\n\n\n#[tokio::main]\n\npub async fn main() {\n\n // Start logging\n\n let log_dir = get_log_directory();\n\n let mut app_log_path = PathBuf::from(log_dir.clone());\n\n app_log_path.push(\"application\");\n\n\n\n let log_level = match env::var(\"mmids_log\") {\n\n Ok(level) => match level.to_lowercase().as_str() {\n\n \"error\" => Level::ERROR,\n\n \"warn\" => Level::WARN,\n\n \"info\" => Level::INFO,\n\n \"debug\" => Level::DEBUG,\n\n \"trace\" => Level::TRACE,\n", "file_path": "mmids-app/src/main.rs", "rank": 40, "score": 75723.86785985346 }, { "content": "struct Actor {\n\n futures: FuturesUnordered<BoxFuture<'static, FutureResult>>,\n\n next_subscriber_id: Wrapping<usize>,\n\n active_subscriber_ids: HashSet<usize>,\n\n workflow_start_stop_subscribers: HashMap<usize, UnboundedSender<WorkflowStartedOrStoppedEvent>>,\n\n workflow_manager_subscribers: HashMap<usize, UnboundedSender<WorkflowManagerEvent>>,\n\n new_subscribers_can_join: bool,\n\n active_workflows: HashMap<String, 
UnboundedSender<WorkflowRequest>>,\n\n active_workflow_manager: Option<UnboundedSender<WorkflowManagerRequest>>,\n\n}\n\n\n\nimpl Actor {\n\n fn new(\n\n publish_receiver: UnboundedReceiver<PublishEventRequest>,\n\n subscribe_receiver: UnboundedReceiver<SubscriptionRequest>,\n\n ) -> Self {\n\n let futures = FuturesUnordered::new();\n\n futures.push(wait_for_publish_request(publish_receiver).boxed());\n\n futures.push(wait_for_subscription_request(subscribe_receiver).boxed());\n\n\n", "file_path": "mmids-core/src/event_hub.rs", "rank": 41, "score": 74662.83624596585 }, { "content": "struct Actor {\n\n executor_factory: ReactorExecutorFactory,\n\n event_hub_subscriber: UnboundedSender<SubscriptionRequest>,\n\n futures: FuturesUnordered<BoxFuture<'static, FutureResult>>,\n\n reactors: HashMap<String, UnboundedSender<ReactorRequest>>,\n\n}\n\n\n\nunsafe impl Send for Actor {}\n\n\n\nimpl Actor {\n\n fn new(\n\n executor_factory: ReactorExecutorFactory,\n\n receiver: UnboundedReceiver<ReactorManagerRequest>,\n\n event_hub_subscriber: UnboundedSender<SubscriptionRequest>,\n\n ) -> Self {\n\n let futures = FuturesUnordered::new();\n\n futures.push(wait_for_request(receiver).boxed());\n\n\n\n Actor {\n\n executor_factory,\n", "file_path": "mmids-core/src/reactors/manager.rs", "rank": 42, "score": 74662.83624596585 }, { "content": "struct ChildNode {\n\n name: String,\n\n arguments: HashMap<String, Option<String>>,\n\n}\n\n\n", "file_path": "mmids-core/src/config.rs", "rank": 43, "score": 74662.83624596585 }, { "content": "struct Actor {\n\n futures: FuturesUnordered<BoxFuture<'static, FutureResult>>,\n\n workflows: HashMap<String, UnboundedSender<WorkflowRequest>>,\n\n step_factory: Arc<WorkflowStepFactory>,\n\n event_hub_publisher: UnboundedSender<PublishEventRequest>,\n\n}\n\n\n\nimpl Actor {\n\n fn new(\n\n step_factory: Arc<WorkflowStepFactory>,\n\n event_hub_publisher: UnboundedSender<PublishEventRequest>,\n\n ) -> Self {\n\n Actor {\n\n futures: 
FuturesUnordered::new(),\n\n workflows: HashMap::new(),\n\n step_factory,\n\n event_hub_publisher,\n\n }\n\n }\n\n\n", "file_path": "mmids-core/src/workflows/manager.rs", "rank": 44, "score": 74662.83624596585 }, { "content": "struct Actor {\n\n name: String,\n\n executor: Box<dyn ReactorExecutor>,\n\n futures: FuturesUnordered<BoxFuture<'static, FutureResult>>,\n\n workflow_manager: Option<UnboundedSender<WorkflowManagerRequest>>,\n\n cached_workflows_for_stream_name: HashMap<String, CachedWorkflows>,\n\n update_interval: Duration,\n\n stream_response_channels: HashMap<String, Vec<UnboundedSender<ReactorWorkflowUpdate>>>,\n\n}\n\n\n\nunsafe impl Send for Actor {}\n\n\n\nimpl Actor {\n\n fn new(\n\n name: String,\n\n receiver: UnboundedReceiver<ReactorRequest>,\n\n executor: Box<dyn ReactorExecutor>,\n\n event_hub_subscriber: UnboundedSender<SubscriptionRequest>,\n\n update_interval: Duration,\n\n ) -> Self {\n", "file_path": "mmids-core/src/reactors/reactor.rs", "rank": 45, "score": 74662.83624596585 }, { "content": "fn start_workflows(\n\n config: &MmidsConfig,\n\n step_factory: Arc<WorkflowStepFactory>,\n\n event_hub_publisher: UnboundedSender<PublishEventRequest>,\n\n) -> UnboundedSender<WorkflowManagerRequest> {\n\n info!(\"Starting workflow manager\");\n\n let manager = start_workflow_manager(step_factory, event_hub_publisher);\n\n for (_, workflow) in &config.workflows {\n\n let _ = manager.send(WorkflowManagerRequest {\n\n request_id: \"mmids-app-startup\".to_string(),\n\n operation: WorkflowManagerRequestOperation::UpsertWorkflow {\n\n definition: workflow.clone(),\n\n },\n\n });\n\n }\n\n\n\n manager\n\n}\n\n\n", "file_path": "mmids-app/src/main.rs", "rank": 46, "score": 74046.22450473957 }, { "content": "fn read_reactor(\n\n config: &mut MmidsConfig,\n\n pairs: Pairs<Rule>,\n\n starting_line: usize,\n\n) -> Result<(), ConfigParseError> {\n\n let mut name = None;\n\n let mut parameters = HashMap::new();\n\n let mut executor_name = None;\n\n let mut 
update_interval = 0;\n\n\n\n for pair in pairs {\n\n match pair.as_rule() {\n\n Rule::argument => {\n\n let (key, value) = read_argument(pair.clone())?;\n\n if name.is_none() {\n\n // Name must come first and only have a key, no pair\n\n if value.is_some() {\n\n return Err(ConfigParseError::InvalidReactorName {\n\n line: get_line_number(&pair),\n\n name: pair.as_str().to_string(),\n", "file_path": "mmids-core/src/config.rs", "rank": 47, "score": 74046.22450473957 }, { "content": "fn register_steps(\n\n endpoints: Endpoints,\n\n subscription_sender: UnboundedSender<SubscriptionRequest>,\n\n reactor_manager: UnboundedSender<ReactorManagerRequest>,\n\n) -> Arc<WorkflowStepFactory> {\n\n info!(\"Starting workflow step factory, and adding known step types to it\");\n\n let mut step_factory = WorkflowStepFactory::new();\n\n step_factory\n\n .register(\n\n WorkflowStepType(RTMP_RECEIVE.to_string()),\n\n Box::new(RtmpReceiverStepGenerator::new(\n\n endpoints.rtmp.clone(),\n\n reactor_manager.clone(),\n\n )),\n\n )\n\n .expect(\"Failed to register rtmp_receive step\");\n\n\n\n step_factory\n\n .register(\n\n WorkflowStepType(RTMP_WATCH.to_string()),\n", "file_path": "mmids-app/src/main.rs", "rank": 48, "score": 74046.22450473957 }, { "content": "fn start_endpoints(\n\n config: &MmidsConfig,\n\n tls_options: Option<TlsOptions>,\n\n log_dir: String,\n\n) -> Endpoints {\n\n info!(\"Starting all endpoints\");\n\n\n\n let socket_manager = start_socket_manager(tls_options);\n\n let rtmp_endpoint = start_rtmp_server_endpoint(socket_manager);\n\n\n\n let ffmpeg_path = config\n\n .settings\n\n .get(\"ffmpeg_path\")\n\n .expect(\"No ffmpeg_path setting found\")\n\n .as_ref()\n\n .expect(\"no ffmpeg path specified\");\n\n\n\n let ffmpeg_endpoint = start_ffmpeg_endpoint(ffmpeg_path.to_string(), log_dir)\n\n .expect(\"Failed to start ffmpeg endpoint\");\n\n\n", "file_path": "mmids-app/src/main.rs", "rank": 49, "score": 74046.22450473957 }, { "content": "fn read_workflow(\n\n config: 
&mut MmidsConfig,\n\n pairs: Pairs<Rule>,\n\n starting_line: usize,\n\n) -> Result<(), ConfigParseError> {\n\n let mut steps = Vec::new();\n\n let mut workflow_name = None;\n\n let mut routed_by_reactor = false;\n\n for pair in pairs {\n\n match pair.as_rule() {\n\n Rule::child_node => {\n\n let child_node = read_child_node(pair)?;\n\n steps.push(WorkflowStepDefinition {\n\n step_type: WorkflowStepType(child_node.name),\n\n parameters: child_node.arguments,\n\n });\n\n }\n\n\n\n Rule::argument => {\n\n let (key, value) = read_argument(pair.clone())?;\n", "file_path": "mmids-core/src/config.rs", "rank": 50, "score": 74046.22450473957 }, { "content": "#[derive(Parser)]\n\n#[grammar = \"config.pest\"]\n\nstruct RawConfigParser;\n\n\n", "file_path": "mmids-core/src/config.rs", "rank": 51, "score": 73649.52353796184 }, { "content": "struct Actor {\n\n ffmpeg_exe_path: String,\n\n log_path: PathBuf,\n\n futures: FuturesUnordered<BoxFuture<'static, FutureResult>>,\n\n processes: HashMap<Uuid, FfmpegProcess>,\n\n}\n\n\n\nimpl Actor {\n\n fn new(ffmpeg_exe_path: String, log_root: String) -> Result<Self, FfmpegEndpointStartError> {\n\n let path = Path::new(ffmpeg_exe_path.as_str());\n\n if !path.is_file() {\n\n return Err(FfmpegEndpointStartError::FfmpegExecutableNotFound(\n\n ffmpeg_exe_path,\n\n ));\n\n }\n\n\n\n let mut path = PathBuf::from(log_root.as_str());\n\n if path.is_file() {\n\n // We expected the path to be a new or existing directory, not a file\n\n return Err(FfmpegEndpointStartError::LogDirectoryInvalidPath(log_root));\n", "file_path": "mmids-core/src/endpoints/ffmpeg/mod.rs", "rank": 52, "score": 73649.52353796184 }, { "content": "struct CachedWorkflows {\n\n definitions: Vec<WorkflowDefinition>,\n\n}\n\n\n", "file_path": "mmids-core/src/reactors/reactor.rs", "rank": 53, "score": 73649.52353796184 }, { "content": "#[derive(Deserialize)]\n\nstruct RequestContent {\n\n stream_name: String,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let addr = 
\"127.0.0.1:9055\".parse().unwrap();\n\n let make_service =\n\n make_service_fn(|_| async { Ok::<_, hyper::Error>(service_fn(get_response)) });\n\n\n\n let server = Server::bind(&addr).serve(make_service);\n\n\n\n println!(\"Listening on http://{}\", addr);\n\n\n\n if let Err(e) = server.await {\n\n eprintln!(\"server error: {}\", e);\n\n }\n\n}\n\n\n\nasync fn get_response(req: Request<Body>) -> Result<Response<Body>> {\n", "file_path": "reactor-test-server/src/main.rs", "rank": 54, "score": 73649.52353796184 }, { "content": "struct Actor {\n\n name: String,\n\n steps_by_definition_id: HashMap<u64, Box<dyn WorkflowStep>>,\n\n active_steps: Vec<u64>,\n\n pending_steps: Vec<u64>,\n\n futures: FuturesUnordered<BoxFuture<'static, FutureResult>>,\n\n step_inputs: StepInputs,\n\n step_outputs: StepOutputs,\n\n cached_step_media: HashMap<u64, HashMap<StreamId, Vec<MediaNotification>>>,\n\n cached_inbound_media: HashMap<StreamId, Vec<MediaNotification>>,\n\n active_streams: HashMap<StreamId, StreamDetails>,\n\n step_factory: Arc<WorkflowStepFactory>,\n\n step_definitions: HashMap<u64, WorkflowStepDefinition>,\n\n status: WorkflowStatus,\n\n}\n\n\n\nimpl Actor {\n\n #[instrument(skip(definition, step_factory, receiver), fields(workflow_name = %definition.name))]\n\n fn new(\n\n definition: &WorkflowDefinition,\n", "file_path": "mmids-core/src/workflows/runner/mod.rs", "rank": 55, "score": 73649.52353796184 }, { "content": "fn start_http_api(\n\n config: &MmidsConfig,\n\n manager: UnboundedSender<WorkflowManagerRequest>,\n\n) -> Option<Sender<HttpApiShutdownSignal>> {\n\n let port = match config.settings.get(\"http_api_port\") {\n\n Some(Some(value)) => match value.parse::<u16>() {\n\n Ok(port) => port,\n\n Err(_) => {\n\n panic!(\"http_api_port value of '{}' is not a valid number\", value);\n\n }\n\n },\n\n\n\n _ => {\n\n warn!(\"No `http_api_port` setting specified. 
HTTP api disabled\");\n\n return None;\n\n }\n\n };\n\n\n\n let mut routes = RoutingTable::new();\n\n routes\n", "file_path": "mmids-app/src/main.rs", "rank": 56, "score": 73035.08015742061 }, { "content": "struct RouteNode {\n\n leaf: Option<Route>,\n\n children: HashMap<SearchablePathPart, RouteNode>,\n\n}\n\n\n\nimpl RoutingTable {\n\n /// Creates an empty routing table\n\n pub fn new() -> Self {\n\n RoutingTable {\n\n routes: HashMap::new(),\n\n }\n\n }\n\n\n\n /// Registers a route to be available by the routing table\n\n pub fn register(&mut self, route: Route) -> Result<(), RouteRegistrationError> {\n\n let mut node = self\n\n .routes\n\n .entry(route.method.clone())\n\n .or_insert(RouteNode {\n\n leaf: None,\n", "file_path": "mmids-core/src/http_api/routing.rs", "rank": 57, "score": 72680.78136355546 }, { "content": "struct CodecInfo {\n\n codec: AudioCodec,\n\n sequence_header: Bytes,\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/encoders/audio_copy.rs", "rank": 58, "score": 72680.78136355546 }, { "content": "struct X264Encoder {\n\n source: AppSrc,\n\n}\n\n\n\nimpl X264Encoder {\n\n fn new(\n\n media_sender: UnboundedSender<MediaNotificationContent>,\n\n parameters: &HashMap<String, Option<String>>,\n\n pipeline: &Pipeline,\n\n ) -> Result<X264Encoder> {\n\n let height = get_number(&parameters, \"height\");\n\n let width = get_number(&parameters, \"width\");\n\n let preset = parameters.get(\"preset\").unwrap_or(&None);\n\n let fps = get_number(&parameters, \"fps\");\n\n let bitrate = get_number(&parameters, \"bitrate\");\n\n\n\n let appsrc = create_gst_element(\"appsrc\")?;\n\n let queue = create_gst_element(\"queue\")?;\n\n let decoder = create_gst_element(\"decodebin\")?;\n\n let scale = create_gst_element(\"videoscale\")?;\n", "file_path": "mmids-gstreamer/src/encoders/video_x264.rs", "rank": 59, "score": 72680.78136355546 }, { "content": "struct FfmpegProcess {\n\n handle: Child,\n\n notification_channel: 
UnboundedSender<FfmpegEndpointNotification>,\n\n}\n\n\n", "file_path": "mmids-core/src/endpoints/ffmpeg/mod.rs", "rank": 60, "score": 72680.78136355546 }, { "content": "struct CodecInfo {\n\n codec: VideoCodec,\n\n sequence_header: Bytes,\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/encoders/video_copy.rs", "rank": 61, "score": 72680.78136355546 }, { "content": "struct StreamDetails {\n\n /// The step that first sent a new stream media notification. We know that if this step is\n\n /// removed, the stream no longer has a source of video and should be considered disconnected\n\n originating_step_id: u64,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/runner/mod.rs", "rank": 62, "score": 72680.78136355546 }, { "content": "fn sample_received(\n\n sink: &AppSink,\n\n codec_data_sent: &mut bool,\n\n output_parser: &Element,\n\n media_sender: UnboundedSender<MediaNotificationContent>,\n\n) -> Result<()> {\n\n if !*codec_data_sent {\n\n // Pull the codec_data/sequence header out from the output parser\n\n let codec_data = get_codec_data_from_element(&output_parser)?;\n\n\n\n let _ = media_sender.send(MediaNotificationContent::Video {\n\n codec: VideoCodec::H264,\n\n timestamp: VideoTimestamp::from_zero(),\n\n is_sequence_header: true,\n\n is_keyframe: false,\n\n data: codec_data,\n\n });\n\n\n\n *codec_data_sent = true;\n\n }\n", "file_path": "mmids-gstreamer/src/encoders/video_x264.rs", "rank": 63, "score": 72068.41096840988 }, { "content": "struct VideoCopyEncoder {\n\n source: AppSrc,\n\n codec_data: Arc<Mutex<Option<CodecInfo>>>,\n\n}\n\n\n\nimpl VideoCopyEncoder {\n\n fn new(\n\n media_sender: UnboundedSender<MediaNotificationContent>,\n\n pipeline: &Pipeline,\n\n ) -> Result<VideoCopyEncoder> {\n\n // While we won't be mutating the stream, we want to pass it through a gstreamer pipeline\n\n // so the packets will be synchronized with audio in case of transcoding delay.\n\n\n\n let appsrc = create_gst_element(\"appsrc\")?;\n\n let queue = 
create_gst_element(\"queue\")?;\n\n let appsink = create_gst_element(\"appsink\")?;\n\n\n\n pipeline\n\n .add_many(&[&appsrc, &queue, &appsink])\n\n .with_context(|| \"Failed to add video copy encoder's elements to the pipeline\")?;\n", "file_path": "mmids-gstreamer/src/encoders/video_copy.rs", "rank": 64, "score": 71753.73235259898 }, { "content": "struct VideoDropEncoder {}\n\n\n\nimpl VideoEncoder for VideoDropEncoder {\n\n fn push_data(\n\n &self,\n\n _codec: VideoCodec,\n\n _data: Bytes,\n\n _timestamp: VideoTimestamp,\n\n _is_sequence_header: bool,\n\n ) -> anyhow::Result<()> {\n\n // Do nothing since we want to drop the video stream\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "mmids-gstreamer/src/encoders/video_drop.rs", "rank": 65, "score": 71753.73235259898 }, { "content": "struct ActiveTranscode {\n\n media_sender: UnboundedSender<MediaNotificationContent>,\n\n transcode_process_id: Uuid,\n\n stream_name: String,\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/steps/basic_transcoder/mod.rs", "rank": 66, "score": 71753.73235259898 }, { "content": "#[cfg(test)]\n\nstruct StepTestContext {\n\n step: Box<dyn WorkflowStep>,\n\n futures: FuturesUnordered<BoxFuture<'static, Box<dyn StepFutureResult>>>,\n\n media_outputs: Vec<MediaNotification>,\n\n}\n\n\n\n#[cfg(test)]\n\nimpl StepTestContext {\n\n fn new(generator: Box<dyn StepGenerator>, definition: WorkflowStepDefinition) -> Result<Self> {\n\n let (step, futures) = generator\n\n .generate(definition)\n\n .or_else(|error| Err(anyhow!(\"Failed to generate workflow step: {:?}\", error)))?;\n\n\n\n Ok(StepTestContext {\n\n step,\n\n futures: FuturesUnordered::from_iter(futures),\n\n media_outputs: Vec::new(),\n\n })\n\n }\n\n\n", "file_path": "mmids-core/src/workflows/steps/mod.rs", "rank": 67, "score": 71753.73235259898 }, { "content": "struct AudioDropEncoder {}\n\n\n\nimpl AudioEncoder for AudioDropEncoder {\n\n fn push_data(\n\n &self,\n\n _codec: AudioCodec,\n\n _data: Bytes,\n\n _timestamp: Duration,\n\n 
_is_sequence_header: bool,\n\n ) -> Result<()> {\n\n // Do nothing with the data since we are dropping the audio stream\n\n Ok(())\n\n }\n\n}\n", "file_path": "mmids-gstreamer/src/encoders/audio_drop.rs", "rank": 68, "score": 71753.73235259898 }, { "content": "struct SocketManager {\n\n open_ports: HashMap<u16, OpenPort>,\n\n futures: FuturesUnordered<BoxFuture<'static, SocketManagerFutureResult>>,\n\n}\n\n\n\nimpl SocketManager {\n\n fn new() -> Self {\n\n SocketManager {\n\n open_ports: HashMap::new(),\n\n futures: FuturesUnordered::new(),\n\n }\n\n }\n\n\n\n async fn run(\n\n mut self,\n\n request_receiver: UnboundedReceiver<TcpSocketRequest>,\n\n tls_options: Option<TlsOptions>,\n\n ) {\n\n info!(\"Starting TCP socket manager\");\n\n let tls_options = Arc::new(tls_options);\n", "file_path": "mmids-core/src/net/tcp/socket_manager.rs", "rank": 69, "score": 71753.73235259898 }, { "content": "struct AudioCopyEncoder {\n\n source: AppSrc,\n\n codec_data: Arc<Mutex<Option<CodecInfo>>>,\n\n}\n\n\n\nimpl AudioCopyEncoder {\n\n fn new(\n\n media_sender: UnboundedSender<MediaNotificationContent>,\n\n pipeline: &Pipeline,\n\n ) -> Result<AudioCopyEncoder> {\n\n // While we won't be mutating the stream, we want to pass it through a gstreamer pipeline\n\n // so the packets will be synchronized with possibly transcoded video delay.\n\n\n\n let appsrc = create_gst_element(\"appsrc\")?;\n\n let queue = create_gst_element(\"queue\")?;\n\n let appsink = create_gst_element(\"appsink\")?;\n\n\n\n pipeline\n\n .add_many(&[&appsrc, &queue, &appsink])\n\n .with_context(|| \"Failed to add audio copy encoder's elements to the pipeline\")?;\n", "file_path": "mmids-gstreamer/src/encoders/audio_copy.rs", "rank": 70, "score": 71753.73235259898 }, { "content": "struct EndpointActor {\n\n futures: FuturesUnordered<BoxFuture<'static, EndpointFuturesResult>>,\n\n active_transcodes: HashMap<Uuid, ActiveTranscode>,\n\n encoder_factory: Arc<EncoderFactory>,\n\n}\n\n\n\nunsafe impl Send for 
EndpointActor {}\n\nunsafe impl Sync for EndpointActor {}\n\n\n\nimpl EndpointActor {\n\n fn new(\n\n receiver: UnboundedReceiver<GstTranscoderRequest>,\n\n encoder_factory: Arc<EncoderFactory>,\n\n ) -> Result<EndpointActor, EndpointStartError> {\n\n (*GSTREAMER_INIT_RESULT).as_ref()?;\n\n\n\n let futures = FuturesUnordered::new();\n\n futures.push(endpoint_futures::wait_for_request(receiver).boxed());\n\n\n\n Ok(EndpointActor {\n", "file_path": "mmids-gstreamer/src/endpoints/gst_transcoder/mod.rs", "rank": 71, "score": 71753.73235259898 }, { "content": "struct ActiveTranscode {\n\n sender: UnboundedSender<TranscodeManagerRequest>,\n\n notification_channel: UnboundedSender<GstTranscoderNotification>,\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/endpoints/gst_transcoder/mod.rs", "rank": 72, "score": 71753.73235259898 }, { "content": "fn sample_received(\n\n sink: &AppSink,\n\n codec_data_sent: &mut bool,\n\n output_parser: &Element,\n\n media_sender: UnboundedSender<MediaNotificationContent>,\n\n) -> Result<()> {\n\n if !*codec_data_sent {\n\n // Pull the codec_data out of the output parser to get the sequence header\n\n let codec_data = get_codec_data_from_element(&output_parser)?;\n\n let _ = media_sender.send(MediaNotificationContent::Audio {\n\n codec: AudioCodec::Aac,\n\n timestamp: Duration::from_millis(0),\n\n is_sequence_header: true,\n\n data: codec_data,\n\n });\n\n\n\n *codec_data_sent = true;\n\n }\n\n\n\n let sample = SampleResult::from_sink(sink).with_context(|| \"Failed to get aac sample\")?;\n", "file_path": "mmids-gstreamer/src/encoders/audio_avenc_aac.rs", "rank": 73, "score": 71143.34572476688 }, { "content": "struct StreamWatchers {\n\n // Use an unbounded channel for this instead of a one shot, as we risk losing the cancellation\n\n // channel when a reactor update comes through. 
We can work around this by recreating the\n\n // cancellation token each time, but it's easier to just use an `UnboundedSender` instead.\n\n _reactor_cancel_channel: Option<UnboundedSender<()>>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/rtmp_watch/mod.rs", "rank": 74, "score": 70865.7415925577 }, { "content": "struct GstError {\n\n source_name: String,\n\n error_description: String,\n\n debug_info: Option<String>,\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/endpoints/gst_transcoder/transcoding_manager.rs", "rank": 75, "score": 70865.7415925577 }, { "content": "struct ActiveStream {\n\n id: StreamId,\n\n stream_name: String,\n\n pending_media: VecDeque<MediaNotificationContent>,\n\n rtmp_output_status: WatchRegistrationStatus,\n\n external_stream_handler: Box<dyn ExternalStreamHandler + Sync + Send>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/external_stream_reader.rs", "rank": 76, "score": 70865.7415925577 }, { "content": "struct TestOutputStep {\n\n status: StepStatus,\n\n definition: WorkflowStepDefinition,\n\n media: UnboundedSender<MediaNotification>,\n\n}\n\n\n\nimpl StepFutureResult for InputFutureResult {}\n", "file_path": "mmids-core/src/workflows/runner/test_steps.rs", "rank": 77, "score": 70865.7415925577 }, { "content": "struct BasicTranscodeStep {\n\n definition: WorkflowStepDefinition,\n\n status: StepStatus,\n\n transcoder_endpoint: UnboundedSender<GstTranscoderRequest>,\n\n active_transcodes: HashMap<StreamId, ActiveTranscode>,\n\n video_encoder_name: String,\n\n audio_encoder_name: String,\n\n video_parameters: HashMap<String, Option<String>>,\n\n audio_parameters: HashMap<String, Option<String>>,\n\n}\n\n\n", "file_path": "mmids-gstreamer/src/steps/basic_transcoder/mod.rs", "rank": 78, "score": 70865.7415925577 }, { "content": "struct FfmpegTranscoder {\n\n definition: WorkflowStepDefinition,\n\n ffmpeg_endpoint: UnboundedSender<FfmpegEndpointRequest>,\n\n rtmp_server_endpoint: 
UnboundedSender<RtmpEndpointRequest>,\n\n video_codec_params: VideoTranscodeParams,\n\n audio_codec_params: AudioTranscodeParams,\n\n video_scale_params: Option<VideoScale>,\n\n bitrate: Option<u16>,\n\n active_streams: HashMap<StreamId, ActiveStream>,\n\n status: StepStatus,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/ffmpeg_transcode/mod.rs", "rank": 79, "score": 70865.7415925577 }, { "content": "struct DefinitionBuilder {\n\n port: Option<u16>,\n\n app: Option<String>,\n\n key: Option<String>,\n\n reactor: Option<String>,\n\n}\n\n\n\nimpl DefinitionBuilder {\n\n fn new() -> Self {\n\n DefinitionBuilder {\n\n port: None,\n\n app: None,\n\n key: None,\n\n reactor: None,\n\n }\n\n }\n\n\n\n fn port(mut self, port: u16) -> Self {\n\n self.port = Some(port);\n\n self\n", "file_path": "mmids-core/src/workflows/steps/rtmp_watch/tests.rs", "rank": 80, "score": 70865.7415925577 }, { "content": "#[derive(Serialize)]\n\nstruct RequestContent {\n\n stream_name: String,\n\n}\n\n\n\nimpl ReactorExecutorGenerator for SimpleHttpExecutorGenerator {\n\n fn generate(\n\n &self,\n\n parameters: &HashMap<String, Option<String>>,\n\n ) -> Result<Box<dyn ReactorExecutor>, Box<dyn Error + Sync + Send>> {\n\n let url = match parameters.get(\"url\") {\n\n Some(Some(url)) => url.trim().to_string(),\n\n _ => return Err(Box::new(SimpleHttpExecutorError::UrlParameterNotProvided)),\n\n };\n\n\n\n Ok(Box::new(SimpleHttpExecutor { url }))\n\n }\n\n}\n\n\n\n#[instrument]\n\nasync fn execute_simple_http_executor(url: String, stream_name: String) -> ReactorExecutionResult {\n\n info!(\"Querying {} for workflow for stream '{}'\", url, stream_name);\n\n let mut config = match execute_with_retry(&url, &stream_name, 0).await {\n\n Ok(config) => config,\n\n Err(_) => return ReactorExecutionResult::invalid(),\n\n };\n\n\n\n let workflows = config.workflows.drain().map(|kvp| kvp.1).collect();\n\n ReactorExecutionResult::valid(workflows)\n\n}\n\n\n", "file_path": 
"mmids-core/src/reactors/executors/simple_http_executor.rs", "rank": 81, "score": 70865.7415925577 }, { "content": "struct ActiveStream {\n\n id: StreamId,\n\n stream_name: String,\n\n pending_media: VecDeque<MediaNotificationContent>,\n\n rtmp_output_status: WatchRegistrationStatus,\n\n rtmp_input_status: PublishRegistrationStatus,\n\n ffmpeg_status: FfmpegStatus,\n\n ffmpeg_id: Uuid,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/ffmpeg_transcode/mod.rs", "rank": 82, "score": 70865.7415925577 }, { "content": "struct ParamGenerator {\n\n rtmp_app: String,\n\n path: String,\n\n segment_duration: u16,\n\n segment_count: u16,\n\n stream_name: Option<String>,\n\n}\n\n\n\nimpl FfmpegHlsStepGenerator {\n\n pub fn new(\n\n rtmp_endpoint: UnboundedSender<RtmpEndpointRequest>,\n\n ffmpeg_endpoint: UnboundedSender<FfmpegEndpointRequest>,\n\n ) -> Self {\n\n FfmpegHlsStepGenerator {\n\n rtmp_endpoint,\n\n ffmpeg_endpoint,\n\n }\n\n }\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/ffmpeg_hls/mod.rs", "rank": 83, "score": 70865.7415925577 }, { "content": "struct TestContext {\n\n step_context: StepTestContext,\n\n rtmp_endpoint: UnboundedReceiver<RtmpEndpointRequest>,\n\n reactor_manager: UnboundedReceiver<ReactorManagerRequest>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/rtmp_watch/tests.rs", "rank": 84, "score": 70865.7415925577 }, { "content": "struct StreamDetails {\n\n target_workflow_names: HashSet<String>,\n\n required_media: Vec<MediaNotification>,\n\n\n\n // Used to cancel the reactor update future. When a stream disconnects, this cancellation\n\n // channel will be dropped causing the future waiting for reactor updates to be closed. This\n\n // will inform the reactor that this step is no longer interested in whatever workflow it was\n\n // managing for it. 
Not using a one shot, as the channel needs to live across multiple futures\n\n // if updates come in.\n\n _cancellation_channel: Option<UnboundedSender<()>>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/workflow_forwarder/mod.rs", "rank": 85, "score": 70865.7415925577 }, { "content": "struct DefinitionBuilder {\n\n port: Option<u16>,\n\n app: Option<String>,\n\n key: Option<String>,\n\n reactor: Option<String>,\n\n}\n\n\n\nimpl DefinitionBuilder {\n\n fn new() -> Self {\n\n DefinitionBuilder {\n\n port: None,\n\n app: None,\n\n key: None,\n\n reactor: None,\n\n }\n\n }\n\n\n\n fn port(mut self, port: u16) -> Self {\n\n self.port = Some(port);\n\n self\n", "file_path": "mmids-core/src/workflows/steps/rtmp_receive/tests.rs", "rank": 86, "score": 70865.7415925577 }, { "content": "struct TestContext {\n\n reactor_manager: UnboundedReceiver<ReactorManagerRequest>,\n\n _event_hub: UnboundedReceiver<SubscriptionRequest>,\n\n step_context: StepTestContext,\n\n workflow_sender: UnboundedSender<WorkflowRequest>,\n\n workflow_receiver: UnboundedReceiver<WorkflowRequest>,\n\n workflow_event_channel: UnboundedSender<WorkflowStartedOrStoppedEvent>,\n\n}\n\n\n\nimpl TestContext {\n\n async fn new(specific_workflow: Option<&str>, reactor: Option<&str>) -> Result<Self> {\n\n if specific_workflow.is_some() && reactor.is_some() {\n\n return Err(anyhow!(\n\n \"Both workflow and reactor names specified. Only one should be\"\n\n ));\n\n }\n\n\n\n if specific_workflow.is_none() && reactor.is_none() {\n\n return Err(anyhow!(\n\n \"Neither workflow or reactor name specified. 
One must be\"\n", "file_path": "mmids-core/src/workflows/steps/workflow_forwarder/tests.rs", "rank": 87, "score": 70865.7415925577 }, { "content": "struct TestInputStep {\n\n status: StepStatus,\n\n definition: WorkflowStepDefinition,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/runner/test_steps.rs", "rank": 88, "score": 70865.7415925577 }, { "content": "struct DefinitionBuilder {\n\n vcodec: Option<String>,\n\n acodec: Option<String>,\n\n h264_preset: Option<String>,\n\n size: Option<String>,\n\n bitrate: Option<u16>,\n\n}\n\n\n\nimpl DefinitionBuilder {\n\n fn new() -> Self {\n\n DefinitionBuilder {\n\n vcodec: None,\n\n acodec: None,\n\n h264_preset: None,\n\n size: None,\n\n bitrate: None,\n\n }\n\n }\n\n\n\n fn vcodec(mut self, vcodec: &str) -> Self {\n", "file_path": "mmids-core/src/workflows/steps/ffmpeg_transcode/tests.rs", "rank": 89, "score": 70865.7415925577 }, { "content": "struct TestContext {\n\n step_context: StepTestContext,\n\n rtmp_endpoint: UnboundedReceiver<RtmpEndpointRequest>,\n\n ffmpeg_endpoint: UnboundedReceiver<FfmpegEndpointRequest>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/ffmpeg_transcode/tests.rs", "rank": 90, "score": 70865.7415925577 }, { "content": "struct TranscodeManager {\n\n termination_requested: bool,\n\n id: Uuid,\n\n futures: FuturesUnordered<BoxFuture<'static, TranscoderFutureResult>>,\n\n video_encoder: Box<dyn VideoEncoder>,\n\n audio_encoder: Box<dyn AudioEncoder>,\n\n pipeline: Pipeline,\n\n}\n\n\n\nunsafe impl Send for TranscodeManager {}\n\nunsafe impl Sync for TranscodeManager {}\n\n\n\nimpl TranscodeManager {\n\n fn new(\n\n parameters: TranscoderParams,\n\n receiver: UnboundedReceiver<TranscodeManagerRequest>,\n\n ) -> TranscodeManager {\n\n let futures = FuturesUnordered::new();\n\n futures.push(wait_for_request(receiver).boxed());\n\n futures.push(notify_on_outbound_media_closed(parameters.outbound_media).boxed());\n", "file_path": 
"mmids-gstreamer/src/endpoints/gst_transcoder/transcoding_manager.rs", "rank": 91, "score": 70865.7415925577 }, { "content": "struct TestContext {\n\n step_context: StepTestContext,\n\n rtmp_endpoint: UnboundedReceiver<RtmpEndpointRequest>,\n\n reactor_manager: UnboundedReceiver<ReactorManagerRequest>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/rtmp_receive/tests.rs", "rank": 92, "score": 70865.7415925577 }, { "content": "struct AvencAacEncoder {\n\n source: AppSrc,\n\n}\n\n\n\nimpl AvencAacEncoder {\n\n fn new(\n\n media_sender: UnboundedSender<MediaNotificationContent>,\n\n parameters: &HashMap<String, Option<String>>,\n\n pipeline: &Pipeline,\n\n ) -> Result<AvencAacEncoder> {\n\n let bitrate = get_number(parameters, \"bitrate\");\n\n\n\n let appsrc = create_gst_element(\"appsrc\")?;\n\n let queue = create_gst_element(\"queue\")?;\n\n let decodebin = create_gst_element(\"decodebin\")?;\n\n let convert = create_gst_element(\"audioconvert\")?;\n\n let encoder = create_gst_element(\"avenc_aac\")?;\n\n let output_parser = create_gst_element(\"aacparse\")?;\n\n let appsink = create_gst_element(\"appsink\")?;\n\n\n", "file_path": "mmids-gstreamer/src/encoders/audio_avenc_aac.rs", "rank": 93, "score": 70865.7415925577 }, { "content": "struct WorkflowForwarderStep {\n\n global_workflow_name: Option<String>,\n\n reactor_name: Option<String>,\n\n reactor_manager: UnboundedSender<ReactorManagerRequest>,\n\n definition: WorkflowStepDefinition,\n\n status: StepStatus,\n\n active_streams: HashMap<StreamId, StreamDetails>,\n\n stream_for_workflow_name: HashMap<String, HashSet<StreamId>>,\n\n known_workflows: HashMap<String, UnboundedSender<WorkflowRequest>>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/workflow_forwarder/mod.rs", "rank": 94, "score": 70014.39161744676 }, { "content": "struct FfmpegHlsStep {\n\n definition: WorkflowStepDefinition,\n\n status: StepStatus,\n\n stream_reader: ExternalStreamReader,\n\n path: String,\n\n}\n\n\n", 
"file_path": "mmids-core/src/workflows/steps/ffmpeg_hls/mod.rs", "rank": 95, "score": 70014.39161744676 }, { "content": "struct ParamGenerator {\n\n rtmp_app: String,\n\n target: String,\n\n}\n\n\n\nimpl FfmpegRtmpPushStepGenerator {\n\n pub fn new(\n\n rtmp_endpoint: UnboundedSender<RtmpEndpointRequest>,\n\n ffmpeg_endpoint: UnboundedSender<FfmpegEndpointRequest>,\n\n ) -> Self {\n\n FfmpegRtmpPushStepGenerator {\n\n rtmp_endpoint,\n\n ffmpeg_endpoint,\n\n }\n\n }\n\n}\n\n\n\nimpl StepGenerator for FfmpegRtmpPushStepGenerator {\n\n fn generate(&self, definition: WorkflowStepDefinition) -> StepCreationResult {\n\n let target = match definition.parameters.get(TARGET) {\n", "file_path": "mmids-core/src/workflows/steps/ffmpeg_rtmp_push/mod.rs", "rank": 96, "score": 70014.39161744676 }, { "content": "struct FfmpegPullStep {\n\n definition: WorkflowStepDefinition,\n\n ffmpeg_endpoint: UnboundedSender<FfmpegEndpointRequest>,\n\n rtmp_endpoint: UnboundedSender<RtmpEndpointRequest>,\n\n status: StepStatus,\n\n rtmp_app: String,\n\n pull_location: String,\n\n stream_name: String,\n\n ffmpeg_id: Option<Uuid>,\n\n active_stream_id: Option<StreamId>,\n\n}\n\n\n", "file_path": "mmids-core/src/workflows/steps/ffmpeg_pull/mod.rs", "rank": 97, "score": 70014.39161744676 }, { "content": "struct RtmpWatchStep {\n\n definition: WorkflowStepDefinition,\n\n port: u16,\n\n rtmp_app: String,\n\n stream_key: StreamKeyRegistration,\n\n reactor_name: Option<String>,\n\n status: StepStatus,\n\n rtmp_endpoint_sender: UnboundedSender<RtmpEndpointRequest>,\n\n reactor_manager: UnboundedSender<ReactorManagerRequest>,\n\n media_channel: UnboundedSender<RtmpEndpointMediaMessage>,\n\n stream_id_to_name_map: HashMap<StreamId, String>,\n\n stream_watchers: HashMap<String, StreamWatchers>,\n\n}\n\n\n\nimpl StepFutureResult for RtmpWatchStepFutureResult {}\n\n\n", "file_path": "mmids-core/src/workflows/steps/rtmp_watch/mod.rs", "rank": 98, "score": 70014.39161744676 }, { "content": "struct 
RtmpReceiverStep {\n\n definition: WorkflowStepDefinition,\n\n rtmp_endpoint_sender: UnboundedSender<RtmpEndpointRequest>,\n\n reactor_manager: UnboundedSender<ReactorManagerRequest>,\n\n port: u16,\n\n rtmp_app: String,\n\n stream_key: StreamKeyRegistration,\n\n status: StepStatus,\n\n connection_details: HashMap<ConnectionId, ConnectionDetails>,\n\n reactor_name: Option<String>,\n\n}\n\n\n\nimpl StepFutureResult for FutureResult {}\n\n\n", "file_path": "mmids-core/src/workflows/steps/rtmp_receive/mod.rs", "rank": 99, "score": 70014.39161744676 } ]
Rust
rafx-framework/src/visibility/visibility_object_arc.rs
aclysma/renderer_prototype
a274b82c873c0ec7f9d6c3376cd054bfccfe3895
use crate::render_features::RenderObjectHandle; use crate::visibility::visibility_object_allocator::VisibilityObjectId; use crate::visibility::ObjectId; use crossbeam_channel::Sender; use glam::{Quat, Vec3}; use rafx_visibility::geometry::Transform; use rafx_visibility::{ AsyncCommand, ModelHandle, PolygonSoup, VisibilityObjectHandle, VisibleBounds, ZoneHandle, }; use slotmap::Key; use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::{Arc, Weak}; pub enum CullModel { Mesh(PolygonSoup), VisibleBounds(VisibleBounds), Sphere(f32), Quad(f32, f32), None, } impl CullModel { pub fn mesh(polygon_soup: PolygonSoup) -> CullModel { CullModel::Mesh(polygon_soup) } pub fn visible_bounds(model: VisibleBounds) -> CullModel { CullModel::VisibleBounds(model) } pub fn sphere(radius: f32) -> CullModel { CullModel::Sphere(radius) } pub fn quad( width: f32, height: f32, ) -> CullModel { CullModel::Quad(width, height) } pub fn none() -> CullModel { CullModel::None } } pub struct VisibilityObjectArcInner { object: VisibilityObjectRaii, visibility_object_id: AtomicU64, drop_tx: Sender<VisibilityObjectId>, } impl Drop for VisibilityObjectArcInner { fn drop(&mut self) { let _ = self .drop_tx .send(VisibilityObjectId::from(slotmap::KeyData::from_ffi( self.visibility_object_id.load(Ordering::Relaxed), ))); } } pub struct VisibilityObjectWeakArcInner { inner: Weak<VisibilityObjectArcInner>, } impl VisibilityObjectWeakArcInner { pub fn upgrade(&self) -> Option<VisibilityObjectArc> { self.inner .upgrade() .map(|inner| VisibilityObjectArc { inner }) } } #[derive(Clone)] pub struct VisibilityObjectArc { inner: Arc<VisibilityObjectArcInner>, } impl VisibilityObjectArc { pub(crate) fn new( object: VisibilityObjectRaii, drop_tx: Sender<VisibilityObjectId>, ) -> Self { Self { inner: Arc::new(VisibilityObjectArcInner { object, visibility_object_id: AtomicU64::default(), drop_tx, }), } } pub fn downgrade(&self) -> VisibilityObjectWeakArcInner { VisibilityObjectWeakArcInner { inner: 
Arc::downgrade(&self.inner), } } pub(super) fn set_visibility_object_id( &self, visibility_object_id: VisibilityObjectId, ) { self.inner .visibility_object_id .store(visibility_object_id.data().as_ffi(), Ordering::Relaxed); } #[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.inner.object.set_zone(zone); self } pub fn object_id(&self) -> ObjectId { self.inner.object.object_id() } pub fn visibility_object_handle(&self) -> VisibilityObjectHandle { self.inner.object.handle } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.inner.object.render_objects() } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.inner.object.set_cull_model(cull_model); self } pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.inner .object .set_transform(translation, rotation, scale); self } } pub struct VisibilityObjectRaii { commands: Sender<AsyncCommand>, handle: VisibilityObjectHandle, object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, } impl Drop for VisibilityObjectRaii { fn drop(&mut self) { let _ = self.commands.send(AsyncCommand::DestroyObject(self.handle)); } } impl VisibilityObjectRaii { pub fn new( object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, handle: VisibilityObjectHandle, commands: Sender<AsyncCommand>, ) -> Self { Self { commands, handle, object_id, render_objects, } } #[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectZone(self.handle, zone)) .expect("Unable to send SetObjectZone command."); self } pub fn object_id(&self) -> ObjectId { self.object_id } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.render_objects } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectCullModel(self.handle, cull_model)) .expect("Unable to send SetObjectCullModel command."); self 
} pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.commands .send(AsyncCommand::SetObjectTransform( self.handle, Transform { translation, rotation, scale, }, )) .expect("Unable to send SetObjectPosition command."); self } }
use crate::render_features::RenderObjectHandle; use crate::visibility::visibility_object_allocator::VisibilityObjectId; use crate::visibility::ObjectId; use crossbeam_channel::Sender; use glam::{Quat, Vec3}; use rafx_visibility::geometry::Transform; use rafx_visibility::{ AsyncCommand, ModelHandle, PolygonSoup, VisibilityObjectHandle, VisibleBounds, ZoneHandle, }; use slotmap::Key; use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::{Arc, Weak}; pub enum CullModel { Mesh(PolygonSoup), VisibleBounds(VisibleBounds), Sphere(f32), Quad(f32, f32), None, } impl CullModel { pub fn mesh(polygon_soup: PolygonSoup) -> CullModel { CullModel::Mesh(polygon_soup) } pub fn visible_bounds(model: VisibleBounds) -> CullModel { CullModel::VisibleBounds(model) } pub fn sphere(radius: f32) -> CullModel { CullModel::Sphere(radius) } pub fn quad( width: f32, height: f32, ) -> CullModel { CullModel::Quad(width, height) } pub fn none() -> CullModel { CullModel::None } } pub struct VisibilityObjectArcInner { object: VisibilityObjectRaii, visibility_object_id: AtomicU64, drop_tx: Sender<VisibilityObjectId>, } impl Drop for VisibilityObjectArcInner { fn drop(&mut self) { let _ = self .drop_tx .send(VisibilityObjectId::from(slotmap::KeyData::from_ffi( self.visibility_object_id.load(Ordering::Relaxed), ))); } } pub struct VisibilityObjectWeakArcInner { inner: Weak<VisibilityObjectArcInner>, } impl VisibilityObjectWeakArcInner { pub fn upgrade(&self) -> Option<VisibilityObjectArc> { self.inner .upgrade() .map(|inner| VisibilityObjectArc { inner }) } } #[derive(Clone)] pub struct VisibilityObjectArc { inner: Arc<VisibilityObjectArcInner>, } impl VisibilityObjectArc { pub(crate) fn new( object: VisibilityObjectRaii, drop_tx: Sender<VisibilityObjectId>, ) -> Self { Self { inner: Arc::new(VisibilityObjectArcInner { object, visibility_object_id: AtomicU64::default(), drop_tx, }), } } pub fn downgrade(&self) -> VisibilityObjectWeakArcInner { VisibilityObjectWeakArcInner { inner: 
Arc::downgrade(&self.inner), } } pub(super) fn set_visibility_object_id( &self, visibility_object_id: VisibilityObjectId, ) { self.inner .visibility_object_id .store(visibility_object_id.data().as_ffi(), Ordering::Relaxed); } #[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.inner.object.set_zone(zone); self } pub fn object_id(&self) -> ObjectId { self.inner.object.object_id() } pub fn visibility_object_handle(&self) -> VisibilityObjectHandle { self.inner.object.handle } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.inner.object.render_objects() } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.inner.object.set_cull_model(cull_model); self } pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.inner .object .set_transform(translation, rotation, scale); self } } pub struct VisibilityObjectRaii { commands: Sender<AsyncCommand>, handle: VisibilityObjectHandle, object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, } impl Drop for VisibilityObjectRaii { fn drop(&mut self) { let _ = self.commands.send(AsyncCommand::DestroyObject(self.handle)); } } impl VisibilityObjectRaii {
#[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectZone(self.handle, zone)) .expect("Unable to send SetObjectZone command."); self } pub fn object_id(&self) -> ObjectId { self.object_id } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.render_objects } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectCullModel(self.handle, cull_model)) .expect("Unable to send SetObjectCullModel command."); self } pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.commands .send(AsyncCommand::SetObjectTransform( self.handle, Transform { translation, rotation, scale, }, )) .expect("Unable to send SetObjectPosition command."); self } }
pub fn new( object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, handle: VisibilityObjectHandle, commands: Sender<AsyncCommand>, ) -> Self { Self { commands, handle, object_id, render_objects, } }
function_block-full_function
[ { "content": "/// Call when winit sends an event\n\npub fn handle_sdl2_event(\n\n event: &Event,\n\n input_state: &mut InputState,\n\n) {\n\n let _is_close_requested = false;\n\n\n\n match event {\n\n Event::KeyDown {\n\n keycode, repeat: _, ..\n\n } => handle_keyboard_event(input_state, keycode, minimum_input::ButtonState::Pressed),\n\n Event::KeyUp {\n\n keycode, repeat: _, ..\n\n } => handle_keyboard_event(input_state, keycode, minimum_input::ButtonState::Released),\n\n Event::MouseButtonDown { mouse_btn, .. } => {\n\n handle_mouse_button_event(input_state, mouse_btn, minimum_input::ButtonState::Pressed)\n\n }\n\n Event::MouseButtonUp { mouse_btn, .. } => {\n\n handle_mouse_button_event(input_state, mouse_btn, minimum_input::ButtonState::Released)\n\n }\n\n Event::MouseMotion { x, y, .. } => {\n", "file_path": "demo/src/input/input_sdl2.rs", "rank": 0, "score": 222048.84282992175 }, { "content": "fn random_color(rng: &mut impl Rng) -> Vec3 {\n\n let r = rng.gen_range(0.2..1.0);\n\n let g = rng.gen_range(0.2..1.0);\n\n let b = rng.gen_range(0.2..1.0);\n\n let v = Vec3::new(r, g, b);\n\n v.normalize()\n\n}\n\n\n", "file_path": "demo/src/scenes/mod.rs", "rank": 1, "score": 216730.90462114778 }, { "content": "/// Call when winit sends an event\n\npub fn handle_winit_event<T>(\n\n event: &winit::event::Event<T>,\n\n input_state: &mut super::InputState,\n\n) {\n\n use winit::event::Event;\n\n use winit::event::WindowEvent;\n\n\n\n let _is_close_requested = false;\n\n\n\n match event {\n\n //Process keyboard input\n\n Event::WindowEvent {\n\n event: WindowEvent::KeyboardInput { input, .. 
},\n\n ..\n\n } => {\n\n log::trace!(\"keyboard input {:?}\", input);\n\n if let Some(vk) = input.virtual_keycode {\n\n input_state\n\n .handle_keyboard_event(vk.into(), WinitElementState::new(input.state).into());\n\n }\n", "file_path": "demo/src/input/input_winit.rs", "rank": 2, "score": 211692.6496598368 }, { "content": "// Internally represents a VkCommandPool with automatic lifetime/reuse management\n\nstruct DynCommandPoolInner {\n\n command_pool: RafxCommandPool,\n\n command_pool_meta: CommandPoolMeta,\n\n allocated_command_buffers: Vec<DynCommandBuffer>,\n\n submits_in_frame_index: u64,\n\n\n\n // Just a debugging aid\n\n pool_id: u64,\n\n}\n\n\n\nimpl DynCommandPoolInner {\n\n fn reset_command_pool(&mut self) -> RafxResult<()> {\n\n for command_buffer in &self.allocated_command_buffers {\n\n command_buffer.return_to_pool()?;\n\n }\n\n\n\n self.allocated_command_buffers.clear();\n\n self.command_pool.reset_command_pool()\n\n }\n\n}\n", "file_path": "rafx-framework/src/resources/dyn_commands.rs", "rank": 3, "score": 206067.13784108136 }, { "content": "struct DynCommandPoolAllocatorInner {\n\n // Command pools that are ready to use but have no recorded commands\n\n unused_pools: FnvHashMap<CommandPoolMeta, Vec<DynCommandPoolInner>>,\n\n\n\n // Command pools that are in use and have a frame that we know they will be submitted in\n\n pending_pools: FnvHashMap<PendingCommandPoolMeta, Vec<DynCommandPoolInner>>,\n\n\n\n // submitted pools\n\n // TODO: Would be less allocations if this was a static array of vecs\n\n submitted_pools: BTreeMap<u64, Vec<DynCommandPoolInner>>,\n\n\n\n max_frames_in_flight: u64,\n\n current_frame_index: u64,\n\n\n\n drop_tx: Sender<DynCommandPoolInner>,\n\n drop_rx: Receiver<DynCommandPoolInner>,\n\n\n\n // Just a debugging aid\n\n next_pool_id: u64,\n\n}\n", "file_path": "rafx-framework/src/resources/dyn_commands.rs", "rank": 4, "score": 203098.0877628072 }, { "content": "pub fn 
new_gpu_image_data_from_image_asset_data_subresources(\n\n width: u32,\n\n height: u32,\n\n format: RafxFormat,\n\n subresources: ImageAssetDataPayloadSubresources,\n\n) -> GpuImageData {\n\n #[cfg(debug_assertions)]\n\n {\n\n debug_assert_eq!(width, subresources.layers[0].mip_levels[0].width);\n\n debug_assert_eq!(height, subresources.layers[0].mip_levels[0].height);\n\n for i in 1..subresources.layers.len() {\n\n let layers = &subresources.layers;\n\n let layer_0 = &layers[0];\n\n debug_assert_eq!(layer_0.mip_levels.len(), layers[i].mip_levels.len());\n\n for j in 1..layer_0.mip_levels.len() {\n\n debug_assert_eq!(layer_0.mip_levels[j].width, layers[i].mip_levels[j].width);\n\n debug_assert_eq!(layer_0.mip_levels[j].height, layers[i].mip_levels[j].height);\n\n debug_assert_eq!(layer_0.mip_levels[j].width, layers[i].mip_levels[j].width);\n\n }\n\n }\n", "file_path": "rafx-assets/src/assets/image/asset_upload_queue.rs", "rank": 5, "score": 199022.4914346828 }, { "content": "//\n\n// Static functions\n\n//\n\npub fn handle_load_result<AssetT: Clone>(\n\n load_op: AssetLoadOp,\n\n loaded_asset: RafxResult<AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n result_tx: Sender<AssetT>,\n\n) {\n\n match loaded_asset {\n\n Ok(loaded_asset) => {\n\n asset_lookup.set_uncommitted(load_op.load_handle(), loaded_asset.clone());\n\n result_tx.send(loaded_asset).unwrap();\n\n load_op.complete()\n\n }\n\n Err(err) => {\n\n load_op.error(err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 6, "score": 194123.22371307225 }, { "content": "pub fn default_daemon() -> distill::daemon::AssetDaemon {\n\n use crate::assets::*;\n\n\n\n // This demonstrates using filenames to hint default settings for images on import for normal\n\n // maps and roughness/metalness maps by using filenames. 
Otherwise, the user has to remember to\n\n // edit the .meta file.\n\n let pbr_map_suffix = vec![\"_pbr.\"];\n\n let normal_map_suffix = vec![\"_n.\"];\n\n\n\n // Default config\n\n let mut image_importer_config = ImageImporterConfig::new(ImageImporterRuleOptions {\n\n mip_generation: ImageAssetMipGeneration::Runtime,\n\n color_space: ImageAssetColorSpaceConfig::Srgb,\n\n data_format: ImageAssetDataFormatConfig::Uncompressed,\n\n });\n\n\n\n for suffix in normal_map_suffix {\n\n // Override for normal maps\n\n image_importer_config.add_filename_contains_override(\n\n suffix,\n", "file_path": "rafx-assets/src/distill_impl/mod.rs", "rank": 7, "score": 192299.90456674382 }, { "content": "pub fn handle_free_requests<AssetDataT, AssetT>(\n\n load_queues: &mut LoadQueues<AssetDataT, AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n) {\n\n for request in load_queues.take_free_requests() {\n\n log::trace!(\n\n \"free asset {:?} {}\",\n\n request.load_handle,\n\n core::any::type_name::<AssetDataT>()\n\n );\n\n asset_lookup.free(request.load_handle);\n\n }\n\n}\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 8, "score": 188820.05443603816 }, { "content": "pub fn handle_commit_requests<AssetDataT, AssetT>(\n\n load_queues: &mut LoadQueues<AssetDataT, AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n) {\n\n for request in load_queues.take_commit_requests() {\n\n log::trace!(\n\n \"commit asset {:?} {}\",\n\n request.load_handle,\n\n core::any::type_name::<AssetDataT>()\n\n );\n\n asset_lookup.commit(request.load_handle);\n\n }\n\n}\n\n\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 9, "score": 188820.05443603816 }, { "content": "fn resolve_load_handle<T: AssetHandle>(\n\n handle: &T,\n\n indirection_table: &IndirectionTable,\n\n) -> Option<LoadHandle> {\n\n if handle.load_handle().is_indirect() {\n\n indirection_table.resolve(handle.load_handle())\n\n } else {\n\n Some(handle.load_handle())\n\n 
}\n\n}\n\n\n\nimpl<AssetT: TypeUuid + Send> Storage<AssetT> {\n\n fn new(\n\n sender: Sender<RefOp>,\n\n loader: Box<dyn DynAssetLoader<AssetT>>,\n\n indirection_table: IndirectionTable,\n\n ) -> Self {\n\n Self {\n\n refop_sender: sender,\n\n assets: HashMap::new(),\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 10, "score": 188508.1841794553 }, { "content": "pub fn rendering_init(\n\n resources: &mut Resources,\n\n asset_source: AssetSource,\n\n window: &dyn HasRawWindowHandle,\n\n window_width: u32,\n\n window_height: u32,\n\n) -> RafxResult<()> {\n\n resources.insert(VisibilityResource::new());\n\n resources.insert(ViewportsResource::default());\n\n\n\n #[cfg(feature = \"basic-pipeline\")]\n\n let mesh_renderer_plugin = Arc::new(MeshBasicRendererPlugin::new(Some(32)));\n\n #[cfg(not(feature = \"basic-pipeline\"))]\n\n let mesh_renderer_plugin = Arc::new(MeshAdvRendererPlugin::new(Some(32)));\n\n let sprite_renderer_plugin = Arc::new(SpriteRendererPlugin::default());\n\n let skybox_renderer_plugin = Arc::new(SkyboxRendererPlugin::default());\n\n let tile_layer_renderer_plugin = Arc::new(TileLayerRendererPlugin::default());\n\n let debug3d_renderer_plugin = Arc::new(Debug3DRendererPlugin::default());\n\n let debug_pip_renderer_plugin = Arc::new(DebugPipRendererPlugin::default());\n\n let text_renderer_plugin = Arc::new(TextRendererPlugin::default());\n", "file_path": "demo/src/init.rs", "rank": 11, "score": 176223.09056968248 }, { "content": "pub fn logging_init() {\n\n #[cfg(not(debug_assertions))]\n\n let log_level = log::LevelFilter::Info;\n\n #[cfg(debug_assertions)]\n\n let log_level = log::LevelFilter::Debug;\n\n\n\n // Setup logging\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .filter_module(\n\n \"rafx_assets::resources::descriptor_sets\",\n\n log::LevelFilter::Info,\n\n )\n\n .filter_module(\"rafx_framework::nodes\", log::LevelFilter::Info)\n\n 
.filter_module(\"rafx_framework::visibility\", log::LevelFilter::Info)\n\n .filter_module(\"rafx_framework::graph\", log::LevelFilter::Debug)\n\n .filter_module(\"rafx_framework::resources\", log::LevelFilter::Debug)\n\n .filter_module(\"rafx_framework::graph::graph_plan\", log::LevelFilter::Info)\n\n .filter_module(\"rafx_api\", log::LevelFilter::Debug)\n\n .filter_module(\"rafx_framework\", log::LevelFilter::Debug)\n", "file_path": "demo/src/main.rs", "rank": 12, "score": 176223.09056968248 }, { "content": "pub fn update() {}\n", "file_path": "demo-web/src/lib.rs", "rank": 13, "score": 176223.09056968248 }, { "content": "pub fn update_loop(\n\n args: &DemoArgs,\n\n window: winit::window::Window,\n\n event_loop: winit::event_loop::EventLoop<()>,\n\n) -> RafxResult<()> {\n\n log::debug!(\"calling init\");\n\n let mut app = DemoApp::init(args, &window).unwrap();\n\n\n\n log::debug!(\"start update loop\");\n\n event_loop.run(move |event, _, control_flow| {\n\n use winit::event::Event;\n\n match event {\n\n Event::MainEventsCleared => {\n\n window.request_redraw();\n\n }\n\n Event::RedrawRequested(_) => {\n\n *control_flow = app.update(&window).unwrap();\n\n }\n\n event @ _ => {\n\n if !app.process_input(&event, &window) {\n\n *control_flow = ControlFlow::Exit;\n\n }\n\n }\n\n }\n\n });\n\n}\n", "file_path": "demo/src/lib.rs", "rank": 14, "score": 176223.09056968248 }, { "content": "pub fn update_loop(\n\n window: winit::window::Window,\n\n event_loop: winit::event_loop::EventLoop<()>,\n\n) -> RafxResult<()> {\n\n //\n\n // Create the api\n\n //\n\n log::trace!(\"Creating the API\");\n\n let api = unsafe { RafxApi::new(&window, &Default::default())? };\n\n\n\n // Wrap all of this so that it gets dropped before we drop the API object. 
This ensures a nice\n\n // clean shutdown.\n\n {\n\n // A cloneable device handle, these are lightweight and can be passed across threads\n\n let device_context = api.device_context();\n\n\n\n //\n\n // Create a swapchain\n\n //\n\n log::trace!(\"Creating swapchain\");\n", "file_path": "demo-web/src/lib.rs", "rank": 15, "score": 173720.70534580262 }, { "content": "#[cfg(not(feature = \"basic-pipeline\"))]\n\npub fn draw_taa_options(\n\n ui: &mut egui::Ui,\n\n render_options: &mut RenderOptions,\n\n) {\n\n let taa_options = &mut render_options.taa_options;\n\n ui.checkbox(\n\n &mut taa_options.enable_side_by_side_debug_view,\n\n \"side_by_side_debug_view\",\n\n );\n\n ui.add(\n\n egui::Slider::new(&mut taa_options.forward_pass_mip_bias, -5.0..=5.0)\n\n .text(\"forward_pass_mip_bias\"),\n\n );\n\n\n\n ui.add(\n\n egui::Slider::new(&mut taa_options.jitter_multiplier, 0.0..=3.0).text(\"jitter_multiplier\"),\n\n );\n\n ui.add(egui::Slider::new(&mut taa_options.history_weight, 0.0..=1.0).text(\"history_weight\"));\n\n ui.add(\n\n egui::Slider::new(\n", "file_path": "demo/src/demo_ui.rs", "rank": 16, "score": 171326.7896683852 }, { "content": "// Texture must be in COPY_SRC state\n\n// After this call, it will be in COPY_DST state\n\n// Vulkan requires this on a graphics queue. 
Metal allows this on any queue.\n\npub fn generate_mipmaps(\n\n command_buffer: &RafxCommandBuffer,\n\n _texture: &RafxTexture,\n\n) -> RafxResult<()> {\n\n match command_buffer {\n\n #[cfg(feature = \"rafx-vulkan\")]\n\n RafxCommandBuffer::Vk(inner) => generate_mipmaps_vk(inner, _texture),\n\n #[cfg(feature = \"rafx-metal\")]\n\n RafxCommandBuffer::Metal(inner) => generate_mipmaps_metal(inner, _texture),\n\n #[cfg(feature = \"rafx-gles2\")]\n\n RafxCommandBuffer::Gles2(inner) => generate_mipmaps_gles2(inner, _texture),\n\n #[cfg(feature = \"rafx-gles3\")]\n\n RafxCommandBuffer::Gles3(inner) => generate_mipmaps_gles3(inner, _texture),\n\n #[cfg(any(\n\n feature = \"rafx-empty\",\n\n not(any(\n\n feature = \"rafx-metal\",\n\n feature = \"rafx-vulkan\",\n\n feature = \"rafx-gles2\",\n\n feature = \"rafx-gles3\"\n\n ))\n\n ))]\n\n RafxCommandBuffer::Empty(_) => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 17, "score": 171326.7896683852 }, { "content": "pub fn main_native() {\n\n // Setup logging\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .filter_level(log::LevelFilter::Debug)\n\n .init();\n\n\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new()\n\n .with_title(\"Rafx Web Demo\")\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n crate::update_loop(window, event_loop).unwrap();\n\n}\n", "file_path": "demo-web/src/main_native.rs", "rank": 18, "score": 171326.7896683852 }, { "content": "pub fn calculate_window_hash(window: &dyn HasRawWindowHandle) -> WindowHash {\n\n let mut hasher = FnvHasher::default();\n\n window.raw_window_handle().hash(&mut hasher);\n\n WindowHash(hasher.finish())\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub struct WindowHash(u64);\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct BufferId(pub u32);\n\npub const NONE_BUFFER: BufferId = BufferId(gles2_bindings::NONE);\n\n\n\n#[derive(Copy, Clone, 
Debug, PartialEq, Eq, Hash)]\n\npub struct TextureId(pub u32);\n\npub const NONE_TEXTURE: TextureId = TextureId(gles2_bindings::NONE);\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct FramebufferId(pub u32);\n\npub const NONE_FRAMEBUFFER: FramebufferId = FramebufferId(gles2_bindings::NONE);\n", "file_path": "rafx-api/src/backends/gles2/internal/gl_context/mod.rs", "rank": 19, "score": 170855.06955639855 }, { "content": "pub fn calculate_window_hash(window: &dyn HasRawWindowHandle) -> WindowHash {\n\n let mut hasher = FnvHasher::default();\n\n window.raw_window_handle().hash(&mut hasher);\n\n WindowHash(hasher.finish())\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub struct WindowHash(u64);\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct BufferId(pub u32);\n\npub const NONE_BUFFER: BufferId = BufferId(gles3_bindings::NONE);\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct TextureId(pub u32);\n\npub const NONE_TEXTURE: TextureId = TextureId(gles3_bindings::NONE);\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct FramebufferId(pub u32);\n\npub const NONE_FRAMEBUFFER: FramebufferId = FramebufferId(gles3_bindings::NONE);\n", "file_path": "rafx-api/src/backends/gles3/internal/gl_context/mod.rs", "rank": 20, "score": 170855.06955639855 }, { "content": "pub fn round_size_up_to_alignment_u32(\n\n size: u32,\n\n required_alignment: u32,\n\n) -> u32 {\n\n assert!(required_alignment > 0);\n\n ((size + required_alignment - 1) / required_alignment) * required_alignment\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 21, "score": 169034.44049208728 }, { "content": "pub fn round_size_up_to_alignment_u64(\n\n size: u64,\n\n required_alignment: u64,\n\n) -> u64 {\n\n assert!(required_alignment > 0);\n\n ((size + required_alignment - 1) / required_alignment) * required_alignment\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 22, "score": 
169034.44049208728 }, { "content": "pub fn round_size_up_to_alignment_usize(\n\n size: usize,\n\n required_alignment: usize,\n\n) -> usize {\n\n assert!(required_alignment > 0);\n\n ((size + required_alignment - 1) / required_alignment) * required_alignment\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 23, "score": 169034.44049208728 }, { "content": "// This function is a little more complex to use than enqueue_load_images but can support cubemaps\n\n// We create a layer for each layer_image_assignment, and copy from the decoded_image\n\n// at the index matching the assignment\n\npub fn enqueue_load_image(\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n image_data: &GpuImageData,\n\n params: ImageUploadParams,\n\n) -> Result<RafxTexture, RafxUploadError> {\n\n // All images must have identical mip level count, sizes, etc.\n\n #[cfg(debug_assertions)]\n\n image_data.verify_state();\n\n\n\n //\n\n // Determine the total amount of data we need to upload and verify there is enough space\n\n //\n\n let bytes_required = image_data.total_size(IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT as u64);\n\n\n\n let has_space_available = upload.has_space_available(\n\n bytes_required as usize,\n\n IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT as usize,\n\n 1,\n\n );\n", "file_path": "rafx-framework/src/upload/image_upload.rs", "rank": 24, "score": 166842.94793799036 }, { "content": "pub fn jitter_amount(\n\n frame_index: usize,\n\n pattern: JitterPattern,\n\n viewport_size: glam::Vec2,\n\n) -> glam::Vec2 {\n\n let jitter_amount = match pattern {\n\n JitterPattern::SobolOwen16 => SOBOL_OWEN_JITTER_SAMPLES[frame_index % 16],\n\n JitterPattern::SobolOwen64 => SOBOL_OWEN_JITTER_SAMPLES[frame_index % 64],\n\n JitterPattern::Halton => HALTON_JITTER_SAMPLES[frame_index % HALTON_JITTER_SAMPLES.len()],\n\n JitterPattern::QuadJitterTest => {\n\n QUAD_JITTER_TEST_SAMPLES[frame_index % QUAD_JITTER_TEST_SAMPLES.len()]\n\n }\n\n JitterPattern::MAX => 
unimplemented!(),\n\n };\n\n\n\n return (jitter_amount * glam::Vec2::splat(2.0) - glam::Vec2::ONE) / viewport_size;\n\n}\n", "file_path": "rafx-plugins/src/pipelines/modern/internal/jitter.rs", "rank": 25, "score": 166837.3283539851 }, { "content": "pub fn default_main_view_masks(\n\n render_options: &RenderOptions\n\n) -> (\n\n RenderPhaseMaskBuilder,\n\n RenderFeatureMaskBuilder,\n\n RenderFeatureFlagMaskBuilder,\n\n) {\n\n let phase_mask_builder = RenderPhaseMaskBuilder::default()\n\n .add_render_phase::<DepthPrepassRenderPhase>()\n\n .add_render_phase::<OpaqueRenderPhase>()\n\n .add_render_phase::<TransparentRenderPhase>()\n\n .add_render_phase::<WireframeRenderPhase>()\n\n .add_render_phase::<DebugPipRenderPhase>()\n\n .add_render_phase::<UiRenderPhase>();\n\n\n\n //use rafx_plugins::features::debug_pip::DebugPipRenderFeature;\n\n let mut feature_mask_builder = RenderFeatureMaskBuilder::default()\n\n .add_render_feature::<MeshRenderFeature>()\n\n .add_render_feature::<SpriteRenderFeature>()\n\n //.add_render_feature::<DebugPipRenderFeature>()\n", "file_path": "demo/src/scenes/util/mod.rs", "rank": 26, "score": 166837.3283539851 }, { "content": "pub fn add_light_debug_draw(\n\n resources: &Resources,\n\n world: &World,\n\n) {\n\n let mut debug_draw = resources.get_mut::<Debug3DResource>().unwrap();\n\n\n\n let mut query = <Read<DirectionalLightComponent>>::query();\n\n for light in query.iter(world) {\n\n let light_from = light.direction * -10.0;\n\n let light_to = glam::Vec3::ZERO;\n\n\n\n debug_draw.add_line(light_from, light_to, light.color);\n\n }\n\n\n\n let mut query = <(Read<TransformComponent>, Read<PointLightComponent>)>::query();\n\n for (transform, light) in query.iter(world) {\n\n debug_draw.add_sphere(transform.translation, 0.1, light.color, 12);\n\n debug_draw.add_sphere(transform.translation, light.range(), light.color, 12);\n\n }\n\n\n", "file_path": "demo/src/scenes/util/mod.rs", "rank": 27, "score": 166837.3283539851 }, { "content": "pub fn 
enqueue_load_buffer(\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n // transfer_queue_family_index: u32,\n\n // dst_queue_family_index: u32,\n\n resource_type: RafxResourceType,\n\n data: &[u8],\n\n dst_buffer: Option<&RafxBuffer>,\n\n dst_byte_offset: u64,\n\n //TODO: params?\n\n) -> Result<Option<RafxBuffer>, RafxUploadError> {\n\n // Arbitrary, not sure if there is any requirement\n\n const REQUIRED_ALIGNMENT: usize = 16;\n\n\n\n // Push data into the staging buffer\n\n let src_byte_offset = upload.push(data, REQUIRED_ALIGNMENT)?;\n\n let size = data.len() as u64;\n\n\n\n // Allocate a GPU buffer\n\n let mut new_buffer = None;\n", "file_path": "rafx-framework/src/upload/buffer_upload.rs", "rank": 28, "score": 166837.3283539851 }, { "content": "pub fn load_image_blocking(\n\n device_context: &RafxDeviceContext,\n\n transfer_queue: &RafxQueue,\n\n dst_queue: &RafxQueue,\n\n upload_buffer_max_size: u64,\n\n image_data: &GpuImageData,\n\n params: ImageUploadParams,\n\n) -> Result<RafxTexture, RafxUploadError> {\n\n let total_size = image_data.total_size(IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT);\n\n if upload_buffer_max_size < total_size {\n\n Err(RafxUploadError::BufferFull)?;\n\n }\n\n\n\n let mut upload = RafxTransferUpload::new(\n\n device_context,\n\n transfer_queue,\n\n dst_queue,\n\n upload_buffer_max_size,\n\n None,\n\n )?;\n\n\n\n let texture = enqueue_load_image(device_context, &mut upload, image_data, params)?;\n\n\n\n upload.block_until_upload_complete()?;\n\n\n\n Ok(texture)\n\n}\n", "file_path": "rafx-framework/src/upload/image_upload.rs", "rank": 29, "score": 166837.3283539851 }, { "content": "pub fn parse_shader_source_recursive(\n\n file_to_process: &FileToProcess,\n\n declarations: &mut Vec<DeclarationText>,\n\n included_files: &mut FnvHashSet<PathBuf>,\n\n) -> Result<(), String> {\n\n log::trace!(\"parse_shader_source_recursive {:?}\", file_to_process);\n\n let resolved_include = super::include_impl(\n\n 
&file_to_process.path,\n\n file_to_process.include_type,\n\n &file_to_process.requested_from,\n\n file_to_process.include_depth,\n\n )?;\n\n\n\n if included_files.contains(&resolved_include.resolved_path) {\n\n return Ok(());\n\n }\n\n\n\n included_files.insert(resolved_include.resolved_path.clone());\n\n\n\n let mut resolved_file_paths = file_to_process.clone();\n", "file_path": "rafx-shader-processor/src/parse_source.rs", "rank": 30, "score": 164729.63901134641 }, { "content": "pub fn do_find_supported_format(\n\n instance: &ash::Instance,\n\n physical_device: vk::PhysicalDevice,\n\n candidates: &[RafxFormat],\n\n image_tiling: vk::ImageTiling,\n\n features: vk::FormatFeatureFlags,\n\n) -> Option<RafxFormat> {\n\n for &candidate in candidates {\n\n let props = unsafe {\n\n instance.get_physical_device_format_properties(physical_device, candidate.into())\n\n };\n\n\n\n let is_supported = match image_tiling {\n\n vk::ImageTiling::LINEAR => (props.linear_tiling_features & features) == features,\n\n vk::ImageTiling::OPTIMAL => (props.optimal_tiling_features & features) == features,\n\n _ => unimplemented!(),\n\n };\n\n\n\n if is_supported {\n\n return Some(candidate);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/device_context.rs", "rank": 31, "score": 164729.63901134641 }, { "content": "pub fn set_colorspace(\n\n layer: &MetalLayerRef,\n\n colorspace: &core_graphics::color_space::CGColorSpaceRef,\n\n) {\n\n unsafe { msg_send![layer, setColorspace: colorspace] }\n\n}\n\n\n\npub struct NSWindowWrapper(StrongPtr);\n\n\n\nimpl NSWindowWrapper {\n\n pub fn new(window: *mut Object) -> Self {\n\n unsafe {\n\n assert!(!window.is_null());\n\n let class = class!(NSWindow);\n\n let is_actually_window: BOOL = msg_send![window, isKindOfClass: class];\n\n assert_eq!(is_actually_window, YES);\n\n\n\n let ptr = StrongPtr::new(window);\n\n StrongPtr::retain(window);\n\n NSWindowWrapper(ptr)\n", "file_path": 
"rafx-api/src/backends/metal/internal/extra_ffi.rs", "rank": 32, "score": 164729.63901134641 }, { "content": "pub fn create_font_texture_with_ranges(\n\n font_data: &[u8],\n\n character_ranges_to_include: &[(u32, u32)],\n\n size: f32,\n\n margin: u32,\n\n) -> Option<FontTextureWithMeta> {\n\n // let character_ranges_to_include = vec![\n\n // (32, 128),\n\n // //(0x4e00, 0x5FCC)\n\n // ];\n\n\n\n let mut characters_to_include = vec![];\n\n\n\n //\n\n // Iterate codepoints in the font and find the characters within the given ranges\n\n //\n\n let face = ttf_parser::Face::from_slice(font_data, 0).unwrap();\n\n\n\n for subtable in face.character_mapping_subtables() {\n\n subtable.codepoints(|codepoint| {\n", "file_path": "rafx-plugins/src/assets/font/font_cooking.rs", "rank": 33, "score": 162706.0220633342 }, { "content": "pub fn blend_state_to_create_info(\n\n blend_state: &RafxBlendState,\n\n color_attachment_count: usize,\n\n) -> RafxBlendStateVkCreateInfo {\n\n let mut blend_attachments_states = vec![];\n\n\n\n blend_state.verify(color_attachment_count);\n\n\n\n if let Some(first_attachment) = blend_state.render_target_blend_states.first() {\n\n for attachment_index in 0..color_attachment_count {\n\n let attachment_state = if blend_state\n\n .render_target_mask\n\n .intersects(RafxBlendStateTargets::from_bits(1 << attachment_index).unwrap())\n\n {\n\n if blend_state.independent_blend {\n\n blend_state_render_target_to_create_info(\n\n &blend_state.render_target_blend_states[attachment_index],\n\n )\n\n } else {\n\n blend_state_render_target_to_create_info(first_attachment)\n", "file_path": "rafx-api/src/backends/vulkan/internal/util.rs", "rank": 34, "score": 162706.0220633342 }, { "content": "/// Creates a right-handed perspective projection matrix with [0,1] depth range.\n\npub fn perspective_rh(\n\n fov_y_radians: f32,\n\n aspect_ratio: f32,\n\n z_near: f32,\n\n z_far: f32,\n\n) -> glam::Mat4 {\n\n debug_assert!(z_near > 0.0 && z_far > 0.0);\n\n let (sin_fov, 
cos_fov) = (0.5 * fov_y_radians).sin_cos();\n\n let h = cos_fov / sin_fov;\n\n let w = h / aspect_ratio;\n\n let r = z_far / (z_near - z_far);\n\n glam::Mat4::from_cols(\n\n glam::Vec4::new(w, 0.0, 0.0, 0.0),\n\n glam::Vec4::new(0.0, h, 0.0, 0.0),\n\n glam::Vec4::new(0.0, 0.0, r, -1.0),\n\n glam::Vec4::new(0.0, 0.0, r * z_near, 0.0),\n\n )\n\n}\n\n\n", "file_path": "rafx-plugins/src/features/mesh_basic/shadow_map_resource.rs", "rank": 35, "score": 162706.0220633342 }, { "content": "pub fn resource_type_to_descriptor_type(\n\n resource_type: RafxResourceType\n\n) -> Option<vk::DescriptorType> {\n\n match resource_type {\n\n RafxResourceType::SAMPLER => Some(vk::DescriptorType::SAMPLER),\n\n RafxResourceType::TEXTURE => Some(vk::DescriptorType::SAMPLED_IMAGE),\n\n RafxResourceType::UNIFORM_BUFFER => Some(vk::DescriptorType::UNIFORM_BUFFER),\n\n RafxResourceType::TEXTURE_READ_WRITE => Some(vk::DescriptorType::STORAGE_IMAGE),\n\n RafxResourceType::BUFFER => Some(vk::DescriptorType::STORAGE_BUFFER),\n\n RafxResourceType::BUFFER_READ_WRITE => Some(vk::DescriptorType::STORAGE_BUFFER),\n\n RafxResourceType::INPUT_ATTACHMENT => Some(vk::DescriptorType::INPUT_ATTACHMENT),\n\n RafxResourceType::TEXEL_BUFFER => Some(vk::DescriptorType::UNIFORM_TEXEL_BUFFER),\n\n RafxResourceType::TEXEL_BUFFER_READ_WRITE => Some(vk::DescriptorType::STORAGE_TEXEL_BUFFER),\n\n RafxResourceType::COMBINED_IMAGE_SAMPLER => {\n\n Some(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/internal/util.rs", "rank": 36, "score": 162706.0220633342 }, { "content": "/// The max number of mip levels an image can have given its size\n\npub fn mip_level_max_count_for_image_size(\n\n width: u32,\n\n height: u32,\n\n) -> u32 {\n\n let max_dimension = std::cmp::max(width, height);\n\n (max_dimension as f32).log2().floor() as u32 + 1\n\n}\n\n\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 37, "score": 162706.0220633342 }, { 
"content": "pub fn gl_get_string(\n\n gles3: &Gles3,\n\n pname: u32,\n\n) -> String {\n\n unsafe {\n\n let str = gles3.GetString(pname);\n\n if str.is_null() {\n\n return \"\".to_string();\n\n }\n\n\n\n std::ffi::CStr::from_ptr(str as _)\n\n .to_str()\n\n .unwrap()\n\n .to_string()\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/gles3/internal/gl_context/native.rs", "rank": 38, "score": 160761.5456006836 }, { "content": "pub fn get_descriptor_set_element_write(\n\n descriptor_set_layout: &ResourceArc<DescriptorSetLayoutResource>,\n\n key: &DescriptorSetElementKey,\n\n) -> Option<DescriptorSetElementWrite> {\n\n for binding in &descriptor_set_layout\n\n .get_raw()\n\n .descriptor_set_layout_def\n\n .bindings\n\n {\n\n let element_count = binding.resource.element_count_normalized() as usize;\n\n if key.dst_binding != binding.resource.binding || key.array_index >= element_count {\n\n continue;\n\n }\n\n\n\n return Some(DescriptorSetElementWrite {\n\n has_immutable_sampler: binding.immutable_samplers.is_some(),\n\n descriptor_type: binding.resource.resource_type,\n\n image_info: DescriptorSetWriteElementImage::default(),\n\n buffer_info: DescriptorSetWriteElementBuffer::default(),\n\n });\n\n }\n\n\n\n None\n\n}\n", "file_path": "rafx-framework/src/resources/descriptor_sets/mod.rs", "rank": 39, "score": 160761.5456006836 }, { "content": "pub fn gl_get_integerv(\n\n gles2: &Gles2,\n\n pname: u32,\n\n) -> i32 {\n\n unsafe {\n\n let mut value = 0;\n\n gles2.GetIntegerv(pname, &mut value);\n\n value\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/gles2/internal/gl_context/native.rs", "rank": 40, "score": 160761.5456006836 }, { "content": "pub fn gl_get_string(\n\n gles2: &Gles2,\n\n pname: u32,\n\n) -> String {\n\n unsafe {\n\n let str = gles2.GetString(pname);\n\n if str.is_null() {\n\n return \"\".to_string();\n\n }\n\n\n\n std::ffi::CStr::from_ptr(str as _)\n\n .to_str()\n\n .unwrap()\n\n .to_string()\n\n }\n\n}\n\n\n", "file_path": 
"rafx-api/src/backends/gles2/internal/gl_context/native.rs", "rank": 41, "score": 160761.5456006836 }, { "content": "pub fn gl_get_integerv(\n\n gles3: &Gles3,\n\n pname: u32,\n\n) -> i32 {\n\n unsafe {\n\n let mut value = 0;\n\n gles3.GetIntegerv(pname, &mut value);\n\n value\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/gles3/internal/gl_context/native.rs", "rank": 42, "score": 160761.5456006836 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/renderer_triangle/renderer_triangle.rs", "rank": 43, "score": 157671.8157442311 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 44, "score": 157671.8157442311 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video 
subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/api_triangle/api_triangle.rs", "rank": 45, "score": 157671.8157442311 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 46, "score": 157671.8157442311 }, { "content": "pub fn create_uninitialized_write_set_for_layout(\n\n layout: &DescriptorSetLayout\n\n) -> DescriptorSetWriteSet {\n\n let mut write_set = DescriptorSetWriteSet::default();\n\n for binding in &layout.bindings {\n\n for array_index in 0..binding.resource.element_count_normalized() {\n\n let element_write = DescriptorSetElementWrite {\n\n has_immutable_sampler: binding.immutable_samplers.is_some(),\n\n descriptor_type: binding.resource.resource_type,\n\n image_info: DescriptorSetWriteElementImage::default(),\n\n buffer_info: DescriptorSetWriteElementBuffer::default(),\n\n };\n\n\n\n let key = DescriptorSetElementKey {\n\n dst_binding: binding.resource.binding as u32,\n\n array_index: array_index as usize,\n\n };\n\n\n\n write_set.elements.insert(key, element_write);\n\n }\n\n }\n\n\n\n write_set\n\n}\n", "file_path": 
"rafx-framework/src/resources/descriptor_sets/descriptor_write_set.rs", "rank": 47, "score": 157092.14269373353 }, { "content": "pub fn create_font_atlas_image_view(\n\n imgui_font_atlas_data: ImguiFontAtlasData,\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n dyn_resource_allocator: &DynResourceAllocatorSet,\n\n) -> RafxResult<ResourceArc<ImageViewResource>> {\n\n let image_data = GpuImageData::new_simple(\n\n imgui_font_atlas_data.width,\n\n imgui_font_atlas_data.height,\n\n GpuImageDataColorSpace::Linear.rgba8(),\n\n imgui_font_atlas_data.data,\n\n );\n\n\n\n let texture = image_upload::enqueue_load_image(\n\n device_context,\n\n upload,\n\n &image_data,\n\n ImageUploadParams {\n\n generate_mips: false,\n\n ..Default::default()\n\n },\n\n )\n\n .map_err(|x| Into::<RafxError>::into(x))?;\n\n\n\n let image = dyn_resource_allocator.insert_texture(texture);\n\n\n\n Ok(dyn_resource_allocator.insert_image_view(&image, None)?)\n\n}\n", "file_path": "rafx-plugins/src/features/imgui/internal/imgui_font_atlas.rs", "rank": 48, "score": 157092.14269373353 }, { "content": "pub fn color_render_target_binding_mtl_store_op(\n\n color_binding: &RafxColorRenderTargetBinding\n\n) -> MTLStoreAction {\n\n let resolve = color_binding.resolve_target.is_some()\n\n && color_binding.resolve_store_op == RafxStoreOp::Store;\n\n if color_binding.store_op == RafxStoreOp::Store {\n\n if resolve {\n\n MTLStoreAction::StoreAndMultisampleResolve\n\n } else {\n\n MTLStoreAction::Store\n\n }\n\n } else {\n\n if resolve {\n\n MTLStoreAction::MultisampleResolve\n\n } else {\n\n MTLStoreAction::DontCare\n\n }\n\n }\n\n}\n", "file_path": "rafx-api/src/backends/metal/internal/util.rs", "rank": 49, "score": 157092.14269373353 }, { "content": "#[cfg(feature = \"egui\")]\n\npub fn draw_ui(resources: &Resources) {\n\n let ctx = resources.get::<EguiContextResource>().unwrap().context();\n\n let time_state = resources.get::<TimeState>().unwrap();\n\n let mut debug_ui_state = 
resources.get_mut::<DebugUiState>().unwrap();\n\n let mut render_options = resources.get_mut::<RenderOptions>().unwrap();\n\n #[cfg(not(feature = \"basic-pipeline\"))]\n\n let tonemap_debug_data = resources.get::<ModernPipelineTonemapDebugData>().unwrap();\n\n #[cfg(not(feature = \"basic-pipeline\"))]\n\n let mesh_culling_debug_data = resources\n\n .get::<ModernPipelineMeshCullingDebugData>()\n\n .unwrap();\n\n let asset_resource = resources.get::<AssetResource>().unwrap();\n\n\n\n egui::TopBottomPanel::top(\"top_panel\").show(&ctx, |ui| {\n\n egui::menu::bar(ui, |ui| {\n\n egui::menu::menu(ui, \"Windows\", |ui| {\n\n ui.checkbox(&mut debug_ui_state.show_render_options, \"Render Options\");\n\n\n\n #[cfg(not(feature = \"basic-pipeline\"))]\n\n ui.checkbox(&mut debug_ui_state.show_taa_options, \"TAA Options\");\n", "file_path": "demo/src/demo_ui.rs", "rank": 50, "score": 156335.4701550258 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nstruct PendingCommandPoolMeta {\n\n submits_in_frame_index: u64,\n\n command_pool_meta: CommandPoolMeta,\n\n}\n\n\n", "file_path": "rafx-framework/src/resources/dyn_commands.rs", "rank": 51, "score": 154150.60511576998 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n console_log::init_with_level(Level::Debug).unwrap();\n\n\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new()\n\n .with_title(\"Winit Web GL Example\")\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n // Winit created a canvas element, we add it to the DOM here\n\n web_sys::window()\n\n .and_then(|win| win.document())\n\n .and_then(|doc| doc.body())\n\n .and_then(|body| {\n\n body.append_child(&web_sys::Element::from(window.canvas()))\n\n .ok()\n\n })\n\n .expect(\"couldn't append canvas to document body\");\n\n\n\n log::debug!(\"{:?}\", window.raw_window_handle());\n\n\n\n let result = crate::update_loop(window, event_loop);\n\n log::error!(\"Returned with result {:?}\", result);\n\n\n\n 
Ok(())\n\n}\n", "file_path": "demo-web/src/main_web.rs", "rank": 52, "score": 154138.3580169236 }, { "content": "pub fn main_native(args: &DemoArgs) {\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new()\n\n .with_title(\"Rafx Demo\")\n\n .with_inner_size(winit::dpi::LogicalSize::new(1601, 901))\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n crate::update_loop(&args, window, event_loop).unwrap();\n\n}\n", "file_path": "demo/src/main_native.rs", "rank": 53, "score": 154138.3580169236 }, { "content": "fn generate_samples() -> [[f32; 4]; 16] {\n\n //NOTE: Tried using a random kernel every frame, it causes lots of flickering. Getting good\n\n // enough results with a constant kernel and random rotations of it in the shader\n\n let mut rng = rand::rngs::StdRng::seed_from_u64(1);\n\n //let mut rng = thread_rng();\n\n\n\n let mut values = [[0.0; 4]; 16];\n\n for i in 0..values.len() {\n\n loop {\n\n let dir = glam::Vec3::new(\n\n rng.gen_range(-1.0..1.0),\n\n rng.gen_range(-1.0..1.0),\n\n rng.gen_range(0.0..1.0),\n\n );\n\n\n\n if dir.length() <= 1.0 {\n\n dir.normalize();\n\n // This biases checking nearby rather than far away for the first few samples\n\n let length = lerp_f32(i as f32 / values.len() as f32, 0.1, 1.0);\n\n //let length = 1.0;\n", "file_path": "rafx-plugins/src/pipelines/modern/graph_generator/ssao_pass.rs", "rank": 54, "score": 150588.19891936582 }, { "content": "//\"textures/skybox.basis\"\n\npub fn setup_skybox<T: Into<String>>(\n\n resources: &Resources,\n\n path: T,\n\n) {\n\n let asset_resource = resources.get::<AssetResource>().unwrap();\n\n let skybox_texture = asset_resource.load_asset_path::<ImageAsset, _>(path);\n\n\n\n *resources\n\n .get_mut::<SkyboxResource>()\n\n .unwrap()\n\n .skybox_texture_mut() = Some(skybox_texture);\n\n}\n", "file_path": "demo/src/scenes/util/mod.rs", "rank": 55, "score": 149526.24941301023 }, { "content": "pub fn set_uniform<T: Copy>(\n\n gl_context: &GlContext,\n\n location: &LocationId,\n\n 
data: &T,\n\n gl_type: GLenum,\n\n count: u32,\n\n) -> RafxResult<()> {\n\n match gl_type {\n\n gles3_bindings::INT | gles3_bindings::BOOL => {\n\n gl_context.gl_uniform_1iv(location, data, count)\n\n }\n\n gles3_bindings::FLOAT => gl_context.gl_uniform_1fv(location, data, count),\n\n gles3_bindings::INT_VEC2 | gles3_bindings::BOOL_VEC2 => {\n\n gl_context.gl_uniform_2iv(location, data, count)\n\n }\n\n gles3_bindings::FLOAT_VEC2 => gl_context.gl_uniform_2fv(location, data, count),\n\n gles3_bindings::INT_VEC3 | gles3_bindings::BOOL_VEC3 => {\n\n gl_context.gl_uniform_3iv(location, data, count)\n\n }\n\n gles3_bindings::FLOAT_VEC3 => gl_context.gl_uniform_3fv(location, data, count),\n", "file_path": "rafx-api/src/backends/gles3/internal/gl_type_util.rs", "rank": 56, "score": 146192.68573701035 }, { "content": "pub fn set_uniform<T: Copy>(\n\n gl_context: &GlContext,\n\n location: &LocationId,\n\n data: &T,\n\n gl_type: GLenum,\n\n count: u32,\n\n) -> RafxResult<()> {\n\n match gl_type {\n\n gles2_bindings::INT | gles2_bindings::BOOL => {\n\n gl_context.gl_uniform_1iv(location, data, count)\n\n }\n\n gles2_bindings::FLOAT => gl_context.gl_uniform_1fv(location, data, count),\n\n gles2_bindings::INT_VEC2 | gles2_bindings::BOOL_VEC2 => {\n\n gl_context.gl_uniform_2iv(location, data, count)\n\n }\n\n gles2_bindings::FLOAT_VEC2 => gl_context.gl_uniform_2fv(location, data, count),\n\n gles2_bindings::INT_VEC3 | gles2_bindings::BOOL_VEC3 => {\n\n gl_context.gl_uniform_3iv(location, data, count)\n\n }\n\n gles2_bindings::FLOAT_VEC3 => gl_context.gl_uniform_3fv(location, data, count),\n", "file_path": "rafx-api/src/backends/gles2/internal/gl_type_util.rs", "rank": 57, "score": 146192.68573701035 }, { "content": "fn rust_structs(\n\n rust_code: &mut Vec<String>,\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n) -> Result<Vec<GenerateStructResult>, String> {\n\n let mut structs = Vec::default();\n\n for (type_name, 
user_type) in user_types {\n\n if user_type.export_uniform_layout {\n\n let s = generate_struct(\n\n &builtin_types,\n\n &user_types,\n\n type_name,\n\n user_type,\n\n MemoryLayout::Std140,\n\n )?;\n\n rust_code.push(generate_struct_code(&s));\n\n rust_code.push(generate_struct_default_code(&s));\n\n structs.push(s);\n\n }\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 58, "score": 146042.81642361073 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum StructBindingType {\n\n Uniform,\n\n Buffer,\n\n PushConstant,\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 59, "score": 145650.8480901655 }, { "content": "fn handle_keyboard_event(\n\n input_state: &mut InputState,\n\n keycode: &Option<Keycode>,\n\n button_state: minimum_input::ButtonState,\n\n) {\n\n if let Some(kc) = keycode {\n\n input_state.handle_keyboard_event(Sdl2KeyboardKey::new(*kc).into(), button_state)\n\n }\n\n}", "file_path": "demo/src/input/input_sdl2.rs", "rank": 60, "score": 143723.03714580007 }, { "content": "pub fn rendering_destroy(resources: &mut Resources) -> RafxResult<()> {\n\n // Destroy these first\n\n {\n\n {\n\n let swapchain_helper = resources.remove::<RafxSwapchainHelper>().unwrap();\n\n let mut asset_manager = resources.get_mut::<AssetManager>().unwrap();\n\n let renderer = resources.get::<Renderer>().unwrap();\n\n SwapchainHandler::destroy_swapchain(swapchain_helper, &mut *asset_manager, &*renderer)?;\n\n }\n\n\n\n resources.remove::<Renderer>();\n\n\n\n #[cfg(feature = \"basic-pipeline\")]\n\n MeshBasicRendererPlugin::legion_destroy(resources);\n\n #[cfg(not(feature = \"basic-pipeline\"))]\n\n MeshAdvRendererPlugin::legion_destroy(resources);\n\n SpriteRendererPlugin::legion_destroy(resources);\n\n SkyboxRendererPlugin::legion_destroy(resources);\n\n TileLayerRendererPlugin::legion_destroy(resources);\n\n Debug3DRendererPlugin::legion_destroy(resources);\n", "file_path": "demo/src/init.rs", "rank": 61, "score": 143577.45446261857 }, 
{ "content": "pub fn init_distill_daemon(connect_string: String) -> AssetResource {\n\n let rpc_loader = RpcIO::new(RpcConnectionType::TCP(connect_string)).unwrap();\n\n let loader = Loader::new(Box::new(rpc_loader));\n\n let resolver = Box::new(DefaultIndirectionResolver);\n\n AssetResource::new(loader, resolver)\n\n}\n\n\n", "file_path": "rafx-renderer/src/daemon.rs", "rank": 62, "score": 143450.46665970873 }, { "content": "#[derive(Clone, Debug, Copy)]\n\nstruct QuadVertex {\n\n pub pos: [f32; 3],\n\n pub tex_coord: [f32; 2],\n\n}\n\n\n\n/// Static data the represents a \"unit\" quad\n\nconst QUAD_VERTEX_LIST: [QuadVertex; 4] = [\n\n // Top Right\n\n QuadVertex {\n\n pos: [0.5, 0.5, 0.0],\n\n tex_coord: [1.0, 0.0],\n\n },\n\n // Top Left\n\n QuadVertex {\n\n pos: [-0.5, 0.5, 0.0],\n\n tex_coord: [0.0, 0.0],\n\n },\n\n // Bottom Right\n\n QuadVertex {\n\n pos: [0.5, -0.5, 0.0],\n", "file_path": "rafx-plugins/src/features/sprite/jobs/prepare.rs", "rank": 63, "score": 142759.04385858608 }, { "content": "struct ResourceContextInner {\n\n descriptor_set_allocator_provider: DescriptorSetAllocatorProvider,\n\n dyn_resources_allocator_provider: DynResourceAllocatorSetProvider,\n\n dyn_command_pool_allocator: DynCommandPoolAllocator,\n\n resources: ResourceLookupSet,\n\n graphics_pipeline_cache: GraphicsPipelineCache,\n\n render_graph_cache: RenderGraphCache,\n\n builtin_pipelines: BuiltinPipelines,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ResourceContext {\n\n inner: Arc<ResourceContextInner>,\n\n}\n\n\n\nimpl ResourceContext {\n\n pub fn device_context(&self) -> &RafxDeviceContext {\n\n self.inner.resources.device_context()\n\n }\n\n\n", "file_path": "rafx-framework/src/resources/resource_manager.rs", "rank": 64, "score": 142754.62544454387 }, { "content": "struct RenderRegistryInner {\n\n registered_phases: FnvHashMap<RenderPhaseIndex, RegisteredPhase>,\n\n phase_name_to_index: FnvHashMap<String, RenderPhaseIndex>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct 
RenderRegistry {\n\n inner: Arc<RenderRegistryInner>,\n\n}\n\n\n\nimpl RenderRegistry {\n\n pub fn registered_feature_count() -> RenderFeatureIndex {\n\n RENDER_REGISTRY_FEATURE_COUNT.load(Ordering::Acquire)\n\n }\n\n\n\n pub fn registered_render_phase_count() -> RenderPhaseIndex {\n\n RENDER_REGISTRY_PHASE_COUNT.load(Ordering::Acquire)\n\n }\n\n\n\n pub fn render_phase_index_from_name(\n", "file_path": "rafx-framework/src/render_features/registry.rs", "rank": 65, "score": 142754.62544454387 }, { "content": "pub fn slice_size_in_bytes<T>(slice: &[T]) -> usize {\n\n let range = slice.as_ptr_range();\n\n (range.end as *const u8 as usize) - (range.start as *const u8 as usize)\n\n}\n\n\n\npub unsafe fn force_to_static_lifetime<T>(value: &T) -> &'static T {\n\n std::mem::transmute(value)\n\n}\n\n\n\npub unsafe fn force_to_static_lifetime_mut<T>(value: &mut T) -> &'static mut T {\n\n std::mem::transmute(value)\n\n}\n", "file_path": "rafx-base/src/memory.rs", "rank": 66, "score": 141553.83751460636 }, { "content": "fn handle_mouse_button_event(\n\n input_state: &mut InputState,\n\n mouse_btn: &MouseButton,\n\n button_state: minimum_input::ButtonState,\n\n) {\n\n input_state.handle_mouse_button_event(Sdl2MouseButton::new(*mouse_btn).into(), button_state)\n\n}\n\n\n", "file_path": "demo/src/input/input_sdl2.rs", "rank": 67, "score": 141429.14826362117 }, { "content": "fn resolve_load_handle(\n\n load_handle: LoadHandle,\n\n indirection_table: &IndirectionTable,\n\n) -> Option<LoadHandle> {\n\n if load_handle.is_indirect() {\n\n indirection_table.resolve(load_handle)\n\n } else {\n\n Some(load_handle)\n\n }\n\n}\n\n\n", "file_path": "rafx-assets/src/assets/asset_lookup.rs", "rank": 68, "score": 141429.14826362117 }, { "content": "// This is an inner of InProgressImageUpload - it is wrapped in a Option to avoid borrowing issues\n\n// when polling by allowing us to temporarily take ownership of it and then put it back\n\nstruct InProgressUploadBatchInner {\n\n 
in_flight_uploads: Vec<InFlightUpload>,\n\n upload: RafxTransferUpload,\n\n}\n\n\n", "file_path": "rafx-framework/src/upload/upload_queue.rs", "rank": 69, "score": 140559.52585874547 }, { "content": "#[derive(Debug)]\n\nstruct RafxShaderGles2Inner {\n\n device_context: RafxDeviceContextGles2,\n\n stage_flags: RafxShaderStageFlags,\n\n stages: Vec<RafxShaderStageDef>,\n\n pipeline_reflection: RafxPipelineReflection,\n\n vertex_shader: Gles2CompiledShader,\n\n fragment_shader: Gles2CompiledShader,\n\n program_id: ProgramId,\n\n}\n\n\n\nimpl Drop for RafxShaderGles2Inner {\n\n fn drop(&mut self) {\n\n self.device_context\n\n .gl_context()\n\n .gl_destroy_program(self.program_id)\n\n .unwrap();\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "rafx-api/src/backends/gles2/shader.rs", "rank": 70, "score": 140553.67776990347 }, { "content": "#[derive(Debug)]\n\nstruct RafxShaderGles3Inner {\n\n device_context: RafxDeviceContextGles3,\n\n stage_flags: RafxShaderStageFlags,\n\n stages: Vec<RafxShaderStageDef>,\n\n pipeline_reflection: RafxPipelineReflection,\n\n vertex_shader: Gles3CompiledShader,\n\n fragment_shader: Gles3CompiledShader,\n\n program_id: ProgramId,\n\n}\n\n\n\nimpl Drop for RafxShaderGles3Inner {\n\n fn drop(&mut self) {\n\n self.device_context\n\n .gl_context()\n\n .gl_destroy_program(self.program_id)\n\n .unwrap();\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "rafx-api/src/backends/gles3/shader.rs", "rank": 71, "score": 140553.67776990347 }, { "content": "#[derive(Debug)]\n\nstruct RafxShaderVulkanInner {\n\n stage_flags: RafxShaderStageFlags,\n\n stages: Vec<RafxShaderStageDef>,\n\n pipeline_reflection: RafxPipelineReflection,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct RafxShaderVulkan {\n\n inner: Arc<RafxShaderVulkanInner>,\n\n}\n\n\n\nimpl RafxShaderVulkan {\n\n pub fn new(\n\n _device_context: &RafxDeviceContextVulkan,\n\n stages: Vec<RafxShaderStageDef>,\n\n ) -> RafxResult<Self> {\n\n let pipeline_reflection = 
RafxPipelineReflection::from_stages(&stages)?;\n\n let mut stage_flags = RafxShaderStageFlags::empty();\n\n for stage in &stages {\n\n stage_flags |= stage.reflection.shader_stage;\n", "file_path": "rafx-api/src/backends/vulkan/shader.rs", "rank": 72, "score": 140553.67776990347 }, { "content": "#[derive(Debug)]\n\nstruct RafxShaderMetalInner {\n\n stage_flags: RafxShaderStageFlags,\n\n stages: Vec<RafxShaderStageDef>,\n\n pipeline_reflection: RafxPipelineReflection,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct RafxShaderMetal {\n\n inner: Arc<RafxShaderMetalInner>,\n\n}\n\n\n\nimpl RafxShaderMetal {\n\n pub fn new(\n\n _device_context: &RafxDeviceContextMetal,\n\n stages: Vec<RafxShaderStageDef>,\n\n ) -> RafxResult<Self> {\n\n let pipeline_reflection = RafxPipelineReflection::from_stages(&stages)?;\n\n let mut stage_flags = RafxShaderStageFlags::empty();\n\n for stage in &stages {\n\n stage_flags |= stage.reflection.shader_stage;\n", "file_path": "rafx-api/src/backends/metal/shader.rs", "rank": 73, "score": 140553.67776990347 }, { "content": "pub fn any_as_bytes<T: Copy>(data: &T) -> &[u8] {\n\n let ptr: *const T = data;\n\n let ptr = ptr as *const u8;\n\n let slice: &[u8] = unsafe { std::slice::from_raw_parts(ptr, std::mem::size_of::<T>()) };\n\n\n\n slice\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 74, "score": 140328.93288437295 }, { "content": "// Inner state for ImguiManager, which will be protected by a Mutex. Mutex protection required since\n\n// this object is Send but not Sync\n\nstruct ImguiManagerInner {\n\n context: imgui::Context,\n\n\n\n // Pointer to the font atlas. Assuming no direct calls to C imgui interface, this pointer is\n\n // valid as long as context is not destroyed\n\n font_atlas_texture: *mut imgui::FontAtlasTexture<'static>,\n\n\n\n // Pointer to the current UI. 
Assuming no direct calls to C imgui interface, this pointer is\n\n // valid as long as context is not destroyed, and a frame has started and not ended\n\n ui: Option<*mut imgui::Ui<'static>>,\n\n\n\n // These are all refreshed when frame is started\n\n want_capture_keyboard: bool,\n\n want_capture_mouse: bool,\n\n want_set_mouse_pos: bool,\n\n want_text_input: bool,\n\n}\n\n\n\n// Rust assumes pointers in Inner are not safe to send, so we need to explicitly impl that here\n\nunsafe impl Send for ImguiManagerInner {}\n", "file_path": "rafx-plugins/src/features/imgui/internal/imgui_manager.rs", "rank": 75, "score": 138454.32618644866 }, { "content": "// Inner state for EguiManager, which will be protected by a Mutex. Mutex protection required since\n\n// this object is Send but not Sync\n\nstruct EguiManagerInner {\n\n context: egui::CtxRef,\n\n raw_input: egui::RawInput,\n\n start_time: rafx::base::Instant,\n\n\n\n // This is produced when calling render()\n\n font_atlas: Option<Arc<egui::Texture>>,\n\n clipped_meshes: Option<Vec<egui::epaint::ClippedMesh>>,\n\n}\n\n\n\n//TODO: Investigate usage of channels/draw lists\n\n#[derive(Clone)]\n\npub struct EguiManager {\n\n inner: Arc<Mutex<EguiManagerInner>>,\n\n}\n\n\n\n// Wraps egui (and winit integration logic)\n\nimpl EguiManager {\n\n // egui and winit platform are expected to be pre-configured\n\n pub fn new() -> Self {\n", "file_path": "rafx-plugins/src/features/egui/internal/egui_manager.rs", "rank": 76, "score": 138454.32618644866 }, { "content": "#[derive(Debug)]\n\nstruct Gles2CompiledShaderInner {\n\n device_context: RafxDeviceContextGles2,\n\n shader_id: ShaderId,\n\n stage: RafxShaderStageFlags,\n\n}\n\n\n\nimpl Drop for Gles2CompiledShaderInner {\n\n fn drop(&mut self) {\n\n self.device_context\n\n .gl_context()\n\n .gl_destroy_shader(self.shader_id)\n\n .unwrap();\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Gles2CompiledShader {\n\n inner: Arc<Gles2CompiledShaderInner>,\n\n}\n\n\n", 
"file_path": "rafx-api/src/backends/gles2/shader_module.rs", "rank": 77, "score": 138442.30899764234 }, { "content": "struct ViewFrustumArcInner {\n\n visibility_query: RwLock<VisibilityQuery>,\n\n static_view_frustum: Option<ViewFrustumRaii>,\n\n dynamic_view_frustum: Option<ViewFrustumRaii>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ViewFrustumArc {\n\n inner: Arc<ViewFrustumArcInner>,\n\n}\n\n\n\nimpl ViewFrustumArc {\n\n pub fn new(\n\n static_view_frustum: Option<ViewFrustumRaii>,\n\n dynamic_view_frustum: Option<ViewFrustumRaii>,\n\n ) -> Self {\n\n Self {\n\n inner: Arc::new(ViewFrustumArcInner {\n\n visibility_query: Default::default(),\n\n static_view_frustum,\n", "file_path": "rafx-framework/src/visibility/view_frustum_arc.rs", "rank": 78, "score": 138442.30899764234 }, { "content": "// Intentionally doesn't hold a DeviceContext as this is indirectly held by DeviceContext and would\n\n// create a cyclical reference\n\nstruct UnallocatedQueueInner {\n\n queue_family_index: u32,\n\n queue_index: u32,\n\n raw_queue: vk::Queue,\n\n locked_queue: Mutex<vk::Queue>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct VkUnallocatedQueue {\n\n inner: Arc<UnallocatedQueueInner>,\n\n}\n\n\n\nimpl VkUnallocatedQueue {\n\n pub fn new(\n\n device: &ash::Device,\n\n queue_family_index: u32,\n\n queue_index: u32,\n\n ) -> Self {\n\n let raw_queue = unsafe { device.get_device_queue(queue_family_index, queue_index) };\n\n let inner = UnallocatedQueueInner {\n", "file_path": "rafx-api/src/backends/vulkan/internal/queue_allocation.rs", "rank": 79, "score": 138442.30899764234 }, { "content": "#[derive(Debug)]\n\nstruct Gles3CompiledShaderInner {\n\n device_context: RafxDeviceContextGles3,\n\n shader_id: ShaderId,\n\n stage: RafxShaderStageFlags,\n\n}\n\n\n\nimpl Drop for Gles3CompiledShaderInner {\n\n fn drop(&mut self) {\n\n self.device_context\n\n .gl_context()\n\n .gl_destroy_shader(self.shader_id)\n\n .unwrap();\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct 
Gles3CompiledShader {\n\n inner: Arc<Gles3CompiledShaderInner>,\n\n}\n\n\n", "file_path": "rafx-api/src/backends/gles3/shader_module.rs", "rank": 80, "score": 138442.30899764234 }, { "content": "pub fn any_slice_as_bytes<T: Copy>(data: &[T]) -> &[u8] {\n\n let ptr: *const T = data.as_ptr();\n\n let ptr = ptr as *const u8;\n\n let slice: &[u8] =\n\n unsafe { std::slice::from_raw_parts(ptr, data.len() * std::mem::size_of::<T>()) };\n\n\n\n slice\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 81, "score": 138305.31593636074 }, { "content": "struct AssetState<A> {\n\n version: u32,\n\n asset_uuid: AssetUuid,\n\n asset: A,\n\n}\n\n\n\n// A strongly typed storage for a single asset type\n\npub struct Storage<AssetT: TypeUuid + Send> {\n\n refop_sender: Sender<RefOp>,\n\n assets: HashMap<LoadHandle, AssetState<AssetT>>,\n\n uncommitted: HashMap<LoadHandle, UncommittedAssetState<AssetT>>,\n\n loader: Box<dyn DynAssetLoader<AssetT>>,\n\n indirection_table: IndirectionTable,\n\n}\n\n\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 82, "score": 138116.666743974 }, { "content": "/// Implement to customize how PoolAllocator resets and destroys pools\n\npub trait PooledResourceImpl {\n\n fn reset(&mut self) -> RafxResult<()>;\n\n}\n\n\n", "file_path": "rafx-framework/src/resources/pool.rs", "rank": 83, "score": 138083.02884434705 }, { "content": "pub fn what_to_bind(element_write: &DescriptorSetElementWrite) -> WhatToBind {\n\n let mut what = WhatToBind::default();\n\n\n\n // See https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkWriteDescriptorSet.html\n\n match element_write.descriptor_type {\n\n RafxResourceType::SAMPLER => {\n\n what.bind_samplers = !element_write.has_immutable_sampler;\n\n }\n\n RafxResourceType::COMBINED_IMAGE_SAMPLER => {\n\n what.bind_samplers = !element_write.has_immutable_sampler;\n\n what.bind_images = true;\n\n }\n\n RafxResourceType::TEXTURE => {\n\n what.bind_images = true;\n\n }\n\n 
RafxResourceType::TEXTURE_READ_WRITE => {\n\n what.bind_images = true;\n\n }\n\n RafxResourceType::UNIFORM_BUFFER => {\n\n what.bind_buffers = true;\n", "file_path": "rafx-framework/src/resources/descriptor_sets/mod.rs", "rank": 84, "score": 138048.0268334802 }, { "content": "pub fn check_for_error(gles3: &Gles3) -> RafxResult<()> {\n\n unsafe {\n\n let result = gles3.GetError();\n\n if result != gles3_bindings::NO_ERROR {\n\n Err(RafxError::GlError(result))\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/gles3/internal/gl_context/native.rs", "rank": 85, "score": 138048.0268334802 }, { "content": "pub fn check_for_error(gles2: &Gles2) -> RafxResult<()> {\n\n unsafe {\n\n let result = gles2.GetError();\n\n if result != gles2_bindings::NO_ERROR {\n\n Err(RafxError::GlError(result))\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/gles2/internal/gl_context/native.rs", "rank": 86, "score": 138048.0268334802 }, { "content": "fn lerp_f32(\n\n t: f32,\n\n p0: f32,\n\n p1: f32,\n\n) -> f32 {\n\n p0 + (p1 - p0) * t\n\n}\n\n\n", "file_path": "rafx-plugins/src/pipelines/modern/graph_generator/ssao_pass.rs", "rank": 87, "score": 137133.55866173192 }, { "content": "#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\nenum GltfObjectId {\n\n Name(String),\n\n Index(usize),\n\n}\n\n\n", "file_path": "rafx-plugins/src/assets/mesh_basic/importers/gltf_importer.rs", "rank": 88, "score": 137059.8669581362 }, { "content": "#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\nenum GltfObjectId {\n\n Name(String),\n\n Index(usize),\n\n}\n\n\n", "file_path": "rafx-plugins/src/assets/mesh_adv/importers/gltf_importer.rs", "rank": 89, "score": 137059.8669581362 }, { "content": "fn generate_struct_code(st: &GenerateStructResult) -> String {\n\n let mut result_string = String::default();\n\n result_string += &format!(\n\n \"#[derive(Copy, Clone, 
Debug)]\\n#[repr(C)]\\npub struct {} {{\\n\",\n\n st.name\n\n );\n\n for m in &st.members {\n\n result_string += &format_member(&m.name, &m.ty, m.offset, m.size);\n\n }\n\n result_string += &format!(\"}} // {} bytes\\n\\n\", st.size);\n\n result_string\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 90, "score": 136797.2262299238 }, { "content": "struct WinitEguiManagerInner {\n\n #[cfg(feature = \"clipboard\")]\n\n clipboard: Option<ClipboardContext>,\n\n mouse_position: Option<egui::Pos2>,\n\n cursor: Option<winit::window::CursorIcon>,\n\n pending_cursor: Option<winit::window::CursorIcon>,\n\n}\n\n\n\n/// Full egui API and the SDL2 abstraction/platform integration\n\n#[derive(Clone)]\n\npub struct WinitEguiManager {\n\n egui_manager: EguiManager,\n\n inner: Arc<Mutex<WinitEguiManagerInner>>,\n\n}\n\n\n\n// Wraps egui (and winit integration logic)\n\nimpl WinitEguiManager {\n\n pub fn egui_manager(&self) -> EguiManager {\n\n self.egui_manager.clone()\n\n }\n", "file_path": "rafx-plugins/src/features/egui/winit_egui_manager.rs", "rank": 91, "score": 136415.15938663637 }, { "content": "struct Sdl2EguiManagerInner {\n\n clipboard: sdl2::clipboard::ClipboardUtil,\n\n video_subsystem: sdl2::VideoSubsystem,\n\n cursor: CursorHandler,\n\n}\n\n\n\n// For sdl2::mouse::Cursor, a member of egui_sdl2::Sdl2EguiManager\n\nunsafe impl Send for Sdl2EguiManagerInner {}\n\n\n\n/// Full egui API and the SDL2 abstraction/platform integration\n\n#[derive(Clone)]\n\npub struct Sdl2EguiManager {\n\n egui_manager: EguiManager,\n\n inner: Arc<Mutex<Sdl2EguiManagerInner>>,\n\n}\n\n\n\n// Wraps egui (and winit integration logic)\n\nimpl Sdl2EguiManager {\n\n pub fn egui_manager(&self) -> EguiManager {\n\n self.egui_manager.clone()\n", "file_path": "rafx-plugins/src/features/egui/sdl2_egui_manager.rs", "rank": 92, "score": 136415.15938663637 }, { "content": "struct Sdl2ImguiManagerInner {\n\n imgui_sdl2: ImguiSdl2,\n\n}\n\n\n\n// For sdl2::mouse::Cursor, a member of 
imgui_sdl2::ImguiSdl2\n\nunsafe impl Send for Sdl2ImguiManagerInner {}\n\n\n\n//TODO: Investigate usage of channels/draw lists\n\n#[derive(Clone)]\n\npub struct Sdl2ImguiManager {\n\n imgui_manager: ImguiManager,\n\n inner: Arc<Mutex<Sdl2ImguiManagerInner>>,\n\n}\n\n\n\n// Wraps imgui (and winit integration logic)\n\nimpl Sdl2ImguiManager {\n\n pub fn imgui_manager(&self) -> ImguiManager {\n\n self.imgui_manager.clone()\n\n }\n\n\n", "file_path": "rafx-plugins/src/features/imgui/sdl2_imgui_manager.rs", "rank": 93, "score": 136415.15938663637 }, { "content": "pub fn vertex_buffer_adjusted_buffer_index(binding: u32) -> NSUInteger {\n\n // Argument buffers will be 0-4\n\n // vertex buffers will be 30 - n\n\n (30 - binding) as _\n\n}\n\n\n\npub(crate) fn resource_type_mtl_data_type(resource_type: RafxResourceType) -> Option<MTLDataType> {\n\n if resource_type.intersects(\n\n RafxResourceType::UNIFORM_BUFFER\n\n | RafxResourceType::BUFFER\n\n | RafxResourceType::BUFFER_READ_WRITE,\n\n ) {\n\n Some(MTLDataType::Pointer)\n\n } else if resource_type\n\n .intersects(RafxResourceType::TEXTURE | RafxResourceType::TEXTURE_READ_WRITE)\n\n {\n\n Some(MTLDataType::Texture)\n\n } else if resource_type.intersects(RafxResourceType::SAMPLER) {\n\n Some(MTLDataType::Sampler)\n\n } else {\n", "file_path": "rafx-api/src/backends/metal/internal/util.rs", "rank": 94, "score": 136377.84958410816 }, { "content": "struct DropSinkResourceInFlight<T> {\n\n // prefixed with _ to silence \"field not used\" warning. 
The purpose of the var is to hold the\n\n // resource for a while then drop this entire structure\n\n _resource: T,\n\n live_until_frame: Wrapping<u32>,\n\n}\n\n\n\n/// This handles waiting for N frames to pass before dropping the resource.\n\npub struct ResourceDropSink<T> {\n\n // We are assuming that all resources can survive for the same amount of time so the data in\n\n // this VecDeque will naturally be orderered such that things that need to be destroyed sooner\n\n // are at the front\n\n resources_in_flight: VecDeque<DropSinkResourceInFlight<T>>,\n\n\n\n // All resources pushed into the sink will be destroyed after N frames\n\n max_in_flight_frames: Wrapping<u32>,\n\n\n\n // Incremented when on_frame_complete is called\n\n frame_index: Wrapping<u32>,\n\n}\n", "file_path": "rafx-framework/src/resources/cleanup.rs", "rank": 95, "score": 135939.16454906567 }, { "content": "fn generate_struct_default_code(st: &GenerateStructResult) -> String {\n\n let mut result_string = String::default();\n\n result_string += &format!(\"impl Default for {} {{\\n\", st.name);\n\n result_string += &format!(\" fn default() -> Self {{\\n\");\n\n result_string += &format!(\" {} {{\\n\", st.name);\n\n for m in &st.members {\n\n //result_string += &format!(\" {}: {}::default(),\\n\", &m.name, &m.ty);\n\n result_string += &format!(\" {}: {},\\n\", &m.name, m.default_value);\n\n }\n\n result_string += &format!(\" }}\\n\");\n\n result_string += &format!(\" }}\\n\");\n\n result_string += &format!(\"}}\\n\\n\");\n\n result_string\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 96, "score": 135072.35678016974 }, { "content": "fn generate_struct_test_code(st: &GenerateStructResult) -> String {\n\n use heck::SnakeCase;\n\n let mut result_string = String::default();\n\n result_string += &format!(\n\n \"\\n #[test]\\n fn test_struct_{}() {{\\n\",\n\n st.name.to_snake_case()\n\n );\n\n result_string += &format!(\n\n \" assert_eq!(std::mem::size_of::<{}>(), 
{});\\n\",\n\n st.name, st.size\n\n );\n\n for m in &st.members {\n\n result_string += &format!(\n\n \" assert_eq!(std::mem::size_of::<{}>(), {});\\n\",\n\n m.ty, m.size\n\n );\n\n result_string += &format!(\n\n \" assert_eq!(std::mem::align_of::<{}>(), {});\\n\",\n\n m.ty, m.align\n\n );\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 97, "score": 135072.35678016974 }, { "content": "pub fn array_layer_to_cube_map_target(array_layer: u16) -> GLenum {\n\n if array_layer > 5 {\n\n unimplemented!(\"GL ES 2.0 does not support more than 6 images for a cubemap\")\n\n }\n\n\n\n GL_CUBE_MAP_TARGETS[array_layer as usize]\n\n}\n", "file_path": "rafx-api/src/backends/gles2/internal/conversions.rs", "rank": 98, "score": 134767.17291207996 }, { "content": "#[allow(dead_code)]\n\npub fn byte_size_of_type(gl_type: GLenum) -> u32 {\n\n match gl_type {\n\n gles2_bindings::INT | gles2_bindings::BOOL | gles2_bindings::FLOAT => 4,\n\n gles2_bindings::INT_VEC2 | gles2_bindings::BOOL_VEC2 | gles2_bindings::FLOAT_VEC2 => 8,\n\n gles2_bindings::INT_VEC3\n\n | gles2_bindings::BOOL_VEC3\n\n | gles2_bindings::FLOAT_VEC3\n\n | gles2_bindings::INT_VEC4\n\n | gles2_bindings::BOOL_VEC4\n\n | gles2_bindings::FLOAT_VEC4 => 16,\n\n gles2_bindings::FLOAT_MAT2 => 32,\n\n gles2_bindings::FLOAT_MAT3 => 48,\n\n gles2_bindings::FLOAT_MAT4 => 64,\n\n _ => unimplemented!(\"Unknown GL type in byte_size_of_type\"),\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/gles2/internal/gl_type_util.rs", "rank": 99, "score": 134767.17291207996 } ]
Rust
core/bin/zksync_api/src/api_server/rest/v02/block.rs
w2k-star-forks/zksync
28b7402198a0eb5201f15e35183a73d4f6b983ca
use std::str::FromStr; use std::time::Instant; use actix_web::{web, Scope}; use zksync_api_types::v02::{ block::{BlockInfo, BlockStatus}, pagination::{parse_query, ApiEither, BlockAndTxHash, Paginated, PaginationQuery}, transaction::{Transaction, TxData, TxHashSerializeWrapper}, }; use zksync_crypto::{convert::FeConvert, Fr}; use zksync_storage::{chain::block::records::StorageBlockDetails, ConnectionPool, QueryResult}; use zksync_types::{tx::TxHash, BlockNumber, H256}; use super::{ error::{Error, InvalidDataError}, paginate_trait::Paginate, response::ApiResult, }; use crate::{api_try, utils::block_details_cache::BlockDetailsCache}; pub fn block_info_from_details(details: StorageBlockDetails) -> BlockInfo { let status = if details.is_verified() { BlockStatus::Finalized } else { BlockStatus::Committed }; BlockInfo { block_number: BlockNumber(details.block_number as u32), new_state_root: Fr::from_bytes(&details.new_state_root).unwrap_or_else(|err| { panic!( "Database provided an incorrect new_state_root field: {:?}, an error occurred {}", details.new_state_root, err ) }), block_size: details.block_size as u64, commit_tx_hash: details.commit_tx_hash.map(|bytes| H256::from_slice(&bytes)), verify_tx_hash: details.verify_tx_hash.map(|bytes| H256::from_slice(&bytes)), committed_at: details.committed_at, finalized_at: details.verified_at, status, } } #[derive(Debug, Clone)] struct ApiBlockData { pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache, } impl ApiBlockData { fn new(pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache) -> Self { Self { pool, verified_blocks_cache, } } async fn block_info(&self, block_number: BlockNumber) -> Result<Option<BlockInfo>, Error> { let details = self .verified_blocks_cache .get(&self.pool, block_number) .await .map_err(Error::storage)?; if let Some(details) = details { Ok(Some(block_info_from_details(details))) } else { Ok(None) } } async fn get_block_number_by_position( &self, block_position: &str, ) -> 
Result<BlockNumber, Error> { if let Ok(number) = u32::from_str(block_position) { Ok(BlockNumber(number)) } else { match block_position { "lastCommitted" => self .get_last_committed_block_number() .await .map_err(Error::storage), "lastFinalized" => self .get_last_finalized_block_number() .await .map_err(Error::storage), _ => Err(Error::from(InvalidDataError::InvalidBlockPosition)), } } } async fn block_page( &self, query: PaginationQuery<ApiEither<BlockNumber>>, ) -> Result<Paginated<BlockInfo, BlockNumber>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; storage.paginate_checked(&query).await } async fn transaction_page( &self, block_number: BlockNumber, query: PaginationQuery<ApiEither<TxHash>>, ) -> Result<Paginated<Transaction, TxHashSerializeWrapper>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; let new_query = PaginationQuery { from: BlockAndTxHash { block_number, tx_hash: query.from, }, limit: query.limit, direction: query.direction, }; storage.paginate_checked(&new_query).await } async fn tx_data( &self, block_number: BlockNumber, block_index: u64, ) -> Result<Option<TxData>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; Ok(storage .chain() .operations_ext_schema() .tx_data_by_block_and_index_api_v02(block_number, block_index) .await .map_err(Error::storage)?) 
} async fn get_last_committed_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_committed_confirmed_block() .await } async fn get_last_finalized_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_verified_confirmed_block() .await } } async fn block_pagination( data: web::Data<ApiBlockData>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<BlockInfo, BlockNumber>> { let start = Instant::now(); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.block_page(query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_pagination"); res } async fn block_by_position( data: web::Data<ApiBlockData>, block_position: web::Path<String>, ) -> ApiResult<Option<BlockInfo>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let res = data.block_info(block_number).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_by_position"); res } async fn block_transactions( data: web::Data<ApiBlockData>, block_position: web::Path<String>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<Transaction, TxHashSerializeWrapper>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.transaction_page(block_number, query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_transactions"); res } async fn transaction_in_block( data: web::Data<ApiBlockData>, path: web::Path<(BlockNumber, u64)>, ) -> ApiResult<Option<TxData>> { let start = Instant::now(); let (block_number, 
block_index) = *path; let res = api_try!(data.tx_data(block_number, block_index).await); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "transaction_in_block"); ApiResult::Ok(res) } pub fn api_scope(pool: ConnectionPool, cache: BlockDetailsCache) -> Scope { let data = ApiBlockData::new(pool, cache); web::scope("blocks") .app_data(web::Data::new(data)) .route("", web::get().to(block_pagination)) .route("{block_position}", web::get().to(block_by_position)) .route( "{block_position}/transactions", web::get().to(block_transactions), ) .route( "{block_position}/transactions/{block_index}", web::get().to(transaction_in_block), ) } #[cfg(test)] mod tests { use super::*; use crate::api_server::rest::v02::{ test_utils::{deserialize_response_result, TestServerConfig}, SharedData, }; use zksync_api_types::v02::{ pagination::PaginationDirection, transaction::TransactionData, ApiVersion, }; #[actix_rt::test] #[cfg_attr( not(feature = "api_test"), ignore = "Use `zk test rust-api` command to perform this test" )] async fn blocks_scope() -> anyhow::Result<()> { let cfg = TestServerConfig::default(); cfg.fill_database().await?; let shared_data = SharedData { net: cfg.config.chain.eth.network, api_version: ApiVersion::V02, }; let (client, server) = cfg.start_server( |cfg: &TestServerConfig| api_scope(cfg.pool.clone(), BlockDetailsCache::new(10)), Some(shared_data), ); let query = PaginationQuery { from: ApiEither::from(BlockNumber(1)), limit: 3, direction: PaginationDirection::Newer, }; let expected_blocks: Paginated<BlockInfo, BlockNumber> = { let mut storage = cfg.pool.access_storage().await?; storage .paginate_checked(&query) .await .map_err(|err| anyhow::anyhow!(err.message))? 
}; let response = client.block_by_position("2").await?; let block: BlockInfo = deserialize_response_result(response)?; assert_eq!(block, expected_blocks.list[1]); let response = client.block_pagination(&query).await?; let paginated: Paginated<BlockInfo, BlockNumber> = deserialize_response_result(response)?; assert_eq!(paginated, expected_blocks); let block_number = BlockNumber(3); let expected_txs = { let mut storage = cfg.pool.access_storage().await?; storage .chain() .block_schema() .get_block_transactions(block_number) .await? }; assert!(expected_txs.len() >= 3); let tx_hash_str = expected_txs.first().unwrap().tx_hash.as_str(); let tx_hash = TxHash::from_str(tx_hash_str).unwrap(); let query = PaginationQuery { from: ApiEither::from(tx_hash), limit: 2, direction: PaginationDirection::Older, }; let response = client .block_transactions(&query, &*block_number.to_string()) .await?; let paginated: Paginated<Transaction, TxHash> = deserialize_response_result(response)?; assert_eq!(paginated.pagination.count as usize, expected_txs.len()); assert_eq!(paginated.pagination.limit, query.limit); assert_eq!(paginated.list.len(), query.limit as usize); assert_eq!(paginated.pagination.direction, PaginationDirection::Older); assert_eq!(paginated.pagination.from, tx_hash); for (tx, expected_tx) in paginated.list.into_iter().zip(expected_txs.clone()) { assert_eq!( tx.tx_hash.to_string().replace("sync-tx:", "0x"), expected_tx.tx_hash ); assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } for expected_tx in expected_txs { if !expected_tx.success { continue; } let response = client .transaction_in_block( expected_tx.block_number as u32, expected_tx.block_index.unwrap() as u32, ) .await?; let tx: Option<TxData> = 
deserialize_response_result(response)?; let tx = tx.unwrap().tx; assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } server.stop().await; Ok(()) } }
use std::str::FromStr; use std::time::Instant; use actix_web::{web, Scope}; use zksync_api_types::v02::{ block::{BlockInfo, BlockStatus}, pagination::{parse_query, ApiEither, BlockAndTxHash, Paginated, PaginationQuery}, transaction::{Transaction, TxData, TxHashSerializeWrapper}, }; use zksync_crypto::{convert::FeConvert, Fr}; use zksync_storage::{chain::block::records::StorageBlockDetails, ConnectionPool, QueryResult}; use zksync_types::{tx::TxHash, BlockNumber, H256}; use super::{ error::{Error, InvalidDataError}, paginate_trait::Paginate, response::ApiResult, }; use crate::{api_try, utils::block_details_cache::BlockDetailsCache}; pub fn block_info_from_details(details: StorageBlockDetails) -> BlockInfo { let status = if details.is_verified() { BlockStatus::Finalized } else { BlockStatus::Committed }; BlockInfo { block_number: BlockNumber(details.block_number as u32), new_state_root: Fr::from_bytes(&details.new_state_root).unwrap_or_else(|err| { panic!( "Database provided an incorrect new_state_root field: {:?}, an error occurred {}", details.new_state_root, err ) }), block_size: details.block_size as u64, commit_tx_hash: details.commit_tx_hash.map(|bytes| H256::from_slice(&bytes)), verify_tx_hash: details.verify_tx_hash.map(|bytes| H256::from_slice(&bytes)), committed_at: details.committed_at, finalized_at: details.verified_at, status, } } #[derive(Debug, Clone)] struct ApiBlockData { pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache, } impl ApiBlockData { fn new(pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache) -> Self { Self { pool, verified_blocks_cache, } } async fn block_info(&self, block_number: BlockNumber) -> Result<Option<BlockInfo>, Error> { let details = self .verified_blocks_cache .get(&self.pool, block_number) .await .map_err(Error::storage)?; if let Some(details) = details { Ok(Some(block_info_from_details(details))) } else { Ok(None) } } async fn get_block_number_by_position( &self, block_position: &str, ) -> 
Result<BlockNumber, Error> { if let Ok(number) = u32::from_str(block_position) { Ok(BlockNumber(number)) } else { match block_positio
async fn block_page( &self, query: PaginationQuery<ApiEither<BlockNumber>>, ) -> Result<Paginated<BlockInfo, BlockNumber>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; storage.paginate_checked(&query).await } async fn transaction_page( &self, block_number: BlockNumber, query: PaginationQuery<ApiEither<TxHash>>, ) -> Result<Paginated<Transaction, TxHashSerializeWrapper>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; let new_query = PaginationQuery { from: BlockAndTxHash { block_number, tx_hash: query.from, }, limit: query.limit, direction: query.direction, }; storage.paginate_checked(&new_query).await } async fn tx_data( &self, block_number: BlockNumber, block_index: u64, ) -> Result<Option<TxData>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; Ok(storage .chain() .operations_ext_schema() .tx_data_by_block_and_index_api_v02(block_number, block_index) .await .map_err(Error::storage)?) 
} async fn get_last_committed_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_committed_confirmed_block() .await } async fn get_last_finalized_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_verified_confirmed_block() .await } } async fn block_pagination( data: web::Data<ApiBlockData>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<BlockInfo, BlockNumber>> { let start = Instant::now(); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.block_page(query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_pagination"); res } async fn block_by_position( data: web::Data<ApiBlockData>, block_position: web::Path<String>, ) -> ApiResult<Option<BlockInfo>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let res = data.block_info(block_number).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_by_position"); res } async fn block_transactions( data: web::Data<ApiBlockData>, block_position: web::Path<String>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<Transaction, TxHashSerializeWrapper>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.transaction_page(block_number, query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_transactions"); res } async fn transaction_in_block( data: web::Data<ApiBlockData>, path: web::Path<(BlockNumber, u64)>, ) -> ApiResult<Option<TxData>> { let start = Instant::now(); let (block_number, 
block_index) = *path; let res = api_try!(data.tx_data(block_number, block_index).await); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "transaction_in_block"); ApiResult::Ok(res) } pub fn api_scope(pool: ConnectionPool, cache: BlockDetailsCache) -> Scope { let data = ApiBlockData::new(pool, cache); web::scope("blocks") .app_data(web::Data::new(data)) .route("", web::get().to(block_pagination)) .route("{block_position}", web::get().to(block_by_position)) .route( "{block_position}/transactions", web::get().to(block_transactions), ) .route( "{block_position}/transactions/{block_index}", web::get().to(transaction_in_block), ) } #[cfg(test)] mod tests { use super::*; use crate::api_server::rest::v02::{ test_utils::{deserialize_response_result, TestServerConfig}, SharedData, }; use zksync_api_types::v02::{ pagination::PaginationDirection, transaction::TransactionData, ApiVersion, }; #[actix_rt::test] #[cfg_attr( not(feature = "api_test"), ignore = "Use `zk test rust-api` command to perform this test" )] async fn blocks_scope() -> anyhow::Result<()> { let cfg = TestServerConfig::default(); cfg.fill_database().await?; let shared_data = SharedData { net: cfg.config.chain.eth.network, api_version: ApiVersion::V02, }; let (client, server) = cfg.start_server( |cfg: &TestServerConfig| api_scope(cfg.pool.clone(), BlockDetailsCache::new(10)), Some(shared_data), ); let query = PaginationQuery { from: ApiEither::from(BlockNumber(1)), limit: 3, direction: PaginationDirection::Newer, }; let expected_blocks: Paginated<BlockInfo, BlockNumber> = { let mut storage = cfg.pool.access_storage().await?; storage .paginate_checked(&query) .await .map_err(|err| anyhow::anyhow!(err.message))? 
}; let response = client.block_by_position("2").await?; let block: BlockInfo = deserialize_response_result(response)?; assert_eq!(block, expected_blocks.list[1]); let response = client.block_pagination(&query).await?; let paginated: Paginated<BlockInfo, BlockNumber> = deserialize_response_result(response)?; assert_eq!(paginated, expected_blocks); let block_number = BlockNumber(3); let expected_txs = { let mut storage = cfg.pool.access_storage().await?; storage .chain() .block_schema() .get_block_transactions(block_number) .await? }; assert!(expected_txs.len() >= 3); let tx_hash_str = expected_txs.first().unwrap().tx_hash.as_str(); let tx_hash = TxHash::from_str(tx_hash_str).unwrap(); let query = PaginationQuery { from: ApiEither::from(tx_hash), limit: 2, direction: PaginationDirection::Older, }; let response = client .block_transactions(&query, &*block_number.to_string()) .await?; let paginated: Paginated<Transaction, TxHash> = deserialize_response_result(response)?; assert_eq!(paginated.pagination.count as usize, expected_txs.len()); assert_eq!(paginated.pagination.limit, query.limit); assert_eq!(paginated.list.len(), query.limit as usize); assert_eq!(paginated.pagination.direction, PaginationDirection::Older); assert_eq!(paginated.pagination.from, tx_hash); for (tx, expected_tx) in paginated.list.into_iter().zip(expected_txs.clone()) { assert_eq!( tx.tx_hash.to_string().replace("sync-tx:", "0x"), expected_tx.tx_hash ); assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } for expected_tx in expected_txs { if !expected_tx.success { continue; } let response = client .transaction_in_block( expected_tx.block_number as u32, expected_tx.block_index.unwrap() as u32, ) .await?; let tx: Option<TxData> = 
deserialize_response_result(response)?; let tx = tx.unwrap().tx; assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } server.stop().await; Ok(()) } }
n { "lastCommitted" => self .get_last_committed_block_number() .await .map_err(Error::storage), "lastFinalized" => self .get_last_finalized_block_number() .await .map_err(Error::storage), _ => Err(Error::from(InvalidDataError::InvalidBlockPosition)), } } }
function_block-function_prefixed
[ { "content": "pub fn fr_into_u32_low(value: Fr) -> u32 {\n\n let mut be_bytes = [0u8; 32];\n\n value\n\n .into_repr()\n\n .write_be(be_bytes.as_mut())\n\n .expect(\"Write value bytes\");\n\n u32::from_be_bytes([be_bytes[28], be_bytes[29], be_bytes[30], be_bytes[31]])\n\n}\n\n\n\n/// Gathered signature data for calculating the operations in several\n\n/// witness structured (e.g. `TransferWitness` or `WithdrawWitness`).\n\n#[derive(Debug, Clone)]\n\npub struct SigDataInput {\n\n pub first_sig_msg: Fr,\n\n pub second_sig_msg: Fr,\n\n pub third_sig_msg: Fr,\n\n pub signature: SignatureData,\n\n pub signer_pub_key_packed: Vec<Option<bool>>,\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 0, "score": 347630.87177783286 }, { "content": "/// Parses the H256 panicking upon deserialization failure.\n\npub fn hash(addr_str: &str) -> H256 {\n\n H256::from_str(addr_str).expect(\"Incorrect hash string\")\n\n}\n", "file_path": "core/lib/config/src/configs/test_utils.rs", "rank": 1, "score": 334194.0379088875 }, { "content": "// Provides a quasi-random non-zero `Fr` to substitute an incorrect `Fr` value.\n\npub fn incorrect_fr() -> Fr {\n\n Fr::from_str(\"12345\").unwrap()\n\n}\n\n\n\n/// Helper structure to generate `ZkSyncState` and `CircuitAccountTree`.\n\n#[derive(Debug)]\n\npub struct ZkSyncStateGenerator;\n\n\n\nimpl ZkSyncStateGenerator {\n\n fn create_state(accounts: AccountMap) -> (ZkSyncState, CircuitAccountTree) {\n\n let plasma_state = ZkSyncState::from_acc_map(accounts);\n\n\n\n let mut circuit_account_tree =\n\n CircuitAccountTree::new(zksync_crypto::params::account_tree_depth());\n\n for (id, account) in plasma_state.get_accounts() {\n\n circuit_account_tree.insert(id, CircuitAccount::from(account))\n\n }\n\n\n\n (plasma_state, circuit_account_tree)\n\n }\n", "file_path": "core/lib/circuit/src/witness/tests/test_utils.rs", "rank": 2, "score": 322918.97075220884 }, { "content": "fn signing_failed_error(err: impl ToString) -> SignerError 
{\n\n SignerError::SigningFailed(err.to_string())\n\n}\n\n\n\npub struct Signer<S: EthereumSigner> {\n\n pub pubkey_hash: PubKeyHash,\n\n pub address: Address,\n\n pub(crate) private_key: PrivateKey,\n\n pub(crate) eth_signer: Option<S>,\n\n pub(crate) account_id: Option<AccountId>,\n\n}\n\n\n\nimpl<S: EthereumSigner> fmt::Debug for Signer<S> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut pk_contents = Vec::new();\n\n self.private_key\n\n .write(&mut pk_contents)\n\n .expect(\"Failed writing the private key contents\");\n\n f.debug_struct(\"Signer\")\n\n .field(\"pubkey_hash\", &self.pubkey_hash)\n", "file_path": "sdk/zksync-rs/src/signer.rs", "rank": 3, "score": 303909.11444627005 }, { "content": "/// Downloads universal setup in the monomial form of the given power of two (range: SETUP_MIN_POW2..=SETUP_MAX_POW2)\n\npub fn download_universal_setup_monomial_form(power_of_two: u32) -> Result<(), anyhow::Error> {\n\n anyhow::ensure!(\n\n (SETUP_MIN_POW2..=SETUP_MAX_POW2).contains(&power_of_two),\n\n \"setup power of two is not in the correct range\"\n\n );\n\n\n\n let mut retry_op = move || try_to_download_setup(power_of_two);\n\n\n\n let mut response = retry_op\n\n .retry_notify(&mut get_backoff(), |err, next_after: Duration| {\n\n let duration_secs = next_after.as_millis() as f32 / 1000.0f32;\n\n\n\n vlog::warn!(\n\n \"Failed to download setup err: <{}>, retrying after: {:.1}s\",\n\n err,\n\n duration_secs,\n\n )\n\n })\n\n .map_err(|e| {\n\n format_err!(\n\n \"Can't download setup, max elapsed time of the backoff reached: {}\",\n\n e\n\n )\n\n })?;\n\n\n\n fs_utils::save_universal_setup_monomial_file(power_of_two, &mut response)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/network_utils.rs", "rank": 4, "score": 286141.59124655207 }, { "content": "pub fn parse_ether(eth_value: &str) -> Result<BigUint, anyhow::Error> {\n\n let split = eth_value.split('.').collect::<Vec<&str>>();\n\n ensure!(split.len() == 1 || 
split.len() == 2, \"Wrong eth value\");\n\n let string_wei_value = if split.len() == 1 {\n\n format!(\"{}000000000000000000\", split[0])\n\n } else if split.len() == 2 {\n\n let before_dot = split[0];\n\n let after_dot = split[1];\n\n ensure!(\n\n after_dot.len() <= 18,\n\n \"ETH value can have up to 18 digits after dot.\"\n\n );\n\n let zeros_to_pad = 18 - after_dot.len();\n\n format!(\"{}{}{}\", before_dot, after_dot, \"0\".repeat(zeros_to_pad))\n\n } else {\n\n unreachable!()\n\n };\n\n\n\n Ok(BigUint::from_str(&string_wei_value)?)\n\n}\n\n\n\n/// Used to sign and post ETH transactions for the zkSync contracts.\n\n#[derive(Debug, Clone)]\n\npub struct EthereumAccount {\n\n pub private_key: H256,\n\n pub address: Address,\n\n pub main_contract_eth_client: ETHDirectClient<PrivateKeySigner>,\n\n}\n\n\n", "file_path": "core/tests/testkit/src/eth_account.rs", "rank": 5, "score": 285167.3546395779 }, { "content": "/// Returns a corresponding address for a provided network name.\n\npub fn get_rpc_addr(network: Network) -> &'static str {\n\n match network {\n\n Network::Mainnet => \"https://api.zksync.io/jsrpc\",\n\n Network::Rinkeby => \"https://rinkeby-api.zksync.io/jsrpc\",\n\n Network::Ropsten => \"https://ropsten-api.zksync.io/jsrpc\",\n\n Network::Localhost => \"http://127.0.0.1:3030\",\n\n Network::Unknown => panic!(\"Attempt to create a provider from an unknown network\"),\n\n Network::Test => panic!(\"Attempt to create a provider from an test network\"),\n\n }\n\n}\n\n\n\npub type ResponseResult<T> = Result<T, ClientError>;\n\n\n\n#[async_trait]\n", "file_path": "sdk/zksync-rs/src/provider.rs", "rank": 6, "score": 282955.29720213416 }, { "content": "/// Get root hash of the used subtree.\n\npub fn get_used_subtree_root_hash(account_tree: &CircuitAccountTree) -> Fr {\n\n // We take account 0, and hash it with it's Merkle proof.\n\n let account_index = 0;\n\n let account_merkle_path = account_tree.merkle_path(account_index);\n\n let account = 
account_tree.get(account_index).cloned().unwrap_or_default();\n\n let mut current_hash = account_tree.hasher.hash_bits(account.get_bits_le());\n\n for merkle_path_item in account_merkle_path\n\n .iter()\n\n .take(used_account_subtree_depth())\n\n {\n\n current_hash = account_tree\n\n .hasher\n\n .compress(&current_hash, &merkle_path_item.0, 0);\n\n }\n\n current_hash\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 7, "score": 274169.44581253786 }, { "content": "/// Strip the common prefixes off the HEX-encoded string\n\npub fn remove_prefix(hex: &str) -> &str {\n\n if let Some(hex) = hex.strip_prefix(\"0x\") {\n\n hex\n\n } else if let Some(hex) = hex.strip_prefix(\"sync-bl:\") {\n\n hex\n\n } else if let Some(hex) = hex.strip_prefix(\"sync-tx:\") {\n\n hex\n\n } else {\n\n hex\n\n }\n\n}\n", "file_path": "core/lib/utils/src/string.rs", "rank": 8, "score": 271033.648255904 }, { "content": "pub fn empty_account_as_field_elements<E: Engine>() -> Vec<E::Fr> {\n\n let acc = CircuitAccount::<Bn256>::default();\n\n let bits = acc.get_bits_le();\n\n\n\n use crate::franklin_crypto::circuit::multipack;\n\n\n\n multipack::compute_multipacking::<E>(&bits)\n\n}\n\n\n\n/// Representation of the zkSync account used in the `zksync_circuit`.\n\n#[derive(Clone)]\n\npub struct CircuitAccount<E: RescueEngine> {\n\n pub subtree: SparseMerkleTree<Balance<E>, E::Fr, RescueHasher<E>>,\n\n pub nonce: E::Fr,\n\n pub pub_key_hash: E::Fr,\n\n pub address: E::Fr,\n\n}\n\n\n\nimpl<E: RescueEngine> GetBits for CircuitAccount<E> {\n\n fn get_bits_le(&self) -> Vec<bool> {\n", "file_path": "core/lib/crypto/src/circuit/account.rs", "rank": 9, "score": 267863.44753811153 }, { "content": "pub fn deploy_contracts(use_prod_contracts: bool, genesis_root: Fr) -> Contracts {\n\n let mut args = vec![\"run\", \"deploy-testkit\", \"--genesisRoot\"];\n\n let genesis_root = format!(\"0x{}\", genesis_root.to_hex());\n\n args.push(genesis_root.as_str());\n\n if use_prod_contracts {\n\n 
args.push(\"--prodContracts\");\n\n }\n\n let stdout = run_external_command(\"zk\", &args);\n\n\n\n let mut contracts = HashMap::new();\n\n for std_out_line in stdout.split_whitespace().collect::<Vec<_>>() {\n\n if let Some((name, address)) = get_contract_address(std_out_line) {\n\n contracts.insert(name, address);\n\n }\n\n }\n\n\n\n Contracts {\n\n governance: contracts\n\n .remove(\"CONTRACTS_GOVERNANCE_ADDR\")\n\n .expect(\"GOVERNANCE_ADDR missing\"),\n", "file_path": "core/tests/testkit/src/external_commands.rs", "rank": 10, "score": 264863.1808425379 }, { "content": "pub fn generate_sig_witness(bits: &[bool]) -> (Fr, Fr, Fr) {\n\n let mut sig_bits_to_hash = bits.to_vec();\n\n assert!(sig_bits_to_hash.len() < MAX_CIRCUIT_MSG_HASH_BITS);\n\n\n\n sig_bits_to_hash.resize(MAX_CIRCUIT_MSG_HASH_BITS, false);\n\n let (first_sig_part_bits, remaining) = sig_bits_to_hash.split_at(Fr::CAPACITY as usize);\n\n let remaining = remaining.to_vec();\n\n let (second_sig_part_bits, third_sig_part_bits) = remaining.split_at(Fr::CAPACITY as usize);\n\n let first_sig_part: Fr = le_bit_vector_into_field_element(first_sig_part_bits);\n\n let second_sig_part: Fr = le_bit_vector_into_field_element(second_sig_part_bits);\n\n let third_sig_part: Fr = le_bit_vector_into_field_element(third_sig_part_bits);\n\n (first_sig_part, second_sig_part, third_sig_part)\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 11, "score": 263186.65241668443 }, { "content": "struct NodeIndex(pub u64);\n\n\n", "file_path": "core/lib/crypto/src/merkle_tree/parallel_smt.rs", "rank": 12, "score": 261220.3412704951 }, { "content": "pub fn parse_query<T: FromStr + Serialize>(\n\n query: PaginationQuery<String>,\n\n) -> Result<PaginationQuery<ApiEither<T>>, UnknownFromParameter> {\n\n let from = FromStr::from_str(&query.from)?;\n\n Ok(PaginationQuery {\n\n from,\n\n limit: query.limit,\n\n direction: query.direction,\n\n })\n\n}\n\n\n\n#[derive(Debug, Serialize, 
Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct PaginationQuery<Id> {\n\n pub from: Id,\n\n pub limit: u32,\n\n pub direction: PaginationDirection,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]\n", "file_path": "core/lib/api_types/src/v02/pagination.rs", "rank": 13, "score": 258506.5873105028 }, { "content": "pub fn calculate_token_hash(creator_id: AccountId, serial_id: u32, content_hash: H256) -> Vec<u8> {\n\n let mut lhs_be_bits = vec![];\n\n lhs_be_bits.extend_from_slice(&creator_id.0.to_be_bytes());\n\n lhs_be_bits.extend_from_slice(&serial_id.to_be_bytes());\n\n lhs_be_bits.extend_from_slice(&content_hash.as_bytes()[..16]);\n\n let lhs_fr = Fr::from_hex(&format!(\"0x{}\", hex::encode(&lhs_be_bits))).expect(\"lhs as Fr\");\n\n\n\n let mut rhs_be_bits = vec![];\n\n rhs_be_bits.extend_from_slice(&content_hash.as_bytes()[16..]);\n\n let rhs_fr = Fr::from_hex(&format!(\"0x{}\", hex::encode(&rhs_be_bits))).expect(\"rhs as Fr\");\n\n\n\n let hash_result = rescue_hash::<Bn256, 2>(&[lhs_fr, rhs_fr]);\n\n hash_result[0].to_bytes()\n\n}\n", "file_path": "core/lib/types/src/tx/mint_nft.rs", "rank": 14, "score": 258080.64960261417 }, { "content": "/// Parses the address panicking upon deserialization failure.\n\npub fn addr(addr_str: &str) -> Address {\n\n Address::from_str(addr_str).expect(\"Incorrect address string\")\n\n}\n\n\n", "file_path": "core/lib/config/src/configs/test_utils.rs", "rank": 15, "score": 253311.82820617626 }, { "content": "pub fn eth_address_to_fr(address: &Address) -> Fr {\n\n ff::from_hex(&format!(\"{:x}\", address)).unwrap()\n\n}\n", "file_path": "core/lib/crypto/src/circuit/utils.rs", "rank": 16, "score": 253303.7482170963 }, { "content": "/// Parses the provided fixture in a form of `VARIABLE_NAME=variable_value` lines and\n\n/// sets the corresponding environment variables.\n\npub fn set_env(fixture: &str) {\n\n for line in fixture.split('\\n').map(str::trim) {\n\n if line.is_empty() {\n\n // Skip empty 
lines.\n\n continue;\n\n }\n\n\n\n let elements: Vec<_> = line.split('=').collect();\n\n assert_eq!(\n\n elements.len(),\n\n 2,\n\n \"Incorrect line for setting environment variable: {}\",\n\n line\n\n );\n\n\n\n let variable_name = elements[0];\n\n let variable_value = elements[1].trim_matches('\"');\n\n\n\n env::set_var(variable_name, variable_value);\n\n }\n\n}\n\n\n", "file_path": "core/lib/config/src/configs/test_utils.rs", "rank": 17, "score": 250040.03668095212 }, { "content": "#[doc(hidden)]\n\npub fn get_genesis_token_list(network: &str) -> Result<Vec<TokenInfo>, GetGenesisTokenListError> {\n\n let mut file_path = parse_env::<PathBuf>(\"ZKSYNC_HOME\");\n\n file_path.push(\"etc\");\n\n file_path.push(\"tokens\");\n\n file_path.push(network);\n\n file_path.set_extension(\"json\");\n\n Ok(serde_json::from_str(&read_to_string(file_path)?)?)\n\n}\n\n\n\n/// Token price known to the zkSync network.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct TokenPrice {\n\n #[serde(with = \"UnsignedRatioSerializeAsDecimal\")]\n\n pub usd_price: Ratio<BigUint>,\n\n pub last_updated: DateTime<Utc>,\n\n}\n\n\n\n/// Token price known to the zkSync network.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct TokenMarketVolume {\n", "file_path": "core/lib/types/src/tokens.rs", "rank": 18, "score": 248832.54341609497 }, { "content": "pub fn ratio_to_u64(num: Ratio<BigUint>) -> u64 {\n\n let digits = num.to_integer().to_u64_digits();\n\n if digits.is_empty() {\n\n 0\n\n } else {\n\n digits[0]\n\n }\n\n}\n\n\n\n/// The number scaled by which the subsidies are stored in the db\n\nconst SUBSIDY_USD_AMOUNTS_SCALE: u64 = 1_000_000;\n\n\n", "file_path": "core/lib/utils/src/convert.rs", "rank": 19, "score": 246818.58791546983 }, { "content": "pub fn fr_from_bytes(bytes: Vec<u8>) -> Fr {\n\n let mut fr_repr = <Fr as PrimeField>::Repr::default();\n\n fr_repr.read_be(&*bytes).unwrap();\n\n Fr::from_repr(fr_repr).unwrap()\n\n}\n\n\n", "file_path": 
"core/lib/circuit/src/witness/utils.rs", "rank": 20, "score": 246772.64270060963 }, { "content": "pub fn run_operation_counter(connection_pool: ConnectionPool) -> JoinHandle<()> {\n\n tokio::spawn(async move {\n\n loop {\n\n if let Err(e) = prometheus_exporter_iteration(connection_pool.clone()).await {\n\n vlog::error!(\"Prometheus error: {}\", e);\n\n }\n\n sleep(QUERY_INTERVAL).await;\n\n }\n\n })\n\n}\n\n\n\nasync fn prometheus_exporter_iteration(connection_pool: ConnectionPool) -> QueryResult<()> {\n\n let mut storage = connection_pool.access_storage().await?;\n\n let mut transaction = storage.start_transaction().await?;\n\n\n\n let mut block_schema = transaction.chain().block_schema();\n\n\n\n for &action in &[CommitBlocks, ExecuteBlocks] {\n\n for &is_confirmed in &[false, true] {\n\n let result = block_schema\n", "file_path": "core/lib/prometheus_exporter/src/lib.rs", "rank": 21, "score": 246635.4971595141 }, { "content": "pub fn scaled_u64_to_ratio(num: u64) -> Ratio<BigUint> {\n\n Ratio::from(BigUint::from(num)) / BigUint::from(SUBSIDY_USD_AMOUNTS_SCALE)\n\n}\n\n\n", "file_path": "core/lib/utils/src/convert.rs", "rank": 22, "score": 244508.4837088372 }, { "content": "pub fn ratio_to_scaled_u64(num: Ratio<BigUint>) -> u64 {\n\n let scale = BigUint::from(SUBSIDY_USD_AMOUNTS_SCALE);\n\n let scaled_num = num * scale;\n\n\n\n ratio_to_u64(scaled_num)\n\n}\n\n\n", "file_path": "core/lib/utils/src/convert.rs", "rank": 23, "score": 244508.4837088372 }, { "content": "/// Obtains the environment variable value.\n\n/// Panics if there is no environment variable with provided name set.\n\npub fn get_env(name: &str) -> String {\n\n env::var(name).unwrap_or_else(|e| panic!(\"Env var {} missing, {}\", name, e))\n\n}\n\n\n", "file_path": "core/lib/utils/src/env_tools.rs", "rank": 24, "score": 242425.8349408357 }, { "content": "pub fn fr_from<T: ToString>(input: T) -> Fr {\n\n Fr::from_str(&input.to_string()).unwrap()\n\n}\n\n\n", "file_path": 
"core/lib/circuit/src/witness/utils.rs", "rank": 25, "score": 241112.44915737503 }, { "content": "fn pub_key_hash_self<E: JubjubEngine, H: Hasher<E::Fr>>(\n\n pub_key: &PublicKey<E>,\n\n hasher: &H,\n\n) -> Vec<bool> {\n\n let (pub_x, pub_y) = pub_key.0.into_xy();\n\n let input = vec![pub_x, pub_y];\n\n let pub_key_hash = hasher.hash_elements(input);\n\n let mut pub_key_hash_bits = vec![];\n\n append_le_fixed_width(\n\n &mut pub_key_hash_bits,\n\n &pub_key_hash,\n\n params::NEW_PUBKEY_HASH_WIDTH,\n\n );\n\n pub_key_hash_bits\n\n}\n\n\n", "file_path": "core/lib/crypto/src/circuit/utils.rs", "rank": 26, "score": 238382.90198827646 }, { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/utils.rs", "rank": 27, "score": 237633.8384735445 }, { "content": "pub fn js_revert_reason(tx_hash: &H256) -> String {\n\n let web3_urls =\n\n std::env::var(\"ETH_CLIENT_WEB3_URL\").expect(\"ETH_CLIENT_WEB3_URL should be installed\");\n\n let web3_urls: Vec<&str> = web3_urls.split(',').collect();\n\n run_external_command(\n\n \"zk\",\n\n &[\n\n \"run\",\n\n \"revert-reason\",\n\n &format!(\"0x{:x}\", tx_hash),\n\n web3_urls.first().expect(\"At least one should exist\"),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "core/tests/testkit/src/external_commands.rs", "rank": 28, "score": 237429.50649509917 }, { "content": "/// Formats amount in wei to tokens.\n\n/// Behaves just like js ethers.utils.formatEther\n\npub fn format_ether(wei: impl ToString) -> String {\n\n format_units(wei, 18)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use 
super::*;\n\n\n\n #[test]\n\n fn test_format_units() {\n\n // Test vector of (decimals, wei input, expected output)\n\n let vals = vec![\n\n (0, \"1000000000000000100000\", \"1000000000000000100000.0\"),\n\n (1, \"0\", \"0.0\"),\n\n (1, \"11000000000000000000\", \"1100000000000000000.0\"),\n\n (2, \"0\", \"0.0\"),\n\n (2, \"1000000000000000100000\", \"10000000000000001000.0\"),\n\n (4, \"10001000000\", \"1000100.0\"),\n\n (4, \"10100000000000000000000\", \"1010000000000000000.0\"),\n\n (4, \"110\", \"0.011\"),\n", "file_path": "core/lib/utils/src/format.rs", "rank": 29, "score": 235934.32624096956 }, { "content": "/// Obtains the environment variable value and parses it using the `FromStr` type implementation.\n\n/// Panics if there is no environment variable with provided name set, or the value cannot be parsed.\n\npub fn parse_env<F>(name: &str) -> F\n\nwhere\n\n F: FromStr,\n\n F::Err: std::fmt::Debug,\n\n{\n\n get_env(name)\n\n .parse()\n\n .unwrap_or_else(|e| panic!(\"Failed to parse environment variable {}: {:?}\", name, e))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_env_tools() {\n\n const KEY: &str = \"KEY\";\n\n // Our test environment variable.\n\n env::set_var(KEY, \"123\");\n\n assert_eq!(get_env(KEY), \"123\");\n\n assert_eq!(parse_env::<i32>(KEY), 123);\n\n }\n\n}\n", "file_path": "core/lib/utils/src/env_tools.rs", "rank": 30, "score": 233354.90513099334 }, { "content": "/// Creates a dummy ethereum operation hash based on its number.\n\npub fn dummy_ethereum_tx_hash(ethereum_op_id: i64) -> H256 {\n\n H256::from_low_u64_ne(ethereum_op_id as u64)\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 31, "score": 232736.16765486472 }, { "content": "/// handy function to get file path in file_dump dir\n\npub fn get_path_in_file_dump_dir(filename: &str) -> PathBuf {\n\n let mut base_dir = std::env::var(\"ZKSYNC_HOME\")\n\n .map(PathBuf::from)\n\n .unwrap_or_else(|_| 
std::env::current_dir().expect(\"Current dir not set\"));\n\n base_dir.push(\"core\");\n\n base_dir.push(\"circuit\");\n\n base_dir.push(\"src\");\n\n base_dir.push(\"playground\");\n\n base_dir.push(\"file_dump\");\n\n base_dir.push(filename);\n\n base_dir\n\n}\n\n\n\npub mod plonk_playground;\n", "file_path": "core/lib/circuit/src/playground/mod.rs", "rank": 32, "score": 232705.33049821077 }, { "content": "/// Creates a dummy new root hash for the block based on its number.\n\npub fn dummy_root_hash_for_block(block_number: BlockNumber) -> Fr {\n\n Fr::from_str(&block_number.to_string()).unwrap()\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 33, "score": 232697.56804828095 }, { "content": "fn try_to_download_setup(power_of_two: u32) -> Result<Response, backoff::Error<anyhow::Error>> {\n\n let setup_network_dir = std::env::var(\"MISC_PROVER_SETUP_NETWORK_DIR\")\n\n .map_err(|e| backoff::Error::Permanent(e.into()))?;\n\n\n\n let setup_dl_path = format!(\"{}/setup_2%5E{}.key\", setup_network_dir, power_of_two);\n\n\n\n vlog::info!(\"Downloading universal setup from {}\", &setup_dl_path);\n\n\n\n reqwest::blocking::get(&setup_dl_path).map_err(|e| backoff::Error::Transient(e.into()))\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/network_utils.rs", "rank": 34, "score": 231881.72510986836 }, { "content": "/// Generates proof for exit given circuit using step-by-step algorithm.\n\npub fn gen_verified_proof_for_exit_circuit<C: Circuit<Engine> + Clone>(\n\n circuit: C,\n\n) -> Result<SingleProof, anyhow::Error> {\n\n let vk = VerificationKey::read(File::open(get_exodus_verification_key_path())?)?;\n\n\n\n vlog::info!(\"Proof for circuit started\");\n\n\n\n let hints = transpile(circuit.clone())?;\n\n let setup = setup(circuit.clone(), &hints)?;\n\n let size_log2 = setup.n.next_power_of_two().trailing_zeros();\n\n\n\n let size_log2 = std::cmp::max(size_log2, SETUP_MIN_POW2); // for exit circuit\n\n let key_monomial_form = 
get_universal_setup_monomial_form(size_log2, false)?;\n\n\n\n let proof = prove_by_steps::<_, _, RollingKeccakTranscript<Fr>>(\n\n circuit,\n\n &hints,\n\n &setup,\n\n None,\n\n &key_monomial_form,\n\n None,\n\n )?;\n\n\n\n let valid = verify::<_, _, RollingKeccakTranscript<Fr>>(&proof, &vk, None)?;\n\n anyhow::ensure!(valid, \"proof for exit is invalid\");\n\n\n\n vlog::info!(\"Proof for circuit successful\");\n\n Ok(proof.into())\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/lib.rs", "rank": 35, "score": 226226.50479844224 }, { "content": "pub fn pub_key_hash_bytes<E: JubjubEngine, H: Hasher<E::Fr>>(\n\n pub_key: &PublicKey<E>,\n\n hasher: &H,\n\n) -> Vec<u8> {\n\n let pk_hash_bits = pub_key_hash_self(pub_key, hasher);\n\n le_bit_vector_into_bytes(&pk_hash_bits)\n\n}\n\n\n", "file_path": "core/lib/crypto/src/circuit/utils.rs", "rank": 36, "score": 225184.0512606404 }, { "content": "pub fn pub_key_hash_fe<E: JubjubEngine, H: Hasher<E::Fr>>(\n\n pub_key: &PublicKey<E>,\n\n hasher: &H,\n\n) -> E::Fr {\n\n let pk_hash_bits = pub_key_hash_self(pub_key, hasher);\n\n le_bit_vector_into_field_element(&pk_hash_bits)\n\n}\n\n\n", "file_path": "core/lib/crypto/src/circuit/utils.rs", "rank": 37, "score": 225184.0512606404 }, { "content": "/// Depth of the left subtree of the account tree that can be used in the current version of the circuit.\n\npub fn used_account_subtree_depth() -> usize {\n\n let num = 24; // total accounts = 2.pow(num) ~ 16mil\n\n\n\n assert!(num <= account_tree_depth());\n\n\n\n num\n\n}\n\n\n", "file_path": "core/lib/crypto/src/params.rs", "rank": 38, "score": 224924.2048362469 }, { "content": "/// Formats amount in wei to tokens with precision.\n\n/// Behaves just like ethers.utils.formatUnits\n\npub fn format_units(wei: impl ToString, units: u8) -> String {\n\n let mut chars: VecDeque<char> = wei.to_string().chars().collect();\n\n\n\n while chars.len() < units as usize {\n\n chars.push_front('0');\n\n }\n\n chars.insert(chars.len() - units as 
usize, '.');\n\n if *chars.front().unwrap() == '.' {\n\n chars.push_front('0');\n\n }\n\n while *chars.back().unwrap() == '0' {\n\n chars.pop_back();\n\n }\n\n if *chars.back().unwrap() == '.' {\n\n chars.push_back('0');\n\n }\n\n chars.iter().collect()\n\n}\n\n\n", "file_path": "core/lib/utils/src/format.rs", "rank": 39, "score": 220300.4167127405 }, { "content": "pub fn reverse_bytes<T: Clone>(bits: &[T]) -> Vec<T> {\n\n bits.chunks(8)\n\n .rev()\n\n .map(|x| x.to_vec())\n\n .fold(Vec::new(), |mut acc, mut byte| {\n\n acc.append(&mut byte);\n\n acc\n\n })\n\n}\n\n\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 40, "score": 220296.5295934796 }, { "content": "fn assert_tx_signature(signature: &TxSignature, expected_pub: &str, expected_sig: &str) {\n\n let TxSignature { pub_key, signature } = signature;\n\n\n\n let pub_point = pub_key.serialize_packed().unwrap();\n\n assert_eq!(hex::encode(pub_point), expected_pub);\n\n\n\n let packed_sig = signature.serialize_packed().unwrap();\n\n assert_eq!(hex::encode(packed_sig), expected_sig);\n\n}\n\n\n\n#[cfg(test)]\n\nmod primitives_with_vectors {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_signature() {\n\n let test_vectors = TestVectorsConfig::load();\n\n for TestEntry { inputs, outputs } in test_vectors.crypto_primitives.items {\n\n let private_key =\n\n private_key_from_seed(&inputs.seed).expect(\"Cannot get key from seed\");\n", "file_path": "sdk/zksync-rs/tests/unit.rs", "rank": 41, "score": 218496.52236742026 }, { "content": "fn pk_to_address(eth_pk: &H256) -> Address {\n\n PackedEthSignature::address_from_private_key(eth_pk)\n\n .expect(\"Can't get an address from the private key\")\n\n}\n", "file_path": "core/tests/loadnext/src/account_pool.rs", "rank": 42, "score": 217222.88927928964 }, { "content": "pub fn spawn_panic_handler() -> (JoinHandle<()>, mpsc::Sender<bool>) {\n\n let (panic_sender, mut panic_receiver) = mpsc::channel(1);\n\n\n\n let handler = tokio::spawn(async move {\n\n 
panic_receiver.next().await.unwrap();\n\n });\n\n (handler, panic_sender)\n\n}\n", "file_path": "core/lib/utils/src/panic_notify.rs", "rank": 43, "score": 213367.557531341 }, { "content": "fn get_universal_setup_monomial_file_name(power_of_two: u32) -> Result<String, anyhow::Error> {\n\n anyhow::ensure!(\n\n (SETUP_MIN_POW2..=SETUP_MAX_POW2).contains(&power_of_two),\n\n \"setup power of two is not in the correct range\"\n\n );\n\n Ok(format!(\"setup_2^{}.key\", power_of_two))\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/fs_utils.rs", "rank": 44, "score": 212681.2956532669 }, { "content": "pub fn noop_operation(tree: &CircuitAccountTree, acc_id: u32) -> Operation<Bn256> {\n\n let signature_data = SignatureData::init_empty();\n\n let first_sig_msg = Fr::zero();\n\n let second_sig_msg = Fr::zero();\n\n let third_sig_msg = Fr::zero();\n\n let signer_pub_key_packed = [Some(false); 256];\n\n\n\n let acc = tree.get(acc_id).unwrap();\n\n let account_address_fe = fr_from(acc_id);\n\n let token_fe = Fr::zero();\n\n let balance_value = match acc.subtree.get(0) {\n\n None => Fr::zero(),\n\n Some(bal) => bal.value,\n\n };\n\n let pubdata = vec![false; CHUNK_BIT_WIDTH];\n\n let pubdata_chunks: Vec<_> = pubdata\n\n .chunks(CHUNK_BIT_WIDTH)\n\n .map(|x| le_bit_vector_into_field_element(&x.to_vec()))\n\n .collect();\n\n let (audit_account, audit_balance) = get_audits(tree, acc_id, 0);\n", "file_path": "core/lib/circuit/src/witness/noop.rs", "rank": 45, "score": 211186.22244540032 }, { "content": "pub fn big_decimal_to_ratio(num: &BigDecimal) -> Result<Ratio<BigUint>, anyhow::Error> {\n\n let (big_int, exp) = num.as_bigint_and_exponent();\n\n anyhow::ensure!(!big_int.is_negative(), \"BigDecimal should be unsigned\");\n\n let big_uint = big_int.to_biguint().unwrap();\n\n let ten_pow = BigUint::from(10_u32).pow(exp as u128);\n\n Ok(Ratio::new(big_uint, ten_pow))\n\n}\n\n\n", "file_path": "core/lib/utils/src/convert.rs", "rank": 46, "score": 206767.14364243418 }, { "content": 
"pub fn append_le_fixed_width(content: &mut Vec<bool>, x: &Fr, width: usize) {\n\n let mut token_bits: Vec<bool> = BitIterator::new(x.into_repr()).collect();\n\n token_bits.reverse();\n\n token_bits.resize(width, false);\n\n content.extend(token_bits);\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/utils.rs", "rank": 47, "score": 205064.39623192843 }, { "content": "pub fn scaled_big_decimal_to_ratio(num: BigDecimal) -> Result<Ratio<BigUint>, anyhow::Error> {\n\n let scale = BigDecimal::from(SUBSIDY_USD_AMOUNTS_SCALE);\n\n\n\n let unscaled = num / scale;\n\n\n\n big_decimal_to_ratio(&unscaled)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::str::FromStr;\n\n\n\n #[test]\n\n fn test_ratio_to_big_decimal() {\n\n let ratio = Ratio::from_integer(BigUint::from(0u32));\n\n let dec = ratio_to_big_decimal(&ratio, 1);\n\n assert_eq!(dec.to_string(), \"0.0\");\n\n let ratio = Ratio::from_integer(BigUint::from(1234u32));\n\n let dec = ratio_to_big_decimal(&ratio, 7);\n", "file_path": "core/lib/utils/src/convert.rs", "rank": 48, "score": 204668.6430750359 }, { "content": "pub fn le_bit_vector_into_field_element<P: PrimeField>(bits: &[bool]) -> P {\n\n // double and add\n\n let mut fe = P::zero();\n\n let mut base = P::one();\n\n\n\n for bit in bits {\n\n if *bit {\n\n fe.add_assign(&base);\n\n }\n\n base.double();\n\n }\n\n\n\n fe\n\n}\n\n\n", "file_path": "core/lib/crypto/src/circuit/utils.rs", "rank": 49, "score": 203280.48040780867 }, { "content": "/// Performs the operation on the circuit, but not on the plasma,\n\n/// since the operation is meant to be incorrect and should result in an error.\n\n/// The error is caught and checked to match the provided message.\n\npub fn incorrect_op_test_scenario<W, F, B>(\n\n accounts: &[WitnessTestAccount],\n\n op: W::OperationType,\n\n input: W::CalculateOpsInput,\n\n expected_msg: &str,\n\n collect_fees: F,\n\n corrupt_witness_builder: B,\n\n) where\n\n W: Witness,\n\n W::CalculateOpsInput: Clone + 
std::fmt::Debug,\n\n F: FnOnce() -> Vec<CollectedFee>,\n\n B: FnOnce(&mut WitnessBuilder),\n\n{\n\n // Initialize WitnessBuilder.\n\n let (_, mut circuit_account_tree) = ZkSyncStateGenerator::generate(accounts);\n\n let mut witness_accum = WitnessBuilder::new(\n\n &mut circuit_account_tree,\n\n FEE_ACCOUNT_ID,\n\n BlockNumber(1),\n\n BLOCK_TIMESTAMP,\n", "file_path": "core/lib/circuit/src/witness/tests/test_utils.rs", "rank": 50, "score": 201620.62084201523 }, { "content": "/// Deserializes either a `String` or `Vec<u8>` into `Vec<u8>`.\n\n/// The reason we cannot expect just a vector is backward compatibility: messages\n\n/// used to be stored as strings.\n\npub fn deserialize_eth_message<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrVec;\n\n\n\n impl<'de> Visitor<'de> for StringOrVec {\n\n type Value = Vec<u8>;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a byte array or a string\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n Ok(v.as_bytes().to_vec())\n\n }\n\n\n", "file_path": "core/lib/types/src/utils.rs", "rank": 51, "score": 199410.9034272461 }, { "content": "fn rescue_hash_elements(input: &[Fr]) -> Fr {\n\n RESCUE_PARAMS.with(|params| {\n\n let sponge_output = rescue_hash::<Engine>(params, input);\n\n assert_eq!(sponge_output.len(), 1, \"rescue hash problem\");\n\n sponge_output[0]\n\n })\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/utils.rs", "rank": 52, "score": 194252.14462125947 }, { "content": "fn nonce_increment(amount: &Fr) -> Fr {\n\n if amount.is_zero() {\n\n Fr::zero()\n\n } else {\n\n Fr::one()\n\n }\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/swap.rs", "rank": 53, "score": 194252.14462125947 }, { "content": "pub fn resize_grow_only<T: Clone>(to_resize: &mut Vec<T>, new_size: usize, pad_with: T) {\n\n assert!(to_resize.len() <= 
new_size);\n\n to_resize.resize(new_size, pad_with);\n\n}\n\n\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 54, "score": 194238.62138190775 }, { "content": "/// Generates a new `PrivateKey` from seed using a deterministic algorithm:\n\n/// seed is hashed via `sha256` hash (twice), and the output treated as a `PrivateKey`.\n\n/// If the obtained value doesn't have a correct value to be a `PrivateKey`, hashing operation is applied\n\n/// repeatedly to the previous output, until the value can be interpreted as a `PrivateKey`.\n\npub fn private_key_from_seed(seed: &[u8]) -> Result<PrivateKey, ClientError> {\n\n if seed.len() < 32 {\n\n return Err(ClientError::SeedTooShort);\n\n }\n\n\n\n let sha256_bytes = |input: &[u8]| {\n\n let mut hasher = Sha256::new();\n\n hasher.input(input);\n\n hasher.result()\n\n };\n\n\n\n let mut effective_seed = sha256_bytes(seed);\n\n\n\n loop {\n\n let raw_priv_key = sha256_bytes(&effective_seed);\n\n let mut fs_repr = FsRepr::default();\n\n fs_repr\n\n .read_be(&raw_priv_key[..])\n\n .expect(\"failed to read raw_priv_key\");\n\n match Fs::from_repr(fs_repr) {\n", "file_path": "sdk/zksync-rs/src/utils.rs", "rank": 55, "score": 193522.00678264344 }, { "content": "fn pack_amount(amount: u128) -> (Fr, Fr) {\n\n let amount_fe = fr_from(amount);\n\n let amount_bits = FloatConversions::to_float(\n\n amount,\n\n AMOUNT_EXPONENT_BIT_WIDTH,\n\n AMOUNT_MANTISSA_BIT_WIDTH,\n\n 10,\n\n )\n\n .unwrap();\n\n let amount_packed: Fr = le_bit_vector_into_field_element(&amount_bits);\n\n (amount_fe, amount_packed)\n\n}\n", "file_path": "core/lib/circuit/src/witness/swap.rs", "rank": 56, "score": 187666.54210432374 }, { "content": "pub fn comma_list_to_vec<T: FromStr>(elems: String) -> Vec<T>\n\nwhere\n\n <T as std::str::FromStr>::Err: Debug,\n\n{\n\n elems\n\n .split(',')\n\n .map(|str| T::from_str(str).expect(\"Failed to deserialize stored item\"))\n\n .collect()\n\n}\n", "file_path": "core/lib/storage/src/forced_exit_requests/utils.rs", 
"rank": 57, "score": 185887.42650054154 }, { "content": "pub fn get_audits(\n\n tree: &CircuitAccountTree,\n\n account_address: u32,\n\n token: u32,\n\n) -> (Vec<Option<Fr>>, Vec<Option<Fr>>) {\n\n let default_account = CircuitAccount::default();\n\n let audit_account: Vec<Option<Fr>> = tree\n\n .merkle_path(account_address)\n\n .into_iter()\n\n .map(|e| Some(e.0))\n\n .collect();\n\n\n\n let audit_balance: Vec<Option<Fr>> = tree\n\n .get(account_address)\n\n .unwrap_or(&default_account)\n\n .subtree\n\n .merkle_path(token)\n\n .into_iter()\n\n .map(|e| Some(e.0))\n\n .collect();\n\n (audit_account, audit_balance)\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 58, "score": 184270.81722172242 }, { "content": "pub fn apply_fee(\n\n tree: &mut CircuitAccountTree,\n\n validator_address: u32,\n\n token: u32,\n\n fee: u128,\n\n) -> (Fr, AccountWitness<Bn256>) {\n\n let fee_fe = fr_from(fee);\n\n let mut validator_leaf = tree\n\n .remove(validator_address)\n\n .expect(\"validator_leaf is empty\");\n\n let validator_account_witness = AccountWitness::from_circuit_account(&validator_leaf);\n\n\n\n let mut balance = validator_leaf.subtree.remove(token).unwrap_or_default();\n\n balance.value.add_assign(&fee_fe);\n\n validator_leaf.subtree.insert(token, balance);\n\n\n\n tree.insert(validator_address, validator_leaf);\n\n\n\n let root_after_fee = tree.root_hash();\n\n (root_after_fee, validator_account_witness)\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 59, "score": 184270.81722172242 }, { "content": "pub fn serialize_single_proof(\n\n proof: &OldProof<Engine, PlonkCsWidth4WithNextStepParams>,\n\n) -> EncodedSingleProof {\n\n let mut inputs = vec![];\n\n for input in proof.input_values.iter() {\n\n let ser = EthereumSerializer::serialize_fe(input);\n\n inputs.push(ser);\n\n }\n\n let mut serialized_proof = vec![];\n\n\n\n for c in proof.wire_commitments.iter() {\n\n let (x, y) = EthereumSerializer::serialize_g1(c);\n\n 
serialized_proof.push(x);\n\n serialized_proof.push(y);\n\n }\n\n\n\n let (x, y) = EthereumSerializer::serialize_g1(&proof.grand_product_commitment);\n\n serialized_proof.push(x);\n\n serialized_proof.push(y);\n\n\n", "file_path": "core/lib/crypto/src/serialization.rs", "rank": 60, "score": 184270.81722172242 }, { "content": "/// This method initializes params for current thread, otherwise they will be initialized when signing\n\n/// first message.\n\npub fn zksync_crypto_init() {\n\n JUBJUB_PARAMS.with(|_| {});\n\n RESCUE_PARAMS.with(|_| {});\n\n set_panic_hook();\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/lib.rs", "rank": 61, "score": 184270.81722172242 }, { "content": "#[must_use]\n\npub fn run_mempool_tx_handler(\n\n db_pool: ConnectionPool,\n\n tx_requests: mpsc::Receiver<MempoolTransactionRequest>,\n\n block_chunk_sizes: Vec<usize>,\n\n) -> JoinHandle<()> {\n\n let mempool_state = MempoolState::new(db_pool.clone());\n\n let max_block_size_chunks = *block_chunk_sizes\n\n .iter()\n\n .max()\n\n .expect(\"failed to find max block chunks size\");\n\n let handler = MempoolTransactionsHandler {\n\n db_pool,\n\n mempool_state,\n\n requests: tx_requests,\n\n max_block_size_chunks,\n\n };\n\n tokio::spawn(handler.run())\n\n}\n\n\n", "file_path": "core/lib/mempool/src/lib.rs", "rank": 62, "score": 182393.94255603393 }, { "content": "#[must_use]\n\npub fn run_mempool_block_handler(\n\n db_pool: ConnectionPool,\n\n block_requests: mpsc::Receiver<MempoolBlocksRequest>,\n\n block_chunk_sizes: Vec<usize>,\n\n) -> JoinHandle<()> {\n\n let mempool_state = MempoolState::new(db_pool);\n\n let max_block_size_chunks = *block_chunk_sizes\n\n .iter()\n\n .max()\n\n .expect(\"failed to find max block chunks size\");\n\n\n\n let blocks_handler = MempoolBlocksHandler {\n\n mempool_state,\n\n requests: block_requests,\n\n max_block_size_chunks,\n\n };\n\n\n\n tokio::spawn(blocks_handler.run())\n\n}\n", "file_path": "core/lib/mempool/src/lib.rs", "rank": 63, "score": 
182393.94255603393 }, { "content": "pub fn append_packed_public_key(\n\n content: &mut Vec<Boolean>,\n\n x_bits: Vec<Boolean>,\n\n y_bits: Vec<Boolean>,\n\n) {\n\n assert_eq!(franklin_constants::FR_BIT_WIDTH - 1, y_bits.len());\n\n assert_eq!(1, x_bits.len());\n\n content.extend(y_bits);\n\n content.extend(x_bits);\n\n}\n\n\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 64, "score": 182387.69562845887 }, { "content": "/// Generates dummy operation with the unique `new_root_hash` in the block.\n\npub fn gen_unique_operation(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n) -> Operation {\n\n gen_unique_operation_with_txs(block_number, action, block_chunks_size, vec![])\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 65, "score": 182387.69562845887 }, { "content": "pub fn gen_sample_block(\n\n block_number: BlockNumber,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> Block {\n\n Block {\n\n block_number,\n\n new_root_hash: dummy_root_hash_for_block(block_number),\n\n fee_account: AccountId(0),\n\n block_transactions: txs,\n\n processed_priority_ops: (0, 0),\n\n block_chunks_size,\n\n commit_gas_limit: 1_000_000.into(),\n\n verify_gas_limit: 1_500_000.into(),\n\n block_commitment: H256::zero(),\n\n timestamp: 0,\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 66, "score": 182387.69562845887 }, { "content": "/// Construct the first part of the message that should be signed by Ethereum key.\n\n/// The pattern is as follows:\n\n///\n\n/// [{Transfer/Withdraw} {amount} {token} to: {to_address}]\n\n/// [Fee: {fee} {token}]\n\n///\n\n/// Note that both lines are optional.\n\npub fn ethereum_sign_message_part(\n\n transaction: &str,\n\n token_symbol: &str,\n\n decimals: u8,\n\n amount: &BigUint,\n\n fee: &BigUint,\n\n to: &Address,\n\n) -> String {\n\n let mut message = if !amount.is_zero() {\n\n format!(\n\n \"{transaction} {amount} {token} to: 
{to:?}\",\n\n transaction = transaction,\n\n amount = format_units(amount, decimals),\n\n token = token_symbol,\n\n to = to\n\n )\n\n } else {\n\n String::new()\n\n };\n\n if !fee.is_zero() {\n", "file_path": "core/lib/types/src/utils.rs", "rank": 67, "score": 182387.69562845887 }, { "content": "/// Creates several random updates for the provided account map,\n\n/// and returns the resulting account map together with the list\n\n/// of generated updates.\n\npub fn apply_random_updates(\n\n mut accounts: AccountMap,\n\n rng: &mut XorShiftRng,\n\n) -> (AccountMap, Vec<(AccountId, AccountUpdate)>) {\n\n let updates = (0..3)\n\n .flat_map(|_| gen_acc_random_updates(rng))\n\n .collect::<AccountUpdates>();\n\n apply_updates(&mut accounts, updates.clone());\n\n (accounts, updates)\n\n}\n\n\n\n/// Here we create updates for blocks 1,2,3 (commit 3 blocks)\n\n/// We apply updates for blocks 1,2 (verify 2 blocks)\n\n/// Make sure that we can get state for all blocks.\n\n#[db_test]\n\nasync fn test_commit_rewind(mut storage: StorageProcessor<'_>) -> QueryResult<()> {\n\n let mut rng = create_rng();\n\n\n\n // Create the input data for three blocks.\n\n // Data for the next block is based on previous block data.\n", "file_path": "core/lib/storage/src/tests/chain/block.rs", "rank": 68, "score": 180576.20183864617 }, { "content": "/// Generates dummy aggregated operation with the unique `new_root_hash` in the block.\n\npub fn gen_unique_aggregated_operation(\n\n block_number: BlockNumber,\n\n action: AggregatedActionType,\n\n block_chunks_size: usize,\n\n) -> AggregatedOperation {\n\n gen_unique_aggregated_operation_with_txs(block_number, action, block_chunks_size, vec![])\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 69, "score": 180570.4100955545 }, { "content": "/// Construct transaction event with the given type, account id, token and\n\n/// status.\n\npub fn get_transaction_event(\n\n tx_type: TransactionType,\n\n account_id: AccountId,\n\n token_id: 
TokenId,\n\n status: TransactionStatus,\n\n) -> ZkSyncEvent {\n\n // Initialize the cell to prevent panic when deserializing\n\n // empty `tx` json.\n\n let tx_event = TransactionEvent {\n\n tx_hash: String::new(),\n\n account_id,\n\n token_id,\n\n block_number: BlockNumber(0),\n\n tx: Default::default(),\n\n status,\n\n fail_reason: None,\n\n created_at: Utc::now(),\n\n tx_type: OnceCell::from(tx_type),\n\n };\n\n ZkSyncEvent {\n\n id: EventId(0),\n\n block_number: BlockNumber(0),\n\n data: EventData::Transaction(tx_event),\n\n }\n\n}\n", "file_path": "core/lib/types/src/event/test_data.rs", "rank": 70, "score": 180570.4100955545 }, { "content": "/// Gets smallest block size given the list of supported chunk sizes.\n\npub fn smallest_block_size_for_chunks(\n\n chunks_used: usize,\n\n available_block_sizes: &[usize],\n\n) -> usize {\n\n for &block_size in available_block_sizes {\n\n if block_size >= chunks_used {\n\n return block_size;\n\n }\n\n }\n\n panic!(\n\n \"Provided chunks amount ({}) cannot fit in one block, maximum available size is {}\",\n\n chunks_used,\n\n available_block_sizes.last().unwrap()\n\n );\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct OnchainOperationsBlockInfo {\n\n pub public_data_offset: u32,\n\n pub eth_witness: Vec<u8>,\n\n}\n\n\n\n/// Additional data attached to block that is not related to the core protocol\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BlockMetadata {\n\n pub fast_processing: bool,\n\n}\n", "file_path": "core/lib/types/src/block/mod.rs", "rank": 71, "score": 180570.4100955545 }, { "content": "// Thread join handle and stop channel sender.\n\npub fn spawn_state_keeper(\n\n fee_account: &Address,\n\n initial_state: ZkSyncStateInitParams,\n\n) -> (JoinHandle<()>, oneshot::Sender<()>, StateKeeperChannels) {\n\n let (proposed_blocks_sender, proposed_blocks_receiver) = mpsc::channel(256);\n\n let (state_keeper_req_sender, state_keeper_req_receiver) = mpsc::channel(256);\n\n let (mempool_req_sender, 
mempool_req_receiver) = mpsc::channel(256);\n\n let (processed_tx_events_sender, processed_tx_events_receiver) = mpsc::channel(256);\n\n\n\n let max_ops_in_block = 1000;\n\n let ops_chunks = vec![\n\n TransferToNewOp::CHUNKS,\n\n TransferOp::CHUNKS,\n\n DepositOp::CHUNKS,\n\n FullExitOp::CHUNKS,\n\n WithdrawOp::CHUNKS,\n\n ];\n\n let mut block_chunks_sizes = (0..max_ops_in_block)\n\n .cartesian_product(ops_chunks)\n\n .map(|(x, y)| x * y)\n", "file_path": "core/tests/testkit/src/state_keeper_utils.rs", "rank": 72, "score": 180570.4100955545 }, { "content": "pub fn gen_aggregate_proof(\n\n single_vks: Vec<Vk>,\n\n proofs: Vec<SingleProofData>,\n\n available_aggregated_proof_sizes: &[(usize, u32)],\n\n download_setup_network: bool,\n\n) -> anyhow::Result<AggregatedProof> {\n\n // proofs: Vec<SingleProofData>,\n\n let mut individual_vk_inputs = Vec::new();\n\n let mut individual_vk_idxs = Vec::new();\n\n for p in &proofs {\n\n let individual_input = {\n\n anyhow::ensure!(\n\n p.proof.0.input_values.len() == 1,\n\n \"Single should have one input\"\n\n );\n\n p.proof.0.input_values[0]\n\n };\n\n individual_vk_inputs.push(individual_input);\n\n individual_vk_idxs.push(p.vk_idx);\n\n }\n", "file_path": "core/lib/prover_utils/src/aggregated_proofs.rs", "rank": 73, "score": 180570.4100955545 }, { "content": "pub fn gen_sample_pending_block(\n\n block_number: BlockNumber,\n\n txs: Vec<ExecutedOperations>,\n\n) -> PendingBlock {\n\n PendingBlock {\n\n number: block_number,\n\n chunks_left: 1,\n\n unprocessed_priority_op_before: 0,\n\n pending_block_iteration: 1,\n\n success_operations: txs,\n\n failed_txs: Vec::new(),\n\n timestamp: 0,\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 74, "score": 180570.4100955545 }, { "content": "/// Construct account event with the given account id, token and status.\n\npub fn get_account_event(\n\n account_id: AccountId,\n\n token_id: Option<TokenId>,\n\n status: AccountStateChangeStatus,\n\n) -> ZkSyncEvent 
{\n\n let (update_type, new_balance) = if token_id.is_some() {\n\n (\n\n AccountStateChangeType::UpdateBalance,\n\n Some(BigDecimal::from(100)),\n\n )\n\n } else {\n\n (AccountStateChangeType::Create, None)\n\n };\n\n let update_details = AccountUpdateDetails {\n\n account_id,\n\n nonce: Nonce(0),\n\n new_pub_key_hash: None,\n\n token_id,\n\n new_balance,\n\n };\n", "file_path": "core/lib/types/src/event/test_data.rs", "rank": 75, "score": 180570.4100955545 }, { "content": "/// Generates dummy operation with the unique `new_root_hash` in the block and\n\n/// given set of transactions..\n\npub fn gen_unique_operation_with_txs(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> Operation {\n\n Operation {\n\n id: None,\n\n action,\n\n block: gen_sample_block(block_number, block_chunks_size, txs),\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 76, "score": 180570.4100955545 }, { "content": "pub fn is_signature_from_address(\n\n signature: &PackedEthSignature,\n\n msg: &[u8],\n\n address: Address,\n\n) -> Result<bool, SignerError> {\n\n let signature_is_correct = signature\n\n .signature_recover_signer(msg)\n\n .map_err(|err| SignerError::RecoverAddress(err.to_string()))?\n\n == address;\n\n Ok(signature_is_correct)\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum AddressOrIndex {\n\n Address(Address),\n\n Index(usize),\n\n}\n\n\n\n/// Describes whether to add a prefix `\\x19Ethereum Signed Message:\\n`\n\n/// when requesting a message signature.\n", "file_path": "core/lib/eth_signer/src/json_rpc_signer.rs", "rank": 77, "score": 180570.4100955545 }, { "content": "pub fn prepare_proof_data(\n\n available_chunks: &[usize],\n\n proofs: Vec<(SingleProof, usize)>,\n\n) -> (Vec<Vk>, Vec<SingleProofData>) {\n\n let all_vks = available_chunks\n\n .iter()\n\n .map(|chunks| {\n\n PlonkVerificationKey::read_verification_key_for_main_circuit(*chunks)\n\n .unwrap()\n\n .0\n\n })\n\n 
.collect::<Vec<_>>();\n\n\n\n let mut single_proof_data = Vec::new();\n\n for (proof, block_size) in proofs {\n\n let (vk_idx, _) = available_chunks\n\n .iter()\n\n .enumerate()\n\n .find(|(_, size)| **size == block_size)\n\n .expect(\"block size not found\");\n\n\n\n single_proof_data.push(SingleProofData { proof, vk_idx });\n\n }\n\n (all_vks, single_proof_data)\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/aggregated_proofs.rs", "rank": 78, "score": 180570.4100955545 }, { "content": "pub fn gen_sample_incomplete_block(\n\n block_number: BlockNumber,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> IncompleteBlock {\n\n IncompleteBlock {\n\n block_number,\n\n fee_account: AccountId(0),\n\n block_transactions: txs,\n\n processed_priority_ops: (0, 1),\n\n block_chunks_size,\n\n commit_gas_limit: 1_000_000.into(),\n\n verify_gas_limit: 1_500_000.into(),\n\n timestamp: 0,\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 79, "score": 180570.4100955545 }, { "content": "pub fn generate_dummy_sig_data(\n\n bits: &[bool],\n\n rescue_hasher: &RescueHasher<Bn256>,\n\n rescue_params: &Bn256RescueParams,\n\n jubjub_params: &AltJubjubBn256,\n\n) -> (SignatureData, Fr, Fr, Fr, Fr, Fr) {\n\n let rng = &mut XorShiftRng::from_seed([0x3dbe_6258, 0x8d31_3d76, 0x3237_db17, 0xe5bc_0654]);\n\n let p_g = FixedGenerators::SpendingKeyGenerator;\n\n let private_key = PrivateKey::<Bn256>(rng.gen());\n\n let sender_pk = PublicKey::from_private(&private_key, p_g, jubjub_params);\n\n let (sender_x, sender_y) = sender_pk.0.into_xy();\n\n let mut sig_bits_to_hash = bits.to_vec();\n\n assert!(sig_bits_to_hash.len() < MAX_CIRCUIT_MSG_HASH_BITS);\n\n\n\n sig_bits_to_hash.resize(MAX_CIRCUIT_MSG_HASH_BITS, false);\n\n let (first_sig_part_bits, remaining) = sig_bits_to_hash.split_at(Fr::CAPACITY as usize);\n\n let remaining = remaining.to_vec();\n\n let (second_sig_part_bits, third_sig_part_bits) = remaining.split_at(Fr::CAPACITY as usize);\n\n 
let first_sig_part: Fr = le_bit_vector_into_field_element(first_sig_part_bits);\n\n let second_sig_part: Fr = le_bit_vector_into_field_element(second_sig_part_bits);\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 80, "score": 180570.4100955545 }, { "content": "/// Runs external command and returns stdout output\n\nfn run_external_command(command: &str, args: &[&str]) -> String {\n\n let result = Command::new(command)\n\n .args(args)\n\n .output()\n\n .unwrap_or_else(|e| panic!(\"failed to execute command: {}, err: {}\", command, e));\n\n\n\n let stdout = String::from_utf8(result.stdout).expect(\"stdout is not valid utf8\");\n\n let stderr = String::from_utf8(result.stderr).expect(\"stderr is not valid utf8\");\n\n\n\n if !result.status.success() {\n\n panic!(\n\n \"failed to run exetrnal command {}:\\nstdout: {}\\nstderr: {}\",\n\n command, stdout, stderr\n\n );\n\n }\n\n stdout\n\n}\n\n\n", "file_path": "core/tests/testkit/src/external_commands.rs", "rank": 81, "score": 180263.2582232459 }, { "content": "pub fn create_exit_proof_fungible(\n\n accounts: AccountMap,\n\n account_id: AccountId,\n\n owner: Address,\n\n token_id: TokenId,\n\n) -> Result<(EncodedSingleProof, BigUint), anyhow::Error> {\n\n create_exit_proof(\n\n accounts,\n\n account_id,\n\n owner,\n\n token_id,\n\n Default::default(),\n\n Default::default(),\n\n Default::default(),\n\n )\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/exit_proof.rs", "rank": 82, "score": 178815.567384067 }, { "content": "/// Reads universal setup from disk or downloads from network.\n\npub fn get_universal_setup_monomial_form(\n\n power_of_two: u32,\n\n download_from_network: bool,\n\n) -> Result<Crs<Engine, CrsForMonomialForm>, anyhow::Error> {\n\n if let Some(cached_setup) = UNIVERSAL_SETUP_CACHE.take_setup_struct(power_of_two) {\n\n Ok(cached_setup)\n\n } else if !download_from_network {\n\n fs_utils::get_universal_setup_monomial_form(power_of_two)\n\n } else {\n\n let start = 
Instant::now();\n\n // try to find cache on disk\n\n let place_for_key;\n\n let res = if let Ok(res) = fs_utils::get_universal_setup_monomial_form(power_of_two) {\n\n place_for_key = \"disk\";\n\n res\n\n } else {\n\n place_for_key = \"remote\";\n\n network_utils::download_universal_setup_monomial_form(power_of_two)?;\n\n fs_utils::get_universal_setup_monomial_form(power_of_two)?\n\n };\n\n metrics::histogram!(\"prover\", start.elapsed(), \"stage\" => \"download_setup\", \"place\" => place_for_key);\n\n Ok(res)\n\n }\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/lib.rs", "rank": 83, "score": 178815.567384067 }, { "content": "/// Generates dummy operation with the unique `new_root_hash` in the block and\n\n/// given set of transactions..\n\npub fn gen_unique_aggregated_operation_with_txs(\n\n block_number: BlockNumber,\n\n action: AggregatedActionType,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> AggregatedOperation {\n\n let block = gen_sample_block(block_number, block_chunks_size, txs);\n\n\n\n match action {\n\n AggregatedActionType::CommitBlocks => {\n\n AggregatedOperation::CommitBlocks(BlocksCommitOperation {\n\n last_committed_block: block.clone(),\n\n blocks: vec![block],\n\n })\n\n }\n\n AggregatedActionType::CreateProofBlocks => {\n\n AggregatedOperation::CreateProofBlocks(BlocksCreateProofOperation {\n\n blocks: vec![block],\n\n proofs_to_pad: 0,\n\n })\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 84, "score": 178815.567384067 }, { "content": "pub fn create_exit_proof_nft(\n\n accounts: AccountMap,\n\n account_id: AccountId,\n\n owner: Address,\n\n token_id: TokenId,\n\n creator_id: AccountId,\n\n serial_id: u32,\n\n content_hash: H256,\n\n) -> Result<(EncodedSingleProof, BigUint), anyhow::Error> {\n\n create_exit_proof(\n\n accounts,\n\n account_id,\n\n owner,\n\n token_id,\n\n creator_id,\n\n serial_id,\n\n content_hash,\n\n )\n\n}\n", "file_path": "core/lib/prover_utils/src/exit_proof.rs", "rank": 85, 
"score": 178815.567384067 }, { "content": "pub fn create_exit_circuit_with_public_input(\n\n account_tree: &mut CircuitAccountTree,\n\n account_id: AccountId,\n\n token_id: TokenId,\n\n nft_creator_id: AccountId,\n\n nft_serial_id: u32,\n\n nft_content_hash: H256,\n\n) -> ZkSyncExitCircuit<'static, Engine> {\n\n let account_address_fe = Fr::from_str(&account_id.to_string()).unwrap();\n\n let creator_account_address_fe = Fr::from_str(&nft_creator_id.to_string()).unwrap();\n\n let token_id_fe = Fr::from_str(&token_id.to_string()).unwrap();\n\n let serial_id_fe = Fr::from_str(&nft_serial_id.to_string()).unwrap();\n\n let root_hash = account_tree.root_hash();\n\n let (account_witness, _, balance, _) =\n\n apply_leaf_operation(account_tree, *account_id, *token_id as u32, |_| {}, |_| {});\n\n let (audit_path, audit_balance_path) = get_audits(account_tree, *account_id, *token_id as u32);\n\n\n\n let (special_account_witness, _, special_account_balance, _) = apply_leaf_operation(\n\n account_tree,\n\n NFT_STORAGE_ACCOUNT_ID.0,\n", "file_path": "core/lib/circuit/src/exit_circuit.rs", "rank": 86, "score": 178815.567384067 }, { "content": "fn web3_addr() -> &'static str {\n\n let ci: u8 = env::var(\"CI\").map_or(0, |s| s.parse().unwrap());\n\n if ci == 1 {\n\n DOCKER_WEB3_ADDR\n\n } else {\n\n LOCALHOST_WEB3_ADDR\n\n }\n\n}\n\n\n", "file_path": "sdk/zksync-rs/tests/integration.rs", "rank": 87, "score": 178049.57956656074 }, { "content": "/// Initialize logging with non blocking tracing and set up log format\n\n///\n\n/// If the sentry URL is provided via an environment variable, this function will also initialize sentry.\n\n/// Returns a VlogGuard guard. 
Which contains Sentry Guard and Logger Guard\n\n///\n\n/// The full description can be found in the official documentation:\n\n/// https://docs.sentry.io/platforms/rust/#configure\n\n/// https://docs.rs/tracing-appender/0.2.2/tracing_appender/non_blocking/index.html\n\npub fn init() -> VlogGuard {\n\n let log_format = std::env::var(\"MISC_LOG_FORMAT\").unwrap_or_else(|_| \"plain\".to_string());\n\n let (non_blocking, _logger_guard) = tracing_appender::non_blocking(std::io::stdout());\n\n match log_format.as_str() {\n\n \"plain\" => {\n\n tracing_subscriber::fmt::Subscriber::builder()\n\n .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())\n\n .with_writer(non_blocking)\n\n .init();\n\n }\n\n \"json\" => {\n\n let timer = tracing_subscriber::fmt::time::ChronoUtc::rfc3339();\n\n tracing_subscriber::fmt::Subscriber::builder()\n\n .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())\n\n .with_writer(non_blocking)\n\n .with_timer(timer)\n\n .json()\n\n .init();\n\n }\n\n _ => panic!(\"MISC_LOG_FORMAT has an unexpected value {}\", log_format),\n", "file_path": "core/lib/vlog/src/lib.rs", "rank": 88, "score": 177144.60046791888 }, { "content": "pub fn erc20_contract() -> Contract {\n\n let abi_string = read_file_to_json_value(IERC20_CONTRACT_FILE)\n\n .expect(\"couldn't read IERC20_CONTRACT_FILE\")\n\n .get(\"abi\")\n\n .expect(\"couldn't get abi from IERC20_CONTRACT_FILE\")\n\n .to_string();\n\n Contract::load(abi_string.as_bytes()).expect(\"erc20 contract abi\")\n\n}\n\n\n", "file_path": "core/lib/contracts/src/lib.rs", "rank": 89, "score": 177139.9645500983 }, { "content": "pub fn eip1271_contract() -> Contract {\n\n let abi_string = read_file_to_json_value(IEIP1271_CONTRACT_FILE)\n\n .expect(\"couldn't read IEIP1271_CONTRACT_FILE\")\n\n .get(\"abi\")\n\n .expect(\"couldn't get abi from IEIP1271_CONTRACT_FILE\")\n\n .to_string();\n\n Contract::load(abi_string.as_bytes()).expect(\"erc20 contract abi\")\n\n}\n", "file_path": 
"core/lib/contracts/src/lib.rs", "rank": 90, "score": 177139.9645500983 }, { "content": "pub fn zksync_contract() -> Contract {\n\n let abi_string = read_file_to_json_value(ZKSYNC_CONTRACT_FILE_V4)\n\n .expect(\"couldn't read ZKSYNC_CONTRACT_FILE_V4\")\n\n .get(\"abi\")\n\n .expect(\"couldn't get abi from ZKSYNC_CONTRACT_FILE_V4\")\n\n .to_string();\n\n Contract::load(abi_string.as_bytes()).expect(\"zksync contract abi\")\n\n}\n\n\n", "file_path": "core/lib/contracts/src/lib.rs", "rank": 91, "score": 177139.9645500983 }, { "content": "/// Number of supported tokens.\n\npub fn total_tokens() -> usize {\n\n 2usize.pow((balance_tree_depth() - 1) as u32) - 2\n\n}\n\n\n\npub const PROCESSABLE_TOKENS_DEPTH: u32 = 10;\n", "file_path": "core/lib/crypto/src/params.rs", "rank": 92, "score": 177139.9645500983 }, { "content": "pub fn sign_sha<E>(\n\n msg_data: &[bool],\n\n private_key: &PrivateKey<E>,\n\n p_g: FixedGenerators,\n\n params: &E::Params,\n\n) -> Option<TransactionSignature<E>>\n\nwhere\n\n E: JubjubEngine,\n\n{\n\n let raw_data: Vec<bool> = msg_data.to_vec();\n\n\n\n let mut message_bytes: Vec<u8> = vec![];\n\n\n\n let byte_chunks = raw_data.chunks(8);\n\n for byte_chunk in byte_chunks {\n\n let mut byte = 0u8;\n\n for (i, bit) in byte_chunk.iter().enumerate() {\n\n if *bit {\n\n byte |= 1 << (7 - i);\n\n }\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 93, "score": 177139.9645500983 }, { "content": "pub fn sign_rescue<E>(\n\n msg_data: &[bool],\n\n private_key: &PrivateKey<E>,\n\n p_g: FixedGenerators,\n\n rescue_params: &<E as RescueEngine>::Params,\n\n jubjub_params: &<E as JubjubEngine>::Params,\n\n) -> SignatureData\n\nwhere\n\n E: RescueEngine + JubjubEngine,\n\n{\n\n let message_bytes = BitConvert::into_bytes(msg_data.to_vec());\n\n\n\n let seed = Seed::deterministic_seed(private_key, &message_bytes);\n\n let signature =\n\n private_key.musig_rescue_sign(&message_bytes, &seed, p_g, rescue_params, jubjub_params);\n\n\n\n let pk = 
PublicKey::from_private(private_key, p_g, jubjub_params);\n\n let _is_valid_signature = pk.verify_musig_rescue(\n\n &message_bytes,\n\n &signature,\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 94, "score": 177139.9645500983 }, { "content": "pub fn upgrade_gatekeeper() -> Contract {\n\n let abi_string = read_file_to_json_value(UPGRADE_GATEKEEPER_CONTRACT_FILE)\n\n .expect(\"couldn't read UPGRADE_GATEKEEPER_CONTRACT_FILE\")\n\n .get(\"abi\")\n\n .expect(\"couldn't get abi from UPGRADE_GATEKEEPER_CONTRACT_FILE\")\n\n .to_string();\n\n Contract::load(abi_string.as_bytes()).expect(\"gatekeeper contract abi\")\n\n}\n\n\n", "file_path": "core/lib/contracts/src/lib.rs", "rank": 95, "score": 177139.9645500983 }, { "content": "pub fn governance_contract() -> Contract {\n\n let abi_string = read_file_to_json_value(GOVERNANCE_CONTRACT_FILE)\n\n .expect(\"couldn't read GOVERNANCE_CONTRACT_FILE\")\n\n .get(\"abi\")\n\n .expect(\"couldn't get abi from GOVERNANCE_CONTRACT_FILE\")\n\n .to_string();\n\n Contract::load(abi_string.as_bytes()).expect(\"governance contract abi\")\n\n}\n\n\n", "file_path": "core/lib/contracts/src/lib.rs", "rank": 96, "score": 177139.9645500983 }, { "content": "pub fn sign_sha256<E>(\n\n msg_data: &[bool],\n\n private_key: &PrivateKey<E>,\n\n p_g: FixedGenerators,\n\n params: &E::Params,\n\n) -> SignatureData\n\nwhere\n\n E: JubjubEngine,\n\n{\n\n let message_bytes = BitConvert::into_bytes(msg_data.to_vec());\n\n\n\n let seed = Seed::deterministic_seed(private_key, &message_bytes);\n\n let signature = private_key.musig_sha256_sign(&message_bytes, &seed, p_g, params);\n\n\n\n let pk = PublicKey::from_private(private_key, p_g, params);\n\n let _is_valid_signature = pk.verify_musig_sha256(&message_bytes, &signature, p_g, params);\n\n\n\n // TODO: handle the case where it is not valid (ZKS-101)\n\n // if !is_valid_signature {\n\n // return None;\n\n // }\n\n let (sig_r_x, sig_r_y) = signature.r.into_xy();\n\n vlog::debug!(\"signature.s: {}\", 
signature.s);\n\n vlog::debug!(\"signature.r.x: {}\", sig_r_x);\n\n vlog::debug!(\"signature.r.y: {}\", sig_r_y);\n\n\n\n convert_signature_to_representation(signature)\n\n}\n\n\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 97, "score": 177139.9645500983 }, { "content": "#[must_use]\n\npub fn run_gateway_watcher_if_multiplexed(\n\n eth_gateway: EthereumGateway,\n\n config: &GatewayWatcherConfig,\n\n) -> Option<JoinHandle<()>> {\n\n if eth_gateway.is_multiplexed() {\n\n Some(run_multiplexed_gateway_watcher(eth_gateway, config))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_same_depth_block_hash_check() {\n\n let h1 = H256::random();\n\n let h2 = H256::random();\n\n let mut b1 = Block::default();\n", "file_path": "core/lib/gateway_watcher/src/multiplexed_gateway_watcher.rs", "rank": 98, "score": 177126.2504307949 }, { "content": "#[must_use]\n\npub fn run_multiplexed_gateway_watcher(\n\n eth_gateway: EthereumGateway,\n\n config: &GatewayWatcherConfig,\n\n) -> JoinHandle<()> {\n\n let gateway_watcher = MultiplexedGatewayWatcher::new(\n\n eth_gateway,\n\n config.check_interval(),\n\n config.retry_delay(),\n\n config.request_timeout(),\n\n Some(config.request_per_task_limit()),\n\n Some(config.task_limit()),\n\n );\n\n\n\n tokio::spawn(gateway_watcher.run())\n\n}\n\n\n\n/// Runs `MultiplexedGatewayWatcher` as a tokio task for provided ethereum gateway if it's multiplexed.\n", "file_path": "core/lib/gateway_watcher/src/multiplexed_gateway_watcher.rs", "rank": 99, "score": 177126.2504307949 } ]
Rust
lib/src/nyengine_audio/src/lib.rs
NyantasticUwU/nyengine
b6a47d2bfb101366eeda1b318e66f09d37317688
mod stream; use crate::stream::OStream; use rodio::{Decoder, OutputStream, OutputStreamHandle, Sample, Sink, Source}; use std::{ ffi::CStr, fmt::Debug, fs::File, io::BufReader, os::raw::{c_char, c_float, c_int, c_void}, ptr, }; #[no_mangle] pub unsafe extern "C" fn na_new_default_ostream() -> *mut c_void { if let Ok((stream, handle)) = OutputStream::try_default() { return Box::into_raw(Box::new(OStream::new(stream, handle))) as *mut c_void; } ptr::null_mut::<c_void>() } #[no_mangle] pub unsafe extern "C" fn na_get_ostream_handle(stream: *mut c_void) -> *mut c_void { if !stream.is_null() { &mut (*(stream as *mut OStream)).handle as *mut OutputStreamHandle as *mut c_void } else { ptr::null_mut::<c_void>() } } #[no_mangle] pub unsafe extern "C" fn na_free_stream(stream: *mut c_void) { Box::<OStream>::from_raw(stream as *mut OStream); } #[no_mangle] pub unsafe extern "C" fn na_new_sink(ostream_handle: *mut c_void) -> *mut c_void { if !ostream_handle.is_null() { if let Ok(sink) = Sink::try_new(&*(ostream_handle as *mut OutputStreamHandle)) { return Box::into_raw(Box::new(sink)) as *mut c_void; } } ptr::null_mut::<c_void>() } unsafe fn append_to_sink<D>(sink: *mut c_void, decoder: D) -> bool where D: Source + Send + 'static, D::Item: Debug + Sample + Send, { if !sink.is_null() { let sink: *mut Sink = sink as *mut Sink; (*sink).append(decoder); return true; } false } #[no_mangle] pub unsafe extern "C" fn na_add_audio_to_sink( file_name: *const c_char, sink: *mut c_void, should_loop: c_int, ) -> c_int { if let Ok(file_name_str) = CStr::from_ptr(file_name).to_str() { if let Ok(file) = File::open(file_name_str) { if should_loop == 0 { if let Ok(decoder) = Decoder::new(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } else { if let Ok(decoder) = Decoder::new_looped(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } } } 1 } #[no_mangle] pub unsafe extern "C" fn na_play_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut 
Sink; (*sink).play(); } #[no_mangle] pub unsafe extern "C" fn na_pause_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).pause(); } #[no_mangle] pub unsafe extern "C" fn na_stop_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).stop(); } #[no_mangle] pub unsafe extern "C" fn na_sleep_sink_until_end(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).sleep_until_end(); } #[no_mangle] pub unsafe extern "C" fn na_is_sink_paused(sink: *mut c_void) -> c_int { let sink: *mut Sink = sink as *mut Sink; (*sink).is_paused() as c_int } #[no_mangle] pub unsafe extern "C" fn na_set_sink_volume(sink: *mut c_void, volume: c_float) { let sink: *mut Sink = sink as *mut Sink; (*sink).set_volume(volume); } #[no_mangle] pub unsafe extern "C" fn na_get_sink_volume(sink: *mut c_void) -> c_float { let sink: *mut Sink = sink as *mut Sink; (*sink).volume() } #[no_mangle] pub unsafe extern "C" fn na_free_sink(sink: *mut c_void) { Box::<Sink>::from_raw(sink as *mut Sink); }
mod stream; use crate::stream::OStream; use rodio::{Decoder, OutputStream, OutputStreamHandle, Sample, Sink, Source}; use std::{ ffi::CStr, fmt::Debug, fs::File, io::BufReader, os::raw::{c_char, c_float, c_int, c_void}, ptr, }; #[no_mangle] pub unsafe extern "C" fn na_new_default_ostream() -> *mut c_void { if let Ok((stream, handle)) = OutputStream::try_default() { return Box::into_raw(Box::new(OStream::new(stream, handle))) as *mut c_void; } ptr::null_mut::<c_void>() } #[no_mangle] pub unsafe extern "C" fn na_get_ostream_handle(stream: *mut c_void) -> *mut c_void { if !stream.is_null() { &mut (*(stream as *mut OStream)).handle as *mut OutputStreamHandle as *mut c_void } else { ptr::null_mut::<c_void>() } } #[no_mangle] pub unsafe extern "C" fn na_free_stream(stream: *mut c_void) { Box::<OStream>::from_raw(stream as *mut OStream); } #[no_mangle] pub unsafe extern "C" fn na_new_sink(ostream_handle: *mut c_void) -> *mut c_void { if !ostream_handle.is_null() { if let Ok(sink) = Sink::try_new(&*(ostream_handle as *mut OutputStream
false } #[no_mangle] pub unsafe extern "C" fn na_add_audio_to_sink( file_name: *const c_char, sink: *mut c_void, should_loop: c_int, ) -> c_int { if let Ok(file_name_str) = CStr::from_ptr(file_name).to_str() { if let Ok(file) = File::open(file_name_str) { if should_loop == 0 { if let Ok(decoder) = Decoder::new(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } else { if let Ok(decoder) = Decoder::new_looped(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } } } 1 } #[no_mangle] pub unsafe extern "C" fn na_play_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).play(); } #[no_mangle] pub unsafe extern "C" fn na_pause_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).pause(); } #[no_mangle] pub unsafe extern "C" fn na_stop_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).stop(); } #[no_mangle] pub unsafe extern "C" fn na_sleep_sink_until_end(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).sleep_until_end(); } #[no_mangle] pub unsafe extern "C" fn na_is_sink_paused(sink: *mut c_void) -> c_int { let sink: *mut Sink = sink as *mut Sink; (*sink).is_paused() as c_int } #[no_mangle] pub unsafe extern "C" fn na_set_sink_volume(sink: *mut c_void, volume: c_float) { let sink: *mut Sink = sink as *mut Sink; (*sink).set_volume(volume); } #[no_mangle] pub unsafe extern "C" fn na_get_sink_volume(sink: *mut c_void) -> c_float { let sink: *mut Sink = sink as *mut Sink; (*sink).volume() } #[no_mangle] pub unsafe extern "C" fn na_free_sink(sink: *mut c_void) { Box::<Sink>::from_raw(sink as *mut Sink); }
Handle)) { return Box::into_raw(Box::new(sink)) as *mut c_void; } } ptr::null_mut::<c_void>() } unsafe fn append_to_sink<D>(sink: *mut c_void, decoder: D) -> bool where D: Source + Send + 'static, D::Item: Debug + Sample + Send, { if !sink.is_null() { let sink: *mut Sink = sink as *mut Sink; (*sink).append(decoder); return true; }
random
[]
Rust
src/runner/command.rs
doy/nbsh
4151ab7aab939a12721a0f4207c87b5c09ace339
use crate::runner::prelude::*; pub struct Command { inner: Inner, exe: std::path::PathBuf, redirects: Vec<crate::parse::Redirect>, pre_exec: Option< Box<dyn FnMut() -> std::io::Result<()> + Send + Sync + 'static>, >, } impl Command { pub fn new(exe: crate::parse::Exe, io: super::builtins::Io) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io).map_or_else( |exe| Self::new_binary(&exe).inner, Inner::Builtin, ), exe: exe_path, redirects, pre_exec: None, } } pub fn new_binary(exe: &crate::parse::Exe) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); let mut cmd = tokio::process::Command::new(exe.exe()); cmd.args(exe.args()); Self { inner: Inner::Binary(cmd), exe: exe_path, redirects, pre_exec: None, } } pub fn new_builtin( exe: crate::parse::Exe, io: super::builtins::Io, ) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io) .map_or_else(|_| todo!(), Inner::Builtin), exe: exe_path, redirects, pre_exec: None, } } pub fn stdin(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdin(fh); } Inner::Builtin(cmd) => { cmd.stdin(fh); } } } pub fn stdout(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdout(fh); } Inner::Builtin(cmd) => { cmd.stdout(fh); } } } pub fn stderr(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stderr(fh); } Inner::Builtin(cmd) => { cmd.stderr(fh); } } } pub unsafe fn pre_exec<F>(&mut self, f: F) where F: 'static + FnMut() -> std::io::Result<()> + Send + Sync, { self.pre_exec = Some(Box::new(f)); } pub fn spawn(self, env: &Env) -> Result<Child> { let Self { inner, exe, redirects, pre_exec, } = self; #[allow(clippy::as_conversions)] let pre_exec = pre_exec.map_or_else( || { let redirects = redirects.clone(); Box::new(move 
|| { apply_redirects(&redirects)?; Ok(()) }) as Box<dyn FnMut() -> std::io::Result<()> + Send + Sync> }, |mut pre_exec| { let redirects = redirects.clone(); Box::new(move || { apply_redirects(&redirects)?; pre_exec()?; Ok(()) }) }, ); match inner { Inner::Binary(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; Ok(Child::Binary(cmd.spawn().map_err(|e| { anyhow!( "{}: {}", crate::format::io_error(&e), exe.display() ) })?)) } Inner::Builtin(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; cmd.apply_redirects(&redirects); Ok(Child::Builtin(cmd.spawn(env)?)) } } } } pub enum Inner { Binary(tokio::process::Command), Builtin(super::builtins::Command), } pub enum Child { Binary(tokio::process::Child), Builtin(super::builtins::Child), } impl Child { pub fn id(&self) -> Option<u32> { match self { Self::Binary(child) => child.id(), Self::Builtin(child) => child.id(), } } pub fn status( self, ) -> std::pin::Pin< Box< dyn std::future::Future<Output = Result<std::process::ExitStatus>> + Send + Sync, >, > { Box::pin(async move { match self { Self::Binary(_) => unreachable!(), Self::Builtin(child) => Ok(child.status().await?), } }) } } fn apply_redirects( redirects: &[crate::parse::Redirect], ) -> std::io::Result<()> { for redirect in redirects { match &redirect.to { crate::parse::RedirectTarget::Fd(fd) => { nix::unistd::dup2(*fd, redirect.from)?; } crate::parse::RedirectTarget::File(path) => { let fd = redirect.dir.open(path)?; if fd != redirect.from { nix::unistd::dup2(fd, redirect.from)?; nix::unistd::close(fd)?; } } } } Ok(()) }
use crate::runner::prelude::*; pub struct Command { inner: Inner, exe: std::path::PathBuf, redirects: Vec<crate::parse::Redirect>, pre_exec: Option< Box<dyn FnMut() -> std::io::Result<()> + Send + Sync + 'static>, >, } impl Command { pub fn new(exe: crate::parse::Exe, io: super::builtins::Io) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io).map_or_else( |exe| Self::new_binary(&exe).inner, Inner::Builtin, ), exe: exe_path, redirects, pre_exec: None, } } pub fn new_binary(exe: &crate::parse::Exe) -> Self { let exe_path = exe.exe().to_pa
F: 'static + FnMut() -> std::io::Result<()> + Send + Sync, { self.pre_exec = Some(Box::new(f)); } pub fn spawn(self, env: &Env) -> Result<Child> { let Self { inner, exe, redirects, pre_exec, } = self; #[allow(clippy::as_conversions)] let pre_exec = pre_exec.map_or_else( || { let redirects = redirects.clone(); Box::new(move || { apply_redirects(&redirects)?; Ok(()) }) as Box<dyn FnMut() -> std::io::Result<()> + Send + Sync> }, |mut pre_exec| { let redirects = redirects.clone(); Box::new(move || { apply_redirects(&redirects)?; pre_exec()?; Ok(()) }) }, ); match inner { Inner::Binary(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; Ok(Child::Binary(cmd.spawn().map_err(|e| { anyhow!( "{}: {}", crate::format::io_error(&e), exe.display() ) })?)) } Inner::Builtin(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; cmd.apply_redirects(&redirects); Ok(Child::Builtin(cmd.spawn(env)?)) } } } } pub enum Inner { Binary(tokio::process::Command), Builtin(super::builtins::Command), } pub enum Child { Binary(tokio::process::Child), Builtin(super::builtins::Child), } impl Child { pub fn id(&self) -> Option<u32> { match self { Self::Binary(child) => child.id(), Self::Builtin(child) => child.id(), } } pub fn status( self, ) -> std::pin::Pin< Box< dyn std::future::Future<Output = Result<std::process::ExitStatus>> + Send + Sync, >, > { Box::pin(async move { match self { Self::Binary(_) => unreachable!(), Self::Builtin(child) => Ok(child.status().await?), } }) } } fn apply_redirects( redirects: &[crate::parse::Redirect], ) -> std::io::Result<()> { for redirect in redirects { match &redirect.to { crate::parse::RedirectTarget::Fd(fd) => { nix::unistd::dup2(*fd, redirect.from)?; } crate::parse::RedirectTarget::File(path) => { let fd = redirect.dir.open(path)?; if fd != redirect.from { nix::unistd::dup2(fd, redirect.from)?; nix::unistd::close(fd)?; } } } } Ok(()) }
th_buf(); let redirects = exe.redirects().to_vec(); let mut cmd = tokio::process::Command::new(exe.exe()); cmd.args(exe.args()); Self { inner: Inner::Binary(cmd), exe: exe_path, redirects, pre_exec: None, } } pub fn new_builtin( exe: crate::parse::Exe, io: super::builtins::Io, ) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io) .map_or_else(|_| todo!(), Inner::Builtin), exe: exe_path, redirects, pre_exec: None, } } pub fn stdin(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdin(fh); } Inner::Builtin(cmd) => { cmd.stdin(fh); } } } pub fn stdout(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdout(fh); } Inner::Builtin(cmd) => { cmd.stdout(fh); } } } pub fn stderr(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stderr(fh); } Inner::Builtin(cmd) => { cmd.stderr(fh); } } } pub unsafe fn pre_exec<F>(&mut self, f: F) where
random
[ { "content": "pub fn setpgid_child(pg: Option<nix::unistd::Pid>) -> std::io::Result<()> {\n\n nix::unistd::setpgid(PID0, pg.unwrap_or(PID0))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 0, "score": 130883.43219710211 }, { "content": "pub fn io_error(e: &std::io::Error) -> String {\n\n let mut s = format!(\"{}\", e);\n\n if e.raw_os_error().is_some() {\n\n let i = s.rfind('(').unwrap();\n\n s.truncate(i - 1);\n\n }\n\n s\n\n}\n", "file_path": "src/format.rs", "rank": 1, "score": 125980.47069663097 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct Redirect {\n\n from: std::os::unix::io::RawFd,\n\n to: Word,\n\n dir: super::Direction,\n\n}\n\n\n\nimpl Redirect {\n\n fn build_ast(pair: pest::iterators::Pair<Rule>) -> Self {\n\n assert!(matches!(pair.as_rule(), Rule::redirect));\n\n let mut iter = pair.into_inner();\n\n\n\n let prefix = iter.next().unwrap().as_str();\n\n let (from, dir) = prefix.strip_suffix(\">>\").map_or_else(\n\n || {\n\n prefix.strip_suffix('>').map_or_else(\n\n || {\n\n (\n\n prefix.strip_suffix('<').unwrap(),\n\n super::Direction::In,\n\n )\n", "file_path": "src/parse/ast.rs", "rank": 2, "score": 114187.55642269195 }, { "content": "fn command(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n let mut cmd = crate::runner::Command::new_binary(&exe);\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 4, "score": 105457.48258375417 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn current_exe() -> Result<std::path::PathBuf> {\n\n Ok(std::env::current_exe()?)\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 5, "score": 100654.27440390576 }, { "content": "pub fn setpgid_parent(\n\n pid: nix::unistd::Pid,\n\n pg: Option<nix::unistd::Pid>,\n\n) -> Result<()> {\n\n nix::unistd::setpgid(pid, pg.unwrap_or(PID0))\n\n // the child already 
called exec, so it must have already called\n\n // setpgid itself\n\n .allow(nix::errno::Errno::EACCES)\n\n // the child already exited, so we don't care\n\n .allow(nix::errno::Errno::ESRCH)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 6, "score": 96433.56383534288 }, { "content": "pub fn pid() -> String {\n\n nix::unistd::getpid().to_string()\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 7, "score": 95007.98500481475 }, { "content": "pub fn user() -> Result<String> {\n\n Ok(users::get_current_username()\n\n .ok_or_else(|| anyhow!(\"couldn't get username\"))?\n\n .to_string_lossy()\n\n .into_owned())\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 8, "score": 89256.40524349514 }, { "content": "pub fn hostname() -> Result<String> {\n\n let mut hostname = hostname::get()?.to_string_lossy().into_owned();\n\n if let Some(idx) = hostname.find('.') {\n\n hostname.truncate(idx);\n\n }\n\n Ok(hostname)\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 9, "score": 89256.40524349514 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\npub fn prompt_char() -> Result<String> {\n\n if users::get_current_uid() == 0 {\n\n Ok(\"#\".into())\n\n } else {\n\n Ok(\"$\".into())\n\n }\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 10, "score": 86946.53959291396 }, { "content": "pub fn channel() -> (Writer, Reader) {\n\n let (event_w, event_r) = tokio::sync::mpsc::unbounded_channel();\n\n (Writer::new(event_w), Reader::new(event_r))\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Writer(tokio::sync::mpsc::UnboundedSender<Event>);\n\n\n\nimpl Writer {\n\n pub fn new(event_w: tokio::sync::mpsc::UnboundedSender<Event>) -> Self {\n\n Self(event_w)\n\n }\n\n\n\n pub fn send(&self, event: Event) {\n\n // the only time this should ever error is when the application is\n\n // shutting down, at which point we don't actually care about any\n\n // further dropped messages\n\n #[allow(clippy::let_underscore_drop)]\n\n let _ = self.0.send(event);\n\n }\n", "file_path": 
"src/shell/event.rs", "rank": 11, "score": 86946.53959291396 }, { "content": "struct InnerReader {\n\n pending: std::sync::Mutex<Pending>,\n\n cvar: tokio::sync::Notify,\n\n}\n\n\n\nimpl InnerReader {\n\n fn new() -> Self {\n\n Self {\n\n pending: std::sync::Mutex::new(Pending::new()),\n\n cvar: tokio::sync::Notify::new(),\n\n }\n\n }\n\n\n\n async fn recv(&self) -> Option<Event> {\n\n loop {\n\n if let Some(event) = self.pending.lock().unwrap().get_event() {\n\n return event;\n\n }\n\n self.cvar.notified().await;\n\n }\n\n }\n\n\n\n fn new_event(&self, event: Option<Event>) {\n\n self.pending.lock().unwrap().new_event(event);\n\n self.cvar.notify_one();\n\n }\n\n}\n\n\n", "file_path": "src/shell/event.rs", "rank": 12, "score": 85525.64410686764 }, { "content": "// the time crate is currently unable to get the local offset on unix due to\n\n// soundness concerns, so we have to do it manually/:\n\n//\n\n// https://github.com/time-rs/time/issues/380\n\npub fn get_offset() -> time::UtcOffset {\n\n let offset_str =\n\n std::process::Command::new(\"date\").args(&[\"+%:z\"]).output();\n\n if let Ok(offset_str) = offset_str {\n\n let offset_str = String::from_utf8(offset_str.stdout).unwrap();\n\n time::UtcOffset::parse(\n\n offset_str.trim(),\n\n &time::format_description::parse(\"[offset_hour]:[offset_minute]\")\n\n .unwrap(),\n\n )\n\n .unwrap_or(time::UtcOffset::UTC)\n\n } else {\n\n time::UtcOffset::UTC\n\n }\n\n}\n", "file_path": "src/info.rs", "rank": 13, "score": 84818.52322976221 }, { "content": "type Builtin = &'static (dyn for<'a> Fn(\n\n crate::parse::Exe,\n\n &'a Env,\n\n command::Cfg,\n\n) -> Result<command::Child>\n\n + Sync\n\n + Send);\n\n\n\n#[allow(clippy::as_conversions)]\n\nstatic BUILTINS: once_cell::sync::Lazy<\n\n std::collections::HashMap<&'static str, Builtin>,\n\n> = once_cell::sync::Lazy::new(|| {\n\n let mut builtins = std::collections::HashMap::new();\n\n builtins.insert(\"cd\", &cd as Builtin);\n\n builtins.insert(\"set\", &set);\n\n 
builtins.insert(\"unset\", &unset);\n\n builtins.insert(\"echo\", &echo);\n\n builtins.insert(\"read\", &read);\n\n builtins.insert(\"and\", &and);\n\n builtins.insert(\"or\", &or);\n", "file_path": "src/runner/builtins/mod.rs", "rank": 14, "score": 81261.74669073822 }, { "content": "pub fn history_file() -> std::path::PathBuf {\n\n data_dir().join(\"history\")\n\n}\n\n\n", "file_path": "src/dirs.rs", "rank": 15, "score": 80148.15857058254 }, { "content": "pub fn config_file() -> std::path::PathBuf {\n\n config_dir().join(\"config.toml\")\n\n}\n\n\n", "file_path": "src/dirs.rs", "rank": 16, "score": 80148.15857058254 }, { "content": "fn home(user: Option<&std::ffi::OsStr>) -> Option<std::path::PathBuf> {\n\n let user = user.map_or_else(\n\n || users::get_user_by_uid(users::get_current_uid()),\n\n users::get_user_by_name,\n\n );\n\n user.map(|user| user.home_dir().to_path_buf())\n\n}\n\n\n\n#[cfg(test)]\n\n#[path = \"test_ast.rs\"]\n\nmod test;\n", "file_path": "src/parse/ast.rs", "rank": 17, "score": 79799.24846206681 }, { "content": "#[test]\n\nfn test_redirect() {\n\n parse_eq!(\n\n \"foo > bar\",\n\n cs!(p!((0, 9), e!(w!(\"foo\") ; r!(1, w!(\"bar\"), Out))))\n\n );\n\n parse_eq!(\n\n \"foo <bar\",\n\n cs!(p!((0, 8), e!(w!(\"foo\") ; r!(0, w!(\"bar\"), In))))\n\n );\n\n parse_eq!(\n\n \"foo > /dev/null 2>&1\",\n\n cs!(p!(\n\n (0, 20),\n\n e!(\n\n w!(\"foo\") ;\n\n r!(1, w!(\"/dev/null\"), Out), r!(2, w!(\"&1\"), Out)\n\n )\n\n ))\n\n );\n\n parse_eq!(\n", "file_path": "src/parse/test_ast.rs", "rank": 18, "score": 77540.83341373538 }, { "content": "pub fn time(time: time::OffsetDateTime) -> String {\n\n let format = if time::OffsetDateTime::now_utc() - time\n\n > std::time::Duration::from_secs(60 * 60 * 24)\n\n {\n\n time::format_description::parse(\n\n \"[year]-[month]-[day] [hour]:[minute]:[second]\",\n\n )\n\n .unwrap()\n\n } else {\n\n time::format_description::parse(\"[hour]:[minute]:[second]\").unwrap()\n\n };\n\n time.format(&format).unwrap()\n\n}\n\n\n", 
"file_path": "src/format.rs", "rank": 19, "score": 76032.08048812984 }, { "content": "pub fn path(path: &std::path::Path) -> String {\n\n let mut path = path.display().to_string();\n\n if let Ok(home) = std::env::var(\"HOME\") {\n\n if path.starts_with(&home) {\n\n path.replace_range(..home.len(), \"~\");\n\n }\n\n }\n\n path\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 20, "score": 75886.32221955054 }, { "content": "pub fn duration(dur: std::time::Duration) -> String {\n\n let secs = dur.as_secs();\n\n let nanos = dur.subsec_nanos();\n\n if secs > 60 {\n\n let mins = secs / 60;\n\n let secs = secs - mins * 60;\n\n format!(\"{}m{}s\", mins, secs)\n\n } else if secs > 9 {\n\n format!(\"{}.{:02}s\", secs, nanos / 10_000_000)\n\n } else if secs > 0 {\n\n format!(\"{}.{:03}s\", secs, nanos / 1_000_000)\n\n } else if nanos >= 1_000_000 {\n\n format!(\"{}ms\", nanos / 1_000_000)\n\n } else if nanos >= 1_000 {\n\n format!(\"{}us\", nanos / 1_000)\n\n } else {\n\n format!(\"{}ns\", nanos)\n\n }\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 21, "score": 75886.32221955054 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\npub fn time(offset: time::UtcOffset) -> Result<String> {\n\n Ok(crate::format::time(\n\n time::OffsetDateTime::now_utc().to_offset(offset),\n\n ))\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 22, "score": 74063.03596732474 }, { "content": "pub fn id_to_pid(id: u32) -> nix::unistd::Pid {\n\n nix::unistd::Pid::from_raw(id.try_into().unwrap())\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 23, "score": 72368.13127793616 }, { "content": "pub fn exit_status(status: std::process::ExitStatus) -> String {\n\n status.signal().map_or_else(\n\n || format!(\"{:03} \", status.code().unwrap()),\n\n |sig| {\n\n nix::sys::signal::Signal::try_from(sig).map_or_else(\n\n |_| format!(\"SIG{} \", sig),\n\n |sig| format!(\"{:4} \", &sig.as_str()[3..]),\n\n )\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 24, "score": 72368.13127793616 
}, { "content": "pub fn set_foreground_pg(pg: nix::unistd::Pid) -> Result<()> {\n\n let pty = nix::fcntl::open(\n\n \"/dev/tty\",\n\n nix::fcntl::OFlag::empty(),\n\n nix::sys::stat::Mode::empty(),\n\n )?;\n\n\n\n // if a background process calls tcsetpgrp, the kernel will send it\n\n // SIGTTOU which suspends it. if that background process is the session\n\n // leader and doesn't have SIGTTOU blocked, the kernel will instead just\n\n // return ENOTTY from the tcsetpgrp call rather than sending a signal to\n\n // avoid deadlocking the process. therefore, we need to ensure that\n\n // SIGTTOU is blocked here.\n\n\n\n // Safety: setting a signal handler to SigIgn is always safe\n\n unsafe {\n\n nix::sys::signal::signal(\n\n nix::sys::signal::Signal::SIGTTOU,\n\n nix::sys::signal::SigHandler::SigIgn,\n\n )?;\n", "file_path": "src/runner/sys.rs", "rank": 25, "score": 70788.5098523994 }, { "content": "fn parse_fd(s: &str) -> std::os::unix::io::RawFd {\n\n match s {\n\n \"in\" => 0,\n\n \"out\" => 1,\n\n \"err\" => 2,\n\n _ => s.parse().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "src/parse/ast.rs", "rank": 26, "score": 70217.29201625701 }, { "content": "pub fn pipe() -> Result<(std::fs::File, std::fs::File)> {\n\n let (r, w) = nix::unistd::pipe2(nix::fcntl::OFlag::O_CLOEXEC)?;\n\n // Safety: these file descriptors were just returned by pipe2 above, and\n\n // are only available in this function, so nothing else can be accessing\n\n // them\n\n Ok((unsafe { std::fs::File::from_raw_fd(r) }, unsafe {\n\n std::fs::File::from_raw_fd(w)\n\n }))\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 27, "score": 67581.31704247482 }, { "content": "pub fn neg_pid(pid: nix::unistd::Pid) -> nix::unistd::Pid {\n\n nix::unistd::Pid::from_raw(-pid.as_raw())\n\n}\n", "file_path": "src/runner/sys.rs", "rank": 28, "score": 66105.60380548141 }, { "content": "#[derive(clap::Parser)]\n\n#[clap(about = \"NoteBook SHell\")]\n\nstruct Opt {\n\n #[clap(short = 'c')]\n\n command: 
Option<String>,\n\n\n\n #[clap(long)]\n\n status_fd: Option<std::os::unix::io::RawFd>,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn async_main(opt: Opt) -> Result<i32> {\n\n if let Some(command) = opt.command {\n\n let mut shell_write = opt.status_fd.and_then(|fd| {\n\n nix::sys::stat::fstat(fd).ok().map(|_| {\n\n // Safety: we don't create File instances for or read/write\n\n // data on this fd anywhere else\n\n unsafe { tokio::fs::File::from_raw_fd(fd) }\n\n })\n\n });\n\n\n\n return runner::main(command, &mut shell_write).await;\n\n }\n\n\n\n #[cfg(nbsh_tokio_console)]\n\n console_subscriber::init();\n\n\n\n shell::main().await\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 29, "score": 58329.55639076253 }, { "content": "#[allow(clippy::option_option)]\n\n#[derive(Default)]\n\nstruct Pending {\n\n key: std::collections::VecDeque<textmode::Key>,\n\n size: Option<(u16, u16)>,\n\n pty_output: bool,\n\n child_run_pipeline: std::collections::VecDeque<(usize, (usize, usize))>,\n\n child_suspend: std::collections::VecDeque<usize>,\n\n child_exit: Option<(usize, super::history::ExitInfo, Option<Env>)>,\n\n git_info: Option<Option<super::inputs::GitInfo>>,\n\n clock_timer: bool,\n\n done: bool,\n\n}\n\n\n\nimpl Pending {\n\n fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n fn get_event(&mut self) -> Option<Option<Event>> {\n\n if self.done {\n\n return Some(None);\n", "file_path": "src/shell/event.rs", "rank": 30, "score": 56794.64783863733 }, { "content": "struct Stack {\n\n frames: Vec<Frame>,\n\n}\n\n\n\nimpl Stack {\n\n fn new() -> Self {\n\n Self { frames: vec![] }\n\n }\n\n\n\n fn push(&mut self, frame: Frame) {\n\n self.frames.push(frame);\n\n }\n\n\n\n fn pop(&mut self) -> Frame {\n\n self.frames.pop().unwrap()\n\n }\n\n\n\n fn top(&self) -> Option<&Frame> {\n\n self.frames.last()\n\n }\n", "file_path": "src/runner/mod.rs", "rank": 31, "score": 56790.2305916849 }, { "content": "#[derive(pest_derive::Parser)]\n\n#[grammar = \"shell.pest\"]\n\nstruct 
Shell;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct Commands {\n\n commands: Vec<Command>,\n\n}\n\n\n\nimpl Commands {\n\n pub fn parse(full_cmd: &str) -> Result<Self, super::Error> {\n\n Ok(Self::build_ast(\n\n Shell::parse(Rule::line, full_cmd)\n\n .map_err(|e| super::Error::new(full_cmd.to_string(), e))?\n\n .next()\n\n .unwrap()\n\n .into_inner()\n\n .next()\n\n .unwrap(),\n\n ))\n\n }\n\n\n", "file_path": "src/parse/ast.rs", "rank": 32, "score": 56790.2305916849 }, { "content": "#[derive(pest_derive::Parser)]\n\n#[grammar = \"history.pest\"]\n\nstruct HistoryLine;\n\n\n\npub struct History {\n\n entries: std::sync::Arc<std::sync::Mutex<Vec<Entry>>>,\n\n}\n\n\n\nimpl History {\n\n pub fn new() -> Self {\n\n let entries = std::sync::Arc::new(std::sync::Mutex::new(vec![]));\n\n tokio::spawn(Self::task(std::sync::Arc::clone(&entries)));\n\n Self { entries }\n\n }\n\n\n\n pub fn entry_count(&self) -> usize {\n\n self.entries.lock().unwrap().len()\n\n }\n\n\n\n async fn task(entries: std::sync::Arc<std::sync::Mutex<Vec<Entry>>>) {\n\n // TODO: we should actually read this in reverse order, because we\n\n // want to populate the most recent entries first\n", "file_path": "src/shell/old_history.rs", "rank": 33, "score": 54106.20749056259 }, { "content": "fn main() {\n\n match async_main(Opt::parse()) {\n\n Ok(code) => {\n\n std::process::exit(code);\n\n }\n\n Err(e) => {\n\n eprintln!(\"nbsh: {}\", e);\n\n std::process::exit(1);\n\n }\n\n };\n\n}\n", "file_path": "src/main.rs", "rank": 34, "score": 52832.71219828648 }, { "content": " pub trait Result {\n\n type T;\n\n type E;\n\n\n\n fn allow(self, allow_e: Self::E) -> Self;\n\n fn allow_with(self, allow_e: Self::E, default_t: Self::T) -> Self;\n\n }\n\n\n\n impl<T, E> Result for std::result::Result<T, E>\n\n where\n\n T: std::default::Default,\n\n E: std::cmp::PartialEq,\n\n {\n\n type T = T;\n\n type E = E;\n\n\n\n fn allow(self, allow_e: Self::E) -> Self {\n\n self.or_else(|e| {\n\n if e == allow_e {\n\n 
Ok(std::default::Default::default())\n", "file_path": "src/prelude.rs", "rank": 35, "score": 52635.26406661101 }, { "content": "fn or(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n if env.latest_status().success() {\n\n let status = env.latest_status();\n\n Ok(command::Child::new_task(move || status))\n\n } else {\n\n let mut cmd = crate::runner::Command::new(exe, cfg.io().clone());\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n }\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 36, "score": 51336.81277274103 }, { "content": "fn and(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n if env.latest_status().success() {\n\n let mut cmd = crate::runner::Command::new(exe, cfg.io().clone());\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n } else {\n\n let status = env.latest_status();\n\n Ok(command::Child::new_task(move || status))\n\n }\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 37, "score": 51336.81277274103 }, { "content": "struct VisibleEntries<'a> {\n\n entries: std::collections::VecDeque<(\n\n usize,\n\n usize,\n\n std::sync::MutexGuard<'a, pty::Vt>,\n\n )>,\n\n}\n\n\n\nimpl<'a> VisibleEntries<'a> {\n\n fn new() -> Self {\n\n Self {\n\n entries: std::collections::VecDeque::new(),\n\n }\n\n }\n\n\n\n fn add(\n\n &mut self,\n\n idx: usize,\n\n offset: usize,\n\n vt: std::sync::MutexGuard<'a, pty::Vt>,\n", "file_path": "src/shell/history/mod.rs", "rank": 38, "score": 51336.529722443025 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn read(\n\n exe: crate::parse::Exe,\n\n _env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n let var = if let Some(var) = exe.args().get(0).map(String::as_str) {\n\n var\n\n } else {\n\n bail!(cfg, 
exe, \"usage: read var\");\n\n };\n\n\n\n let (val, done) = match cfg.io().read_line_stdin() {\n\n Ok((line, done)) => (line, done),\n\n Err(e) => {\n\n bail!(cfg, exe, e);\n\n }\n\n };\n\n\n\n std::env::set_var(var, val);\n\n std::process::ExitStatus::from_raw(if done { 1 << 8 } else { 0 })\n\n }))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 39, "score": 49974.450900760785 }, { "content": "fn spawn_children(\n\n mut cmds: Vec<Command>,\n\n env: &Env,\n\n interactive: bool,\n\n) -> Result<(Vec<Child>, Option<nix::unistd::Pid>)> {\n\n for i in 0..(cmds.len() - 1) {\n\n let (r, w) = sys::pipe()?;\n\n cmds[i].stdout(w);\n\n cmds[i + 1].stdin(r);\n\n }\n\n\n\n let mut children = vec![];\n\n let mut pg_pid = None;\n\n for mut cmd in cmds {\n\n // Safety: setpgid is an async-signal-safe function\n\n unsafe {\n\n cmd.pre_exec(move || {\n\n sys::setpgid_child(pg_pid)?;\n\n Ok(())\n\n });\n", "file_path": "src/runner/mod.rs", "rank": 40, "score": 49974.450900760785 }, { "content": "fn builtin(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n let mut cmd = crate::runner::Command::new_builtin(exe, cfg.io().clone());\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n}\n", "file_path": "src/runner/builtins/mod.rs", "rank": 41, "score": 49974.450900760785 }, { "content": "// mostly just for testing and ensuring that builtins work, i'll likely remove\n\n// this later, since the binary seems totally fine\n\nfn echo(\n\n exe: crate::parse::Exe,\n\n _env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n macro_rules! 
write_stdout {\n\n ($bytes:expr) => {\n\n if let Err(e) = cfg.io().write_stdout($bytes) {\n\n cfg.io()\n\n .write_stderr(format!(\"echo: {}\", e).as_bytes())\n\n .unwrap();\n\n return std::process::ExitStatus::from_raw(1 << 8);\n\n }\n\n };\n\n }\n\n let count = exe.args().len();\n\n for (i, arg) in exe.args().iter().enumerate() {\n\n write_stdout!(arg.as_bytes());\n\n if i == count - 1 {\n\n write_stdout!(b\"\\n\");\n\n } else {\n\n write_stdout!(b\" \");\n\n }\n\n }\n\n\n\n std::process::ExitStatus::from_raw(0)\n\n }))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 42, "score": 49974.450900760785 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn set(\n\n exe: crate::parse::Exe,\n\n _env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n let k = if let Some(k) = exe.args().get(0).map(String::as_str) {\n\n k\n\n } else {\n\n bail!(cfg, exe, \"usage: set key value\");\n\n };\n\n let v = if let Some(v) = exe.args().get(1).map(String::as_str) {\n\n v\n\n } else {\n\n bail!(cfg, exe, \"usage: set key value\");\n\n };\n\n\n\n std::env::set_var(k, v);\n\n std::process::ExitStatus::from_raw(0)\n\n }))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 43, "score": 49974.450900760785 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn cd(\n\n exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n let prev_pwd = env.prev_pwd();\n\n let home = env.var(\"HOME\");\n\n Ok(command::Child::new_task(move || {\n\n let dir = if let Some(dir) = exe.args().get(0) {\n\n if dir.is_empty() {\n\n \".\".to_string().into()\n\n } else if dir == \"-\" {\n\n prev_pwd\n\n } else {\n\n dir.into()\n\n }\n\n } else {\n\n let dir = home;\n\n if let Some(dir) = dir {\n\n dir.into()\n", "file_path": "src/runner/builtins/mod.rs", "rank": 44, "score": 49974.450900760785 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn unset(\n\n exe: 
crate::parse::Exe,\n\n _env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n let k = if let Some(k) = exe.args().get(0).map(String::as_str) {\n\n k\n\n } else {\n\n bail!(cfg, exe, \"usage: unset key\");\n\n };\n\n\n\n std::env::remove_var(k);\n\n std::process::ExitStatus::from_raw(0)\n\n }))\n\n}\n\n\n\n// clippy can't tell that the type is necessary\n\n#[allow(clippy::unnecessary_wraps)]\n", "file_path": "src/runner/builtins/mod.rs", "rank": 45, "score": 49974.450900760785 }, { "content": "#[test]\n\nfn test_parse() {\n\n let entry: Entry =\n\n \": 1646779848:1234.56;vim ~/.zsh_history\".parse().unwrap();\n\n assert_eq!(entry.cmdline, \"vim ~/.zsh_history\");\n\n assert_eq!(\n\n entry.duration,\n\n Some(std::time::Duration::from_nanos(1_234_560_000_000))\n\n );\n\n assert_eq!(\n\n entry.start_time,\n\n Some(time::macros::datetime!(2022-03-08 22:50:48).assume_utc())\n\n );\n\n\n\n let entry: Entry = \": 1646779848:1;vim ~/.zsh_history\".parse().unwrap();\n\n assert_eq!(entry.cmdline, \"vim ~/.zsh_history\");\n\n assert_eq!(entry.duration, Some(std::time::Duration::from_secs(1)));\n\n assert_eq!(\n\n entry.start_time,\n\n Some(time::macros::datetime!(2022-03-08 22:50:48).assume_utc())\n\n );\n\n\n\n let entry: Entry = \"vim ~/.zsh_history\".parse().unwrap();\n\n assert_eq!(entry.cmdline, \"vim ~/.zsh_history\");\n\n assert_eq!(entry.duration, None);\n\n assert_eq!(entry.start_time, None);\n\n}\n", "file_path": "src/shell/old_history.rs", "rank": 46, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_parts() {\n\n parse_eq!(\n\n \"echo \\\"$HOME/bin\\\"\",\n\n cs!(p!((0, 16), e!(w!(\"echo\"), w!(wpv!(\"HOME\"), wpd!(\"/bin\")))))\n\n );\n\n parse_eq!(\n\n \"echo \\\"dir: $HOME/bin\\\"\",\n\n cs!(p!(\n\n (0, 21),\n\n e!(w!(\"echo\"), w!(wpd!(\"dir: \"), wpv!(\"HOME\"), wpd!(\"/bin\")))\n\n ))\n\n );\n\n parse_eq!(\n\n \"echo $HOME/bin\",\n\n cs!(p!((0, 14), e!(w!(\"echo\"), w!(wpv!(\"HOME\"), 
wpb!(\"/bin\")))))\n\n );\n\n parse_eq!(\n\n \"echo '$HOME/bin'\",\n\n cs!(p!((0, 16), e!(w!(\"echo\"), w!(wps!(\"$HOME/bin\")))))\n\n );\n", "file_path": "src/parse/test_ast.rs", "rank": 47, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_escape() {\n\n parse_eq!(\"foo\\\\ bar\", cs!(p!((0, 8), e!(w!(\"foo bar\")))));\n\n parse_eq!(\"'foo\\\\ bar'\", cs!(p!((0, 10), e!(w!(wps!(\"foo\\\\ bar\"))))));\n\n parse_eq!(\"\\\"foo\\\\ bar\\\"\", cs!(p!((0, 10), e!(w!(wpd!(\"foo bar\"))))));\n\n parse_eq!(\"\\\"foo\\\\\\\"bar\\\"\", cs!(p!((0, 10), e!(w!(wpd!(\"foo\\\"bar\"))))));\n\n parse_eq!(\n\n \"'foo\\\\'bar\\\\\\\\'\",\n\n cs!(p!((0, 12), e!(w!(wps!(\"foo'bar\\\\\")))))\n\n );\n\n parse_eq!(\n\n \"foo > bar\\\\ baz\",\n\n cs!(p!((0, 14), e!(w!(\"foo\") ; r!(1, w!(\"bar baz\"), Out))))\n\n );\n\n}\n\n\n", "file_path": "src/parse/test_ast.rs", "rank": 48, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_basic() {\n\n parse_eq!(\"foo\", cs!(p!((0, 3), e!(w!(\"foo\")))));\n\n parse_eq!(\"foo bar\", cs!(p!((0, 7), e!(w!(\"foo\"), w!(\"bar\")))));\n\n parse_eq!(\n\n \"foo bar baz\",\n\n cs!(p!((0, 11), e!(w!(\"foo\"), w!(\"bar\"), w!(\"baz\"))))\n\n );\n\n parse_eq!(\"foo | bar\", cs!(p!((0, 9), e!(w!(\"foo\")), e!(w!(\"bar\")))));\n\n parse_eq!(\n\n \"command ls; perl -E 'say foo' | tr a-z A-Z; builtin echo bar\",\n\n cs!(\n\n p!((0, 10), e!(w!(\"command\"), w!(\"ls\"))),\n\n p!(\n\n (12, 42),\n\n e!(w!(\"perl\"), w!(\"-E\"), w!(wps!(\"say foo\"))),\n\n e!(w!(\"tr\"), w!(\"a-z\"), w!(\"A-Z\"))\n\n ),\n\n p!((44, 60), e!(w!(\"builtin\"), w!(\"echo\"), w!(\"bar\")))\n\n )\n\n );\n", "file_path": "src/parse/test_ast.rs", "rank": 49, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_deserialize() {\n\n deserialize_eq!(\"foo\", e!(w!(\"foo\")));\n\n deserialize_eq!(\"foo bar baz\", e!(w!(\"foo\"), w!(\"bar\"), w!(\"baz\")));\n\n}\n", "file_path": "src/parse/test_ast.rs", "rank": 50, "score": 48728.50943791392 }, { "content": 
"#[test]\n\nfn test_whitespace() {\n\n parse_eq!(\" foo \", cs!(p!((3, 6), e!(w!(\"foo\")))));\n\n parse_eq!(\n\n \" foo # this is a comment\",\n\n cs!(p!((3, 6), e!(w!(\"foo\"))))\n\n );\n\n parse_eq!(\"foo#comment\", cs!(p!((0, 3), e!(w!(\"foo\")))));\n\n parse_eq!(\n\n \"foo;bar|baz;quux#comment\",\n\n cs!(\n\n p!((0, 3), e!(w!(\"foo\"))),\n\n p!((4, 11), e!(w!(\"bar\")), e!(w!(\"baz\"))),\n\n p!((12, 16), e!(w!(\"quux\")))\n\n )\n\n );\n\n parse_eq!(\n\n \"foo | bar \",\n\n cs!(p!((0, 12), e!(w!(\"foo\")), e!(w!(\"bar\"))))\n\n );\n\n parse_eq!(\n", "file_path": "src/parse/test_ast.rs", "rank": 51, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_alternation() {\n\n parse_eq!(\n\n \"echo {foo,bar}\",\n\n cs!(p!((0, 14), e!(w!(\"echo\"), w!(wpa!(w!(\"foo\"), w!(\"bar\"))))))\n\n );\n\n parse_eq!(\n\n \"echo {foo,bar}.rs\",\n\n cs!(p!(\n\n (0, 17),\n\n e!(w!(\"echo\"), w!(wpa!(w!(\"foo\"), w!(\"bar\")), wpb!(\".rs\")))\n\n ))\n\n );\n\n parse_eq!(\n\n \"echo {foo,bar,baz}.rs\",\n\n cs!(p!(\n\n (0, 21),\n\n e!(\n\n w!(\"echo\"),\n\n w!(wpa!(w!(\"foo\"), w!(\"bar\"), w!(\"baz\")), wpb!(\".rs\"))\n\n )\n", "file_path": "src/parse/test_ast.rs", "rank": 52, "score": 48728.50943791392 }, { "content": "fn resize_event() -> Event {\n\n Event::Resize(terminal_size::terminal_size().map_or(\n\n (24, 80),\n\n |(terminal_size::Width(w), terminal_size::Height(h))| (h, w),\n\n ))\n\n}\n", "file_path": "src/shell/inputs/signals.rs", "rank": 53, "score": 45313.39467652333 }, { "content": "fn strip_escape(s: &str) -> String {\n\n let mut new = String::new();\n\n let mut escape = false;\n\n for c in s.chars() {\n\n if escape {\n\n new.push(c);\n\n escape = false;\n\n } else {\n\n match c {\n\n '\\\\' => escape = true,\n\n _ => new.push(c),\n\n }\n\n }\n\n }\n\n new\n\n}\n\n\n", "file_path": "src/parse/ast.rs", "rank": 54, "score": 41991.50321791448 }, { "content": "fn config_dir() -> std::path::PathBuf {\n\n PROJECT_DIRS.config_dir().to_path_buf()\n\n}\n\n\n", 
"file_path": "src/dirs.rs", "rank": 55, "score": 40937.72138189035 }, { "content": "fn strip_basic_escape(s: &str) -> String {\n\n let mut new = String::new();\n\n let mut escape = false;\n\n for c in s.chars() {\n\n if escape {\n\n match c {\n\n '\\\\' | '\\'' => {}\n\n _ => new.push('\\\\'),\n\n }\n\n new.push(c);\n\n escape = false;\n\n } else {\n\n match c {\n\n '\\\\' => escape = true,\n\n _ => new.push(c),\n\n }\n\n }\n\n }\n\n new\n\n}\n\n\n", "file_path": "src/parse/ast.rs", "rank": 56, "score": 40937.72138189035 }, { "content": "fn data_dir() -> std::path::PathBuf {\n\n PROJECT_DIRS.data_dir().to_path_buf()\n\n}\n", "file_path": "src/dirs.rs", "rank": 57, "score": 40937.72138189035 }, { "content": "fn expand_home(dir: &str) -> Result<String> {\n\n if dir.starts_with('~') {\n\n let path: std::path::PathBuf = dir.into();\n\n if let std::path::Component::Normal(prefix) =\n\n path.components().next().unwrap()\n\n {\n\n let prefix_bytes = prefix.as_bytes();\n\n let name = if prefix_bytes == b\"~\" {\n\n None\n\n } else {\n\n Some(std::ffi::OsStr::from_bytes(&prefix_bytes[1..]))\n\n };\n\n if let Some(home) = home(name) {\n\n Ok(home\n\n .join(path.strip_prefix(prefix).unwrap())\n\n .to_str()\n\n .unwrap()\n\n .to_string())\n\n } else {\n\n anyhow::bail!(\n", "file_path": "src/parse/ast.rs", "rank": 58, "score": 38712.448409025485 }, { "content": " io: Io,\n\n pre_exec: Option<\n\n Box<dyn 'static + FnMut() -> std::io::Result<()> + Send + Sync>,\n\n >,\n\n}\n\n\n\nimpl Cfg {\n\n fn new(io: Io) -> Self {\n\n Self { io, pre_exec: None }\n\n }\n\n\n\n pub fn io(&self) -> &Io {\n\n &self.io\n\n }\n\n\n\n // Safety: see pre_exec in tokio::process::Command (this is just a\n\n // wrapper)\n\n pub unsafe fn pre_exec<F>(&mut self, f: F)\n\n where\n\n F: 'static + FnMut() -> std::io::Result<()> + Send + Sync,\n", "file_path": "src/runner/builtins/command.rs", "rank": 68, "score": 31003.503190509397 }, { "content": " // Safety: see pre_exec in tokio::process::Command (this 
is just a\n\n // wrapper)\n\n pub unsafe fn pre_exec<F>(&mut self, f: F)\n\n where\n\n F: 'static + FnMut() -> std::io::Result<()> + Send + Sync,\n\n {\n\n self.cfg.pre_exec(f);\n\n }\n\n\n\n pub fn apply_redirects(&mut self, redirects: &[crate::parse::Redirect]) {\n\n self.cfg.io.apply_redirects(redirects);\n\n }\n\n\n\n pub fn spawn(self, env: &Env) -> Result<Child> {\n\n let Self { f, exe, cfg } = self;\n\n (f)(exe, env, cfg)\n\n }\n\n}\n\n\n\npub struct Cfg {\n", "file_path": "src/runner/builtins/command.rs", "rank": 69, "score": 30998.95571184378 }, { "content": "use crate::runner::prelude::*;\n\n\n\npub struct Command {\n\n exe: crate::parse::Exe,\n\n f: super::Builtin,\n\n cfg: Cfg,\n\n}\n\n\n\nimpl Command {\n\n pub fn new(\n\n exe: crate::parse::Exe,\n\n io: Io,\n\n ) -> Result<Self, crate::parse::Exe> {\n\n if let Some(s) = exe.exe().to_str() {\n\n if let Some(f) = super::BUILTINS.get(s) {\n\n Ok(Self {\n\n exe,\n\n f,\n\n cfg: Cfg::new(io),\n\n })\n", "file_path": "src/runner/builtins/command.rs", "rank": 70, "score": 30995.38241519993 }, { "content": " F: FnOnce() -> std::process::ExitStatus + Send + 'static,\n\n {\n\n Self::Task(tokio::task::spawn_blocking(f))\n\n }\n\n\n\n pub fn new_wrapped(child: crate::runner::Child) -> Self {\n\n Self::Wrapped(Box::new(child))\n\n }\n\n\n\n pub fn id(&self) -> Option<u32> {\n\n match self {\n\n Self::Task(_) => None,\n\n Self::Wrapped(child) => child.id(),\n\n }\n\n }\n\n\n\n pub fn status(\n\n self,\n\n ) -> std::pin::Pin<\n\n Box<\n", "file_path": "src/runner/builtins/command.rs", "rank": 71, "score": 30991.68956870079 }, { "content": " >,\n\n}\n\n\n\nimpl Io {\n\n pub fn new() -> Self {\n\n Self {\n\n fds: std::collections::HashMap::new(),\n\n }\n\n }\n\n\n\n fn stdin(&self) -> Option<std::sync::Arc<File>> {\n\n self.fds.get(&0).map(std::sync::Arc::clone)\n\n }\n\n\n\n pub fn set_stdin<T: std::os::unix::io::IntoRawFd>(&mut self, stdin: T) {\n\n if let Some(file) = self.fds.remove(&0) {\n\n 
File::maybe_drop(file);\n\n }\n\n self.fds.insert(\n\n 0,\n", "file_path": "src/runner/builtins/command.rs", "rank": 72, "score": 30989.544626215833 }, { "content": " }\n\n\n\n // Safety: fd must not be owned by any other File object\n\n pub unsafe fn output(fd: std::os::unix::io::RawFd) -> Self {\n\n Self::Out(std::fs::File::from_raw_fd(fd))\n\n }\n\n\n\n fn maybe_drop(file: std::sync::Arc<Self>) {\n\n if let Ok(file) = std::sync::Arc::try_unwrap(file) {\n\n if file.as_raw_fd() <= 2 {\n\n let _ = file.into_raw_fd();\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl std::os::unix::io::AsRawFd for File {\n\n fn as_raw_fd(&self) -> std::os::unix::io::RawFd {\n\n match self {\n\n Self::In(fh) | Self::Out(fh) => fh.as_raw_fd(),\n", "file_path": "src/runner/builtins/command.rs", "rank": 73, "score": 30986.22629745968 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl std::os::unix::io::IntoRawFd for File {\n\n fn into_raw_fd(self) -> std::os::unix::io::RawFd {\n\n match self {\n\n Self::In(fh) | Self::Out(fh) => fh.into_raw_fd(),\n\n }\n\n }\n\n}\n\n\n\npub enum Child {\n\n Task(tokio::task::JoinHandle<std::process::ExitStatus>),\n\n Wrapped(Box<crate::runner::Child>),\n\n}\n\n\n\nimpl Child {\n\n pub fn new_task<F>(f: F) -> Self\n\n where\n", "file_path": "src/runner/builtins/command.rs", "rank": 74, "score": 30985.48153455408 }, { "content": "}\n\n\n\nimpl Drop for Io {\n\n fn drop(&mut self) {\n\n for (_, file) in self.fds.drain() {\n\n File::maybe_drop(file);\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum File {\n\n In(std::fs::File),\n\n Out(std::fs::File),\n\n}\n\n\n\nimpl File {\n\n // Safety: fd must not be owned by any other File object\n\n pub unsafe fn input(fd: std::os::unix::io::RawFd) -> Self {\n\n Self::In(std::fs::File::from_raw_fd(fd))\n", "file_path": "src/runner/builtins/command.rs", "rank": 75, "score": 30985.069798516783 }, { "content": " }\n\n\n\n pub fn apply_redirects(&mut self, redirects: &[crate::parse::Redirect]) {\n\n for redirect in redirects {\n\n let to = 
match &redirect.to {\n\n crate::parse::RedirectTarget::Fd(fd) => {\n\n std::sync::Arc::clone(&self.fds[fd])\n\n }\n\n crate::parse::RedirectTarget::File(path) => {\n\n let fd = redirect.dir.open(path).unwrap();\n\n match redirect.dir {\n\n crate::parse::Direction::In => {\n\n // Safety: we just opened fd, and nothing else has\n\n // or can use it\n\n std::sync::Arc::new(unsafe { File::input(fd) })\n\n }\n\n crate::parse::Direction::Out\n\n | crate::parse::Direction::Append => {\n\n // Safety: we just opened fd, and nothing else has\n\n // or can use it\n", "file_path": "src/runner/builtins/command.rs", "rank": 76, "score": 30984.392527005664 }, { "content": " {\n\n self.pre_exec = Some(Box::new(f));\n\n }\n\n\n\n pub fn setup_command(mut self, cmd: &mut crate::runner::Command) {\n\n self.io.setup_command(cmd);\n\n if let Some(pre_exec) = self.pre_exec.take() {\n\n // Safety: pre_exec can only have been set by calling the pre_exec\n\n // method, which is itself unsafe, so the safety comments at the\n\n // point where that is called are the relevant ones\n\n unsafe { cmd.pre_exec(pre_exec) };\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Io {\n\n fds: std::collections::HashMap<\n\n std::os::unix::io::RawFd,\n\n std::sync::Arc<File>,\n", "file_path": "src/runner/builtins/command.rs", "rank": 77, "score": 30984.024328043903 }, { "content": " } else {\n\n Err(exe)\n\n }\n\n } else {\n\n Err(exe)\n\n }\n\n }\n\n\n\n pub fn stdin(&mut self, fh: std::fs::File) {\n\n self.cfg.io.set_stdin(fh);\n\n }\n\n\n\n pub fn stdout(&mut self, fh: std::fs::File) {\n\n self.cfg.io.set_stdout(fh);\n\n }\n\n\n\n pub fn stderr(&mut self, fh: std::fs::File) {\n\n self.cfg.io.set_stderr(fh);\n\n }\n\n\n", "file_path": "src/runner/builtins/command.rs", "rank": 78, "score": 30983.482795641095 }, { "content": " std::sync::Arc::new(unsafe { File::output(fd) })\n\n }\n\n }\n\n }\n\n };\n\n self.fds.insert(redirect.from, to);\n\n }\n\n }\n\n\n\n pub fn read_line_stdin(&self) -> 
Result<(String, bool)> {\n\n let mut line = vec![];\n\n if let Some(file) = self.stdin() {\n\n if let File::In(fh) = &*file {\n\n // we have to read only a single character at a time here\n\n // because stdin needs to be shared across all commands in the\n\n // command list, some of which may be builtins and others of\n\n // which may be external commands - if we read past the end of\n\n // a line, then the characters past the end of that line will\n\n // no longer be available to the next command, since we have\n\n // them buffered in memory rather than them being on the stdin\n", "file_path": "src/runner/builtins/command.rs", "rank": 79, "score": 30982.390497085216 }, { "content": " }),\n\n );\n\n }\n\n\n\n fn stderr(&self) -> Option<std::sync::Arc<File>> {\n\n self.fds.get(&2).map(std::sync::Arc::clone)\n\n }\n\n\n\n pub fn set_stderr<T: std::os::unix::io::IntoRawFd>(&mut self, stderr: T) {\n\n if let Some(file) = self.fds.remove(&2) {\n\n File::maybe_drop(file);\n\n }\n\n self.fds.insert(\n\n 2,\n\n // Safety: we just acquired stderr via into_raw_fd, which acquires\n\n // ownership of the fd, so we are now the sole owner\n\n std::sync::Arc::new(unsafe {\n\n File::output(stderr.into_raw_fd())\n\n }),\n\n );\n", "file_path": "src/runner/builtins/command.rs", "rank": 80, "score": 30982.10879537552 }, { "content": " dyn std::future::Future<Output = Result<std::process::ExitStatus>>\n\n + Send\n\n + Sync,\n\n >,\n\n > {\n\n Box::pin(async move {\n\n match self {\n\n Self::Task(task) => task.await.map_err(|e| anyhow!(e)),\n\n Self::Wrapped(child) => child.status().await,\n\n }\n\n })\n\n }\n\n}\n", "file_path": "src/runner/builtins/command.rs", "rank": 81, "score": 30981.49501541946 }, { "content": " // Safety: we just acquired stdin via into_raw_fd, which acquires\n\n // ownership of the fd, so we are now the sole owner\n\n std::sync::Arc::new(unsafe { File::input(stdin.into_raw_fd()) }),\n\n );\n\n }\n\n\n\n fn stdout(&self) -> Option<std::sync::Arc<File>> {\n\n 
self.fds.get(&1).map(std::sync::Arc::clone)\n\n }\n\n\n\n pub fn set_stdout<T: std::os::unix::io::IntoRawFd>(&mut self, stdout: T) {\n\n if let Some(file) = self.fds.remove(&1) {\n\n File::maybe_drop(file);\n\n }\n\n self.fds.insert(\n\n 1,\n\n // Safety: we just acquired stdout via into_raw_fd, which acquires\n\n // ownership of the fd, so we are now the sole owner\n\n std::sync::Arc::new(unsafe {\n\n File::output(stdout.into_raw_fd())\n", "file_path": "src/runner/builtins/command.rs", "rank": 82, "score": 30981.104937386423 }, { "content": " }\n\n\n\n pub fn setup_command(mut self, cmd: &mut crate::runner::Command) {\n\n if let Some(stdin) = self.fds.remove(&0) {\n\n if let Ok(stdin) = std::sync::Arc::try_unwrap(stdin) {\n\n let stdin = stdin.into_raw_fd();\n\n if stdin != 0 {\n\n // Safety: we just acquired stdin via into_raw_fd, which\n\n // acquires ownership of the fd, so we are now the sole\n\n // owner\n\n cmd.stdin(unsafe { std::fs::File::from_raw_fd(stdin) });\n\n self.fds.remove(&0);\n\n }\n\n }\n\n }\n\n if let Some(stdout) = self.fds.remove(&1) {\n\n if let Ok(stdout) = std::sync::Arc::try_unwrap(stdout) {\n\n let stdout = stdout.into_raw_fd();\n\n if stdout != 1 {\n\n // Safety: we just acquired stdout via into_raw_fd, which\n", "file_path": "src/runner/builtins/command.rs", "rank": 83, "score": 30979.870744669188 }, { "content": " if let File::Out(fh) = &*file {\n\n Ok((&*fh).write_all(buf)?)\n\n } else {\n\n Ok(())\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn write_stderr(&self, buf: &[u8]) -> Result<()> {\n\n if let Some(file) = self.stderr() {\n\n if let File::Out(fh) = &*file {\n\n Ok((&*fh).write_all(buf)?)\n\n } else {\n\n Ok(())\n\n }\n\n } else {\n\n Ok(())\n\n }\n", "file_path": "src/runner/builtins/command.rs", "rank": 84, "score": 30975.3714788108 }, { "content": " // pipe.\n\n for byte in fh.bytes() {\n\n let byte = byte?;\n\n line.push(byte);\n\n if byte == b'\\n' {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n let done = 
line.is_empty();\n\n let mut line = String::from_utf8(line).unwrap();\n\n if line.ends_with('\\n') {\n\n line.truncate(line.len() - 1);\n\n }\n\n Ok((line, done))\n\n }\n\n\n\n pub fn write_stdout(&self, buf: &[u8]) -> Result<()> {\n\n if let Some(file) = self.stdout() {\n", "file_path": "src/runner/builtins/command.rs", "rank": 85, "score": 30975.074669643393 }, { "content": " // acquires ownership of the fd, so we are now the sole\n\n // owner\n\n cmd.stdout(unsafe { std::fs::File::from_raw_fd(stdout) });\n\n self.fds.remove(&1);\n\n }\n\n }\n\n }\n\n if let Some(stderr) = self.fds.remove(&2) {\n\n if let Ok(stderr) = std::sync::Arc::try_unwrap(stderr) {\n\n let stderr = stderr.into_raw_fd();\n\n if stderr != 2 {\n\n // Safety: we just acquired stderr via into_raw_fd, which\n\n // acquires ownership of the fd, so we are now the sole\n\n // owner\n\n cmd.stderr(unsafe { std::fs::File::from_raw_fd(stderr) });\n\n self.fds.remove(&2);\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/runner/builtins/command.rs", "rank": 86, "score": 30974.75264585205 }, { "content": "}\n\n\n\npub struct Reader(std::sync::Arc<InnerReader>);\n\n\n\nimpl Reader {\n\n pub fn new(\n\n mut input: tokio::sync::mpsc::UnboundedReceiver<Event>,\n\n ) -> Self {\n\n let inner = std::sync::Arc::new(InnerReader::new());\n\n {\n\n let inner = inner.clone();\n\n tokio::spawn(async move {\n\n while let Some(event) = input.recv().await {\n\n inner.new_event(Some(event));\n\n }\n\n inner.new_event(None);\n\n });\n\n }\n\n Self(inner)\n\n }\n\n\n\n pub async fn recv(&self) -> Option<Event> {\n\n self.0.recv().await\n\n }\n\n}\n\n\n", "file_path": "src/shell/event.rs", "rank": 87, "score": 20.393354584018823 }, { "content": "impl Exe {\n\n pub fn exe(&self) -> &std::path::Path {\n\n &self.exe\n\n }\n\n\n\n pub fn args(&self) -> &[String] {\n\n &self.args\n\n }\n\n\n\n pub fn append(&mut self, other: Self) {\n\n let Self {\n\n exe: _exe,\n\n args,\n\n redirects,\n\n } = other;\n\n self.args.extend(args);\n\n 
self.redirects.extend(redirects);\n\n }\n\n\n\n pub fn redirects(&self) -> &[Redirect] {\n", "file_path": "src/parse/mod.rs", "rank": 88, "score": 19.296474863272394 }, { "content": " }\n\n\n\n fn build_ast(pipeline: pest::iterators::Pair<Rule>) -> Self {\n\n assert!(matches!(pipeline.as_rule(), Rule::pipeline));\n\n let span = (pipeline.as_span().start(), pipeline.as_span().end());\n\n Self {\n\n exes: pipeline.into_inner().map(Exe::build_ast).collect(),\n\n span,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Exe {\n\n exe: Word,\n\n args: Vec<Word>,\n\n redirects: Vec<Redirect>,\n\n}\n\n\n\nimpl Exe {\n", "file_path": "src/parse/ast.rs", "rank": 89, "score": 16.948746966158193 }, { "content": "use crate::shell::prelude::*;\n\n\n\nuse notify::Watcher as _;\n\n\n\npub struct Handler {\n\n git_w: tokio::sync::mpsc::UnboundedSender<std::path::PathBuf>,\n\n}\n\n\n\nimpl Handler {\n\n pub fn new(event_w: crate::shell::event::Writer) -> Self {\n\n let (git_w, git_r) = tokio::sync::mpsc::unbounded_channel();\n\n tokio::spawn(Self::task(git_r, event_w));\n\n Self { git_w }\n\n }\n\n\n\n pub fn new_dir(&self, path: std::path::PathBuf) {\n\n self.git_w.send(path).unwrap();\n\n }\n\n\n\n async fn task(\n", "file_path": "src/shell/inputs/git.rs", "rank": 90, "score": 16.559222154251657 }, { "content": "use crate::runner::prelude::*;\n\n\n\npub mod command;\n\npub use command::{Child, Command, File, Io};\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 91, "score": 15.804132183204203 }, { "content": " pub fn commands(&self) -> &[Command] {\n\n &self.commands\n\n }\n\n\n\n fn build_ast(commands: pest::iterators::Pair<Rule>) -> Self {\n\n assert!(matches!(commands.as_rule(), Rule::commands));\n\n Self {\n\n commands: commands.into_inner().map(Command::build_ast).collect(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum Command {\n\n Pipeline(Pipeline),\n\n If(Pipeline),\n\n While(Pipeline),\n\n For(String, 
Vec<Word>),\n\n Else(Option<Pipeline>),\n\n End,\n", "file_path": "src/parse/ast.rs", "rank": 92, "score": 15.484113541481284 }, { "content": "\n\n let vt = std::sync::Arc::new(std::sync::Mutex::new(Vt::new(size)));\n\n\n\n tokio::spawn(Self::task(\n\n pty,\n\n std::sync::Arc::clone(&vt),\n\n request_r,\n\n event_w,\n\n ));\n\n\n\n Ok((Self { vt, request_w }, pts))\n\n }\n\n\n\n pub fn with_vt<T>(&self, f: impl FnOnce(&Vt) -> T) -> T {\n\n let vt = self.vt.lock().unwrap();\n\n f(&*vt)\n\n }\n\n\n\n pub fn with_vt_mut<T>(&self, f: impl FnOnce(&mut Vt) -> T) -> T {\n\n let mut vt = self.vt.lock().unwrap();\n", "file_path": "src/shell/history/pty.rs", "rank": 93, "score": 15.104058310604646 }, { "content": "pub mod ast;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct Pipeline {\n\n exes: Vec<Exe>,\n\n}\n\n\n\nimpl Pipeline {\n\n pub fn into_exes(self) -> impl Iterator<Item = Exe> {\n\n self.exes.into_iter()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct Exe {\n\n exe: std::path::PathBuf,\n\n args: Vec<String>,\n\n redirects: Vec<Redirect>,\n\n}\n\n\n", "file_path": "src/parse/mod.rs", "rank": 94, "score": 14.999823132987174 }, { "content": "use crate::prelude::*;\n\n\n\n#[derive(serde::Deserialize, Default, Debug)]\n\npub struct Config {\n\n aliases:\n\n std::collections::HashMap<std::path::PathBuf, crate::parse::ast::Exe>,\n\n}\n\n\n\nimpl Config {\n\n pub fn load() -> Result<Self> {\n\n let file = crate::dirs::config_file();\n\n if std::fs::metadata(&file).is_ok() {\n\n Ok(toml::from_slice(&std::fs::read(&file)?)?)\n\n } else {\n\n Ok(Self::default())\n\n }\n\n }\n\n\n\n pub fn alias_for(\n\n &self,\n\n path: &std::path::Path,\n\n ) -> Option<&crate::parse::ast::Exe> {\n\n self.aliases.get(path)\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 95, "score": 14.567071885302447 }, { "content": "use crate::shell::prelude::*;\n\n\n\npub struct Handler;\n\n\n\nimpl Handler {\n\n pub fn new(\n\n mut input: textmode::blocking::Input,\n\n 
event_w: crate::shell::event::Writer,\n\n ) -> Self {\n\n std::thread::spawn(move || {\n\n while let Some(key) = input.read_key().unwrap() {\n\n event_w.send(Event::Key(key));\n\n }\n\n });\n\n Self\n\n }\n\n}\n", "file_path": "src/shell/inputs/stdin.rs", "rank": 96, "score": 14.089513409914625 }, { "content": "use super::*;\n\n\n\nimpl From<Pipeline> for Command {\n\n fn from(pipeline: Pipeline) -> Self {\n\n Self::Pipeline(pipeline)\n\n }\n\n}\n\n\n\nmacro_rules! cs {\n\n ($($commands:expr),*) => {\n\n Commands {\n\n commands: [$($commands),*]\n\n .into_iter()\n\n .map(|c| c.into())\n\n .collect(),\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! p {\n", "file_path": "src/parse/test_ast.rs", "rank": 97, "score": 14.006069778032202 }, { "content": " let mut iter = pair.into_inner();\n\n let commands = iter.next().unwrap();\n\n assert!(matches!(commands.as_rule(), Rule::commands));\n\n let redirects = iter.map(Redirect::build_ast).collect();\n\n return Self {\n\n exe: Word {\n\n parts: vec![WordPart::SingleQuoted(\n\n crate::info::current_exe()\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n .to_string(),\n\n )],\n\n },\n\n args: vec![\n\n Word {\n\n parts: vec![WordPart::SingleQuoted(\"-c\".to_string())],\n\n },\n\n Word {\n\n parts: vec![WordPart::SingleQuoted(\n", "file_path": "src/parse/ast.rs", "rank": 98, "score": 13.63659430087517 }, { "content": " _ => unreachable!(),\n\n }\n\n }\n\n Self {\n\n exe,\n\n args,\n\n redirects,\n\n }\n\n }\n\n}\n\n\n\nimpl<'de> serde::Deserialize<'de> for Exe {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n struct Visitor;\n\n impl<'de> serde::de::Visitor<'de> for Visitor {\n\n type Value = Exe;\n\n\n", "file_path": "src/parse/ast.rs", "rank": 99, "score": 13.466828275887957 } ]